private void tryCreateFail(CoreAdminHandler admin, String name, String dataDir, String... errs)
      throws Exception {
    try {
      SolrQueryResponse resp = new SolrQueryResponse();

      SolrQueryRequest request =
          req(
              CoreAdminParams.ACTION,
              CoreAdminParams.CoreAdminAction.CREATE.toString(),
              CoreAdminParams.DATA_DIR,
              dataDir,
              CoreAdminParams.NAME,
              name,
              "schema",
              "schema.xml",
              "config",
              "solrconfig.xml");

      admin.handleRequestBody(request, resp);
      fail("Should have thrown an error");
    } catch (SolrException se) {
      // SolrException cause = (SolrException)se.getCause();
      assertEquals("Exception code should be 500", 500, se.code());
      for (String err : errs) {
        assertTrue(
            "Should have seen an exception containing the error", se.getMessage().contains(err));
      }
    }
  }
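A hypothetical call to this helper from a test body; the core name, data dir and expected message fragment below are placeholders, not values taken from elsewhere in this listing.

    // Expect CREATE to fail with a 500 whose message mentions the bad data dir.
    tryCreateFail(admin, "corex", "/path/that/does/not/exist", "does/not/exist");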
 private Properties decryptPwd(Properties initProps) {
   String encryptionKey = initProps.getProperty("encryptKeyFile");
   if (initProps.getProperty("password") != null && encryptionKey != null) {
     // this means the password is encrypted and use the file to decode it
     try {
       try (Reader fr = new InputStreamReader(new FileInputStream(encryptionKey), UTF_8)) {
         char[] chars = new char[100]; // max 100 char password
         int len = fr.read(chars);
         if (len < 6)
           throw new DataImportHandlerException(
               SEVERE, "There should be a password of length 6 atleast " + encryptionKey);
         Properties props = new Properties();
         props.putAll(initProps);
         String password = null;
         try {
           password =
               CryptoKeys.decodeAES(initProps.getProperty("password"), new String(chars, 0, len))
                   .trim();
         } catch (SolrException se) {
           throw new DataImportHandlerException(SEVERE, "Error decoding password", se.getCause());
         }
         props.put("password", password);
         initProps = props;
       }
     } catch (IOException e) {
       throw new DataImportHandlerException(
           SEVERE, "Could not load encryptKeyFile " + encryptionKey, e);
     }
   }
   return initProps;
 }
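A minimal sketch of exercising the decryption path, assuming a key file at a hypothetical location and a password value AES-encrypted with the key stored there; all values are placeholders.

   Properties initProps = new Properties();
   initProps.setProperty("user", "db_user");
   initProps.setProperty("password", "U2FsdGVkX1+...");              // encrypted value (placeholder)
   initProps.setProperty("encryptKeyFile", "/var/solr/encrypt.key"); // hypothetical key file path

   // Returns a copy of the properties with "password" replaced by the decrypted value,
   // or throws DataImportHandlerException if the key file is missing or too short.
   Properties decrypted = decryptPwd(initProps);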
Example #3
  public NodeList getNodeList(String path, boolean errIfMissing) {
    XPath xpath = xpathFactory.newXPath();
    String xstr = normalize(path);

    try {
      NodeList nodeList = (NodeList) xpath.evaluate(xstr, doc, XPathConstants.NODESET);

      if (null == nodeList) {
        if (errIfMissing) {
          throw new RuntimeException(name + " missing " + path);
        } else {
          log.debug(name + " missing optional " + path);
          return null;
        }
      }

      log.trace(name + ":" + path + "=" + nodeList);
      return nodeList;

    } catch (XPathExpressionException e) {
      SolrException.log(log, "Error in xpath", e);
      throw new SolrException(
          SolrException.ErrorCode.SERVER_ERROR, "Error in xpath:" + xstr + " for " + name, e);
    } catch (SolrException e) {
      throw (e);
    } catch (Exception e) {
      SolrException.log(log, "Error in xpath", e);
      throw new SolrException(
          SolrException.ErrorCode.SERVER_ERROR, "Error in xpath:" + xstr + " for " + name, e);
    }
  }
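An illustrative lookup through this helper, assuming it is called from within the same config object on an already-parsed document; the xpath and attribute name are placeholders.

    // Optional lookup: returns null (after a debug log) when nothing matches.
    NodeList handlers = getNodeList("config/requestHandler", false);
    if (handlers != null) {
      for (int i = 0; i < handlers.getLength(); i++) {
        Element handler = (Element) handlers.item(i);
        log.info("found request handler: " + handler.getAttribute("name"));
      }
    }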
Example #4
  public Node getNode(String path, Document doc, boolean errIfMissing) {
    XPath xpath = xpathFactory.newXPath();
    String xstr = normalize(path);

    try {
      NodeList nodes = (NodeList) xpath.evaluate(xstr, doc, XPathConstants.NODESET);
      if (nodes == null || 0 == nodes.getLength()) {
        if (errIfMissing) {
          throw new RuntimeException(name + " missing " + path);
        } else {
          log.debug(name + " missing optional " + path);
          return null;
        }
      }
      if (1 < nodes.getLength()) {
        throw new SolrException(
            SolrException.ErrorCode.SERVER_ERROR,
            name + " contains more than one value for config path: " + path);
      }
      Node nd = nodes.item(0);
      log.trace(name + ":" + path + "=" + nd);
      return nd;

    } catch (XPathExpressionException e) {
      SolrException.log(log, "Error in xpath", e);
      throw new SolrException(
          SolrException.ErrorCode.SERVER_ERROR, "Error in xpath:" + xstr + " for " + name, e);
    } catch (SolrException e) {
      throw (e);
    } catch (Exception e) {
      SolrException.log(log, "Error in xpath", e);
      throw new SolrException(
          SolrException.ErrorCode.SERVER_ERROR, "Error in xpath:" + xstr + " for " + name, e);
    }
  }
 public void assertQEx(String message, SolrQueryRequest req, SolrException.ErrorCode code) {
   try {
     h.query(req);
     fail(message);
   } catch (SolrException e) {
     assertEquals(code.code, e.code());
   } catch (Exception e2) {
     throw new RuntimeException("Exception during query", e2);
   }
 }
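A typical use of this assertion helper; the request parameters are placeholders chosen only to provoke a parse failure.

   // Expect a 400 when the query cannot be parsed.
   assertQEx("malformed query should be rejected",
       req("q", "field:[* TO"), SolrException.ErrorCode.BAD_REQUEST);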
Example #6
 public void testWrongEncoding() throws Exception {
   String wrongEncoding = "stopwordsWrongEncoding.txt";
   SolrResourceLoader loader = new SolrResourceLoader("solr/collection1");
   // ensure we get our exception
   try {
     List<String> lines = loader.getLines(wrongEncoding);
     fail();
   } catch (SolrException expected) {
     assertTrue(expected.getCause() instanceof CharacterCodingException);
   }
 }
  @Test
  public void testBuildDocument() throws Exception {
    SolrCore core = h.getCore();

    // undefined field
    try {
      SolrInputDocument doc = new SolrInputDocument();
      doc.setField("unknown field", 12345, 1.0f);
      DocumentBuilder.toDocument(doc, core.getLatestSchema());
      fail("should throw an error");
    } catch (SolrException ex) {
      assertEquals("should be bad request", 400, ex.code());
    }
  }
  public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
    SolrParams params = req.getParams();
    params = adjustParams(params);
    req.setParams(params);
    TupleStream tupleStream = null;

    try {
      tupleStream = this.streamFactory.constructStream(params.get("expr"));
    } catch (Exception e) {
      // Catch exceptions that occur while the stream is being created. This includes errors from
      // the streaming expression parse rules.
      SolrException.log(logger, e);
      rsp.add("result-set", new DummyErrorStream(e));

      return;
    }

    int worker = params.getInt("workerID", 0);
    int numWorkers = params.getInt("numWorkers", 1);
    StreamContext context = new StreamContext();
    context.workerID = worker;
    context.numWorkers = numWorkers;
    context.setSolrClientCache(clientCache);
    tupleStream.setStreamContext(context);
    rsp.add("result-set", new TimerStream(new ExceptionStream(tupleStream)));
  }
 private void close(CacheValue val) {
   try {
     log.info("Closing directory: " + val.path);
     val.directory.close();
   } catch (Exception e) {
     SolrException.log(log, "Error closing directory", e);
   }
 }
Example #10
  private boolean syncReplicas(
      ZkController zkController,
      SolrCore core,
      ZkNodeProps leaderProps,
      boolean peerSyncOnlyWithActive) {
    boolean success = false;
    CloudDescriptor cloudDesc = core.getCoreDescriptor().getCloudDescriptor();
    String collection = cloudDesc.getCollectionName();
    String shardId = cloudDesc.getShardId();

    if (isClosed) {
      log.info("We have been closed, won't sync with replicas");
      return false;
    }

    // first sync ourselves - we are the potential leader after all
    try {
      success =
          syncWithReplicas(
              zkController, core, leaderProps, collection, shardId, peerSyncOnlyWithActive);
    } catch (Exception e) {
      SolrException.log(log, "Sync Failed", e);
    }
    try {
      if (isClosed) {
        log.info("We have been closed, won't attempt to sync replicas back to leader");
        return false;
      }

      if (success) {
        log.info("Sync Success - now sync replicas to me");

        syncToMe(zkController, collection, shardId, leaderProps, core.getCoreDescriptor());

      } else {
        log.info("Leader's attempt to sync with shard failed, moving to the next candidate");
        // lets see who seems ahead...
      }

    } catch (Exception e) {
      SolrException.log(log, "Sync Failed", e);
    }

    return success;
  }
Example #11
  /**
   * Builds a config:
   *
   * <p>Note that the 'name' parameter is used to obtain a valid input stream if no valid one is
   * provided through 'is'. If no valid stream is provided, a valid SolrResourceLoader instance
   * should be provided through 'loader' so the resource can be opened (@see
   * SolrResourceLoader#openResource); if no SolrResourceLoader instance is provided, a default one
   * will be created.
   *
   * <p>Consider passing a non-null 'name' parameter in all use-cases since it is used for logging
   * &amp; exception reporting.
   *
   * @param loader the resource loader used to obtain an input stream if 'is' is null
   * @param name the resource name used if the input stream 'is' is null
   * @param is the resource as a SAX InputSource
   * @param prefix an optional prefix that will be prepended to all non-absolute xpath expressions
   */
  public Config(
      SolrResourceLoader loader,
      String name,
      InputSource is,
      String prefix,
      boolean substituteProps)
      throws ParserConfigurationException, IOException, SAXException {
    if (loader == null) {
      loader = new SolrResourceLoader(SolrResourceLoader.locateSolrHome());
    }
    this.loader = loader;
    this.name = name;
    this.prefix = (prefix != null && !prefix.endsWith("/")) ? prefix + '/' : prefix;
    try {
      javax.xml.parsers.DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();

      if (is == null) {
        InputStream in = loader.openConfig(name);
        if (in instanceof ZkSolrResourceLoader.ZkByteArrayInputStream) {
          zkVersion = ((ZkSolrResourceLoader.ZkByteArrayInputStream) in).getStat().getVersion();
          log.info("loaded config {} with version {} ", name, zkVersion);
        }
        is = new InputSource(in);
        is.setSystemId(SystemIdResolver.createSystemIdFromResourceName(name));
      }

      // only enable xinclude, if a SystemId is available
      if (is.getSystemId() != null) {
        try {
          dbf.setXIncludeAware(true);
          dbf.setNamespaceAware(true);
        } catch (UnsupportedOperationException e) {
          log.warn(name + " XML parser doesn't support XInclude option");
        }
      }

      final DocumentBuilder db = dbf.newDocumentBuilder();
      db.setEntityResolver(new SystemIdResolver(loader));
      db.setErrorHandler(xmllog);
      try {
        doc = db.parse(is);
        origDoc = copyDoc(doc);
      } finally {
        // some XML parsers are broken and don't close the byte stream
        // (but they should according to spec)
        IOUtils.closeQuietly(is.getByteStream());
      }
      if (substituteProps) {
        DOMUtil.substituteProperties(doc, getSubstituteProperties());
      }
    } catch (ParserConfigurationException | SAXException | TransformerException e) {
      SolrException.log(log, "Exception during parsing file: " + name, e);
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
    }
  }
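A minimal construction sketch matching the signature above: a null InputSource so the loader opens the resource by name, no xpath prefix, and property substitution enabled. The solr home path and resource name are placeholders, and the checked exceptions declared by the constructor propagate to the caller.

  SolrResourceLoader loader = new SolrResourceLoader("solr/collection1");
  Config config = new Config(loader, "solrconfig.xml", null, null, true);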
Example #12
  public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
    SolrParams params = req.getParams();
    params = adjustParams(params);
    req.setParams(params);

    if (params.get("action") != null) {
      handleAdmin(req, rsp, params);
      return;
    }

    TupleStream tupleStream;

    try {
      tupleStream = this.streamFactory.constructStream(params.get("expr"));
    } catch (Exception e) {
      // Catch exceptions that occur while the stream is being created. This includes errors from
      // the streaming expression parse rules.
      SolrException.log(logger, e);
      rsp.add("result-set", new DummyErrorStream(e));

      return;
    }

    int worker = params.getInt("workerID", 0);
    int numWorkers = params.getInt("numWorkers", 1);
    StreamContext context = new StreamContext();
    context.workerID = worker;
    context.numWorkers = numWorkers;
    context.setSolrClientCache(clientCache);
    context.setModelCache(modelCache);
    context.put("core", this.coreName);
    context.put("solr-core", req.getCore());
    tupleStream.setStreamContext(context);

    // if asking for explanation then go get it
    if (params.getBool("explain", false)) {
      rsp.add("explanation", tupleStream.toExplanation(this.streamFactory));
    }

    if (tupleStream instanceof DaemonStream) {
      DaemonStream daemonStream = (DaemonStream) tupleStream;
      if (daemons.containsKey(daemonStream.getId())) {
        daemons.remove(daemonStream.getId()).close();
      }
      daemonStream.setDaemons(daemons);
      daemonStream.open(); // This will start the daemonStream
      daemons.put(daemonStream.getId(), daemonStream);
      rsp.add(
          "result-set",
          new DaemonResponseStream("Deamon:" + daemonStream.getId() + " started on " + coreName));
    } else {
      rsp.add("result-set", new TimerStream(new ExceptionStream(tupleStream)));
    }
  }
  /**
   * test that SolrExceptions thrown by HttpSolrClient can correctly encapsulate http status codes
   * even when not on the list of ErrorCodes solr may return.
   */
  public void testSolrExceptionCodeNotFromSolr() throws IOException, SolrServerException {
    final int status = 527;
    assertEquals(
        status
            + " didn't generate an UNKNOWN error code, someone modified the list of valid ErrorCode's w/o changing this test to work a different way",
        ErrorCode.UNKNOWN,
        ErrorCode.getErrorCode(status));

    try (HttpSolrClient client = new HttpSolrClient(jetty.getBaseUrl().toString() + "/debug/foo")) {
      DebugServlet.setErrorCode(status);
      try {
        SolrQuery q = new SolrQuery("foo");
        client.query(q, METHOD.GET);
        fail("Didn't get excepted exception from oversided request");
      } catch (SolrException e) {
        assertEquals("Unexpected exception status code", status, e.code());
      }
    } finally {
      DebugServlet.clear();
    }
  }
Example #14
  public void cancelCoreRecoveries() {

    List<SolrCore> cores = solrCores.getCores();

    // we must cancel without holding the cores sync
    // make sure we wait for any recoveries to stop
    for (SolrCore core : cores) {
      try {
        core.getSolrCoreState().cancelRecovery();
      } catch (Exception e) {
        SolrException.log(log, "Error canceling recovery for core", e);
      }
    }
  }
 protected Map<String, Object> getNext() {
   try {
     if (rowIterator == null) return null;
     if (rowIterator.hasNext()) return rowIterator.next();
     query = null;
     rowIterator = null;
     return null;
   } catch (Exception e) {
     SolrException.log(log, "getNext() failed for query '" + query + "'", e);
     query = null;
     rowIterator = null;
     wrapAndThrow(DataImportHandlerException.WARN, e);
     return null;
   }
 }
 private void doRequest(final Req req) {
   try {
     SolrClient solrClient = clients.getSolrClient(req);
     solrClient.request(req.uReq);
   } catch (Exception e) {
     SolrException.log(log, e);
     Error error = new Error();
     error.e = e;
     error.req = req;
     if (e instanceof SolrException) {
       error.statusCode = ((SolrException) e).code();
     }
     errors.add(error);
   }
 }
Example #17
  @Override
  public void warm(SolrIndexSearcher searcher, SolrCache<K, V> old) {
    if (regenerator == null) return;
    long warmingStartTime = System.nanoTime();
    LRUCache<K, V> other = (LRUCache<K, V>) old;

    // warm entries
    if (isAutowarmingOn()) {
      Object[] keys, vals = null;

      // Don't do the autowarming in the synchronized block, just pull out the keys and values.
      synchronized (other.map) {
        int sz = autowarm.getWarmCount(other.map.size());

        keys = new Object[sz];
        vals = new Object[sz];

        Iterator<Map.Entry<K, V>> iter = other.map.entrySet().iterator();

        // iteration goes from oldest (least recently used) to most recently used,
        // so we need to skip over the oldest entries.
        int skip = other.map.size() - sz;
        for (int i = 0; i < skip; i++) iter.next();

        for (int i = 0; i < sz; i++) {
          Map.Entry<K, V> entry = iter.next();
          keys[i] = entry.getKey();
          vals[i] = entry.getValue();
        }
      }

      // autowarm from the oldest to the newest entries so that the ordering will be
      // correct in the new cache.
      for (int i = 0; i < keys.length; i++) {
        try {
          boolean continueRegen = regenerator.regenerateItem(searcher, this, old, keys[i], vals[i]);
          if (!continueRegen) break;
        } catch (Exception e) {
          SolrException.log(log, "Error during auto-warming of key:" + keys[i], e);
        }
      }
    }

    warmupTime =
        TimeUnit.MILLISECONDS.convert(System.nanoTime() - warmingStartTime, TimeUnit.NANOSECONDS);
  }
Example #18
 /**
  * Send a document to Solr for indexing with re-try support in case of communication exception.
  */
 protected void addDocWithRetry(CloudSolrServer solr, SolrInputDocument doc, int retryInSecs)
     throws Exception {
   try {
     solr.add(doc);
   } catch (Exception solrExc) {
     // add some basic re-try logic in the event of a communication error
     Throwable rootCause = SolrException.getRootCause(solrExc);
     if (rootCause instanceof IOException) {
       log.error(
           rootCause.getClass().getSimpleName()
               + " when trying to send a document to SolrCloud, will re-try after waiting "
               + retryInSecs
               + " seconds; "
               + rootCause);
       try {
         Thread.sleep(retryInSecs * 1000);
       } catch (InterruptedException ignoreMe) {
       }
       // re-try this doc
       solr.add(doc);
     }
   }
 }
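How the retry helper might be driven from indexing code, assuming a CloudSolrServer handle named solr; the field names, values and retry interval are placeholders.

  SolrInputDocument doc = new SolrInputDocument();
  doc.setField("id", "doc-1");
  doc.setField("title_s", "example title");
  // One automatic retry after a 5-second pause if the root cause of the failure is an IOException.
  addDocWithRetry(solr, doc, 5);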
Example #19
 /**
  * Logs an error and throws an exception if any of the element(s) at the given elementXpath
  * contains an attribute name that is not among knownAttributes.
  */
 public void complainAboutUnknownAttributes(String elementXpath, String... knownAttributes) {
   SortedMap<String, SortedSet<String>> problems = new TreeMap<>();
   NodeList nodeList = getNodeList(elementXpath, false);
   for (int i = 0; i < nodeList.getLength(); ++i) {
     Element element = (Element) nodeList.item(i);
     Set<String> unknownAttributes = getUnknownAttributes(element, knownAttributes);
     if (null != unknownAttributes) {
       String elementName = element.getNodeName();
       SortedSet<String> allUnknownAttributes = problems.get(elementName);
       if (null == allUnknownAttributes) {
         allUnknownAttributes = new TreeSet<>();
         problems.put(elementName, allUnknownAttributes);
       }
       allUnknownAttributes.addAll(unknownAttributes);
     }
   }
   if (problems.size() > 0) {
     StringBuilder message = new StringBuilder();
     for (Map.Entry<String, SortedSet<String>> entry : problems.entrySet()) {
       if (message.length() > 0) {
         message.append(", ");
       }
       message.append('<');
       message.append(entry.getKey());
       for (String attributeName : entry.getValue()) {
         message.append(' ');
         message.append(attributeName);
         message.append("=\"...\"");
       }
       message.append('>');
     }
     message.insert(0, "Unknown attribute(s) on element(s): ");
     String msg = message.toString();
     SolrException.log(log, msg);
     throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, msg);
   }
 }
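An illustrative call from within the same config object; the element xpath and the whitelist of known attributes are placeholders.

  // Throws a SERVER_ERROR listing any <requestHandler> attribute other than name, class or startup.
  complainAboutUnknownAttributes("config/requestHandler", "name", "class", "startup");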
  @Test
  public void testCoreAdminHandler() throws Exception {
    final File workDir = createTempDir().toFile();

    final CoreContainer cores = h.getCoreContainer();

    final CoreAdminHandler admin = new CoreAdminHandler(cores);

    Path instDir;
    try (SolrCore template = cores.getCore("collection1")) {
      assertNotNull(template);
      instDir = template.getCoreDescriptor().getInstanceDir();
    }

    assertTrue("instDir doesn't exist: " + instDir, Files.exists(instDir));
    final File instPropFile = new File(workDir, "instProp");
    FileUtils.copyDirectory(instDir.toFile(), instPropFile);

    SolrQueryResponse resp = new SolrQueryResponse();
    // Sneaking in a test for using a bad core name
    try {
      admin.handleRequestBody(
          req(
              CoreAdminParams.ACTION,
              CoreAdminParams.CoreAdminAction.CREATE.toString(),
              CoreAdminParams.INSTANCE_DIR,
              instPropFile.getAbsolutePath(),
              CoreAdminParams.NAME,
              "ugly$core=name"),
          resp);

    } catch (SolrException se) {
      assertTrue(
          "Expected error message for bad core name.", se.toString().contains("Invalid core"));
    }
    CoreDescriptor cd = cores.getCoreDescriptor("ugly$core=name");
    assertNull("Should NOT have added this core!", cd);

    // create a new core (using CoreAdminHandler) w/ properties

    admin.handleRequestBody(
        req(
            CoreAdminParams.ACTION,
            CoreAdminParams.CoreAdminAction.CREATE.toString(),
            CoreAdminParams.INSTANCE_DIR,
            instPropFile.getAbsolutePath(),
            CoreAdminParams.NAME,
            "props",
            CoreAdminParams.PROPERTY_PREFIX + "hoss",
            "man",
            CoreAdminParams.PROPERTY_PREFIX + "foo",
            "baz"),
        resp);
    assertNull("Exception on create", resp.getException());

    cd = cores.getCoreDescriptor("props");
    assertNotNull("Core not added!", cd);
    assertEquals(cd.getCoreProperty("hoss", null), "man");
    assertEquals(cd.getCoreProperty("foo", null), "baz");

    // attempt to create a bogus core and confirm failure
    ignoreException("Could not load config");
    try {
      resp = new SolrQueryResponse();
      admin.handleRequestBody(
          req(
              CoreAdminParams.ACTION,
              CoreAdminParams.CoreAdminAction.CREATE.toString(),
              CoreAdminParams.NAME,
              "bogus_dir_core",
              CoreAdminParams.INSTANCE_DIR,
              "dir_does_not_exist_127896"),
          resp);
      fail("bogus collection created ok");
    } catch (SolrException e) {
      // :NOOP:
      // :TODO: CoreAdminHandler's exception messages are terrible, otherwise we could assert
      // something useful here
    }
    unIgnoreException("Could not load config");

    // check specifically for status of the failed core name
    resp = new SolrQueryResponse();
    admin.handleRequestBody(
        req(
            CoreAdminParams.ACTION,
            CoreAdminParams.CoreAdminAction.STATUS.toString(),
            CoreAdminParams.CORE,
            "bogus_dir_core"),
        resp);
    Map<String, Exception> failures = (Map<String, Exception>) resp.getValues().get("initFailures");
    assertNotNull("core failures is null", failures);

    NamedList<Object> status = (NamedList<Object>) resp.getValues().get("status");
    assertNotNull("core status is null", status);

    assertEquals("wrong number of core failures", 1, failures.size());
    Exception fail = failures.get("bogus_dir_core");
    assertNotNull("null failure for test core", fail);
    assertTrue(
        "init failure doesn't mention problem: " + fail.getCause().getMessage(),
        0 < fail.getCause().getMessage().indexOf("dir_does_not_exist"));

    assertEquals(
        "bogus_dir_core status isn't empty", 0, ((NamedList) status.get("bogus_dir_core")).size());

    // Try renaming the core, we should fail
    // First assert that the props core exists
    cd = cores.getCoreDescriptor("props");
    assertNotNull("Core disappeared!", cd);

    // now rename it to something else just for kicks, since we don't appear to test this anywhere
    // else.
    admin.handleRequestBody(
        req(
            CoreAdminParams.ACTION,
            CoreAdminParams.CoreAdminAction.RENAME.toString(),
            CoreAdminParams.CORE,
            "props",
            CoreAdminParams.OTHER,
            "rename_me"),
        resp);

    cd = cores.getCoreDescriptor("rename_me");
    assertNotNull("Core should have been renamed!", cd);

    // Rename it something bogus and see if you get an exception, the old core is still there and
    // the bogus one isn't
    try {
      admin.handleRequestBody(
          req(
              CoreAdminParams.ACTION,
              CoreAdminParams.CoreAdminAction.RENAME.toString(),
              CoreAdminParams.CORE,
              "rename_me",
              CoreAdminParams.OTHER,
              "bad$name"),
          resp);
    } catch (SolrException e) {
      // why the heck does create return a SolrException (admittedly wrapping an IAE)
      assertTrue(
          "Expected error message for bad core name.", e.getMessage().contains("Invalid core"));
    }

    cd = cores.getCoreDescriptor("bad$name");
    assertNull("Core should NOT exist!", cd);

    cd = cores.getCoreDescriptor("rename_me");
    assertNotNull("Core should have been renamed!", cd);

    // :TODO: because of SOLR-3665 we can't ask for status from all cores

  }
  // be sure this is called while holding this class's sync lock
  // returns true if we closed the cacheValue, false if it will be closed later
  private boolean closeCacheValue(CacheValue cacheValue) {
    log.info("looking to close " + cacheValue.path + " " + cacheValue.closeEntries.toString());
    List<CloseListener> listeners = closeListeners.remove(cacheValue.directory);
    if (listeners != null) {
      for (CloseListener listener : listeners) {
        try {
          listener.preClose();
        } catch (Exception e) {
          SolrException.log(log, "Error executing preClose for directory", e);
        }
      }
    }
    cacheValue.closeCacheValueCalled = true;
    if (cacheValue.deleteOnClose) {
      // see if we are a subpath
      Collection<CacheValue> values = byPathCache.values();

      Collection<CacheValue> cacheValues = new ArrayList<>(values);
      cacheValues.remove(cacheValue);
      for (CacheValue otherCacheValue : cacheValues) {
        // if we are a parent path and a sub path is not already closed, get a sub path to close us
        // later
        if (isSubPath(cacheValue, otherCacheValue) && !otherCacheValue.closeCacheValueCalled) {
          // we let the sub dir remove and close us
          if (!otherCacheValue.deleteAfterCoreClose && cacheValue.deleteAfterCoreClose) {
            otherCacheValue.deleteAfterCoreClose = true;
          }
          otherCacheValue.removeEntries.addAll(cacheValue.removeEntries);
          otherCacheValue.closeEntries.addAll(cacheValue.closeEntries);
          cacheValue.closeEntries.clear();
          cacheValue.removeEntries.clear();
          return false;
        }
      }
    }

    boolean cl = false;
    for (CacheValue val : cacheValue.closeEntries) {
      close(val);
      if (val == cacheValue) {
        cl = true;
      }
    }

    for (CacheValue val : cacheValue.removeEntries) {
      if (!val.deleteAfterCoreClose) {
        log.info("Removing directory before core close: " + val.path);
        try {
          removeDirectory(val);
        } catch (Exception e) {
          SolrException.log(log, "Error removing directory", e);
        }
      } else {
        removeEntries.add(val);
      }
    }

    if (listeners != null) {
      for (CloseListener listener : listeners) {
        try {
          listener.postClose();
        } catch (Exception e) {
          SolrException.log(log, "Error executing postClose for directory", e);
        }
      }
    }
    return cl;
  }
  /*
   * (non-Javadoc)
   *
   * @see org.apache.solr.core.DirectoryFactory#close()
   */
  @Override
  public void close() throws IOException {
    synchronized (this) {
      log.info(
          "Closing "
              + this.getClass().getSimpleName()
              + " - "
              + byDirectoryCache.size()
              + " directories currently being tracked");
      this.closed = true;
      Collection<CacheValue> values = byDirectoryCache.values();
      for (CacheValue val : values) {
        log.debug("Closing {} - currently tracking: {}", this.getClass().getSimpleName(), val);
        try {
          // if there are still refs out, we have to wait for them
          int cnt = 0;
          while (val.refCnt != 0) {
            wait(100);

            if (cnt++ >= 120) {
              String msg =
                  "Timeout waiting for all directory ref counts to be released - gave up waiting on "
                      + val;
              log.error(msg);
              // debug
              // val.originTrace.printStackTrace();
              throw new SolrException(ErrorCode.SERVER_ERROR, msg);
            }
          }
          assert val.refCnt == 0 : val.refCnt;
        } catch (Exception e) {
          SolrException.log(log, "Error closing directory", e);
        }
      }

      values = byDirectoryCache.values();
      Set<CacheValue> closedDirs = new HashSet<>();
      for (CacheValue val : values) {
        try {
          for (CacheValue v : val.closeEntries) {
            assert v.refCnt == 0 : val.refCnt;
            log.debug("Closing directory when closing factory: " + v.path);
            boolean cl = closeCacheValue(v);
            if (cl) {
              closedDirs.add(v);
            }
          }
        } catch (Exception e) {
          SolrException.log(log, "Error closing directory", e);
        }
      }

      for (CacheValue val : removeEntries) {
        log.info("Removing directory after core close: " + val.path);
        try {
          removeDirectory(val);
        } catch (Exception e) {
          SolrException.log(log, "Error removing directory", e);
        }
      }

      for (CacheValue v : closedDirs) {
        removeFromCache(v);
      }
    }
  }
Example #23
  private void syncToMe(
      ZkController zkController,
      String collection,
      String shardId,
      ZkNodeProps leaderProps,
      CoreDescriptor cd) {

    // sync everyone else
    // TODO: we should do this in parallel at least
    List<ZkCoreNodeProps> nodes =
        zkController
            .getZkStateReader()
            .getReplicaProps(collection, shardId, cd.getCloudDescriptor().getCoreNodeName());
    if (nodes == null) {
      log.info(ZkCoreNodeProps.getCoreUrl(leaderProps) + " has no replicas");
      return;
    }

    ZkCoreNodeProps zkLeader = new ZkCoreNodeProps(leaderProps);
    for (ZkCoreNodeProps node : nodes) {
      try {
        log.info(
            ZkCoreNodeProps.getCoreUrl(leaderProps)
                + ": try and ask "
                + node.getCoreUrl()
                + " to sync");

        requestSync(
            node.getBaseUrl(), node.getCoreUrl(), zkLeader.getCoreUrl(), node.getCoreName());

      } catch (Exception e) {
        SolrException.log(log, "Error syncing replica to leader", e);
      }
    }

    for (; ; ) {
      ShardResponse srsp = shardHandler.takeCompletedOrError();
      if (srsp == null) break;
      boolean success = handleResponse(srsp);
      if (srsp.getException() != null) {
        SolrException.log(log, "Sync request error: " + srsp.getException());
      }

      if (!success) {
        try {
          log.info(
              ZkCoreNodeProps.getCoreUrl(leaderProps)
                  + ": Sync failed - asking replica ("
                  + srsp.getShardAddress()
                  + ") to recover.");
          if (isClosed) {
            log.info("We have been closed, don't request that a replica recover");
          } else {
            requestRecovery(
                leaderProps,
                ((ShardCoreRequest) srsp.getShardRequest()).baseUrl,
                ((ShardCoreRequest) srsp.getShardRequest()).coreName);
          }
        } catch (Exception e) {
          SolrException.log(
              log,
              ZkCoreNodeProps.getCoreUrl(leaderProps) + ": Could not tell a replica to recover",
              e);
        }
      } else {
        log.info(
            ZkCoreNodeProps.getCoreUrl(leaderProps)
                + ": "
                + " sync completed with "
                + srsp.getShardAddress());
      }
    }
  }
Example #24
  private static Map<String, String> getFieldTypes(
      String[] fields, String solrBaseUrl, String collection) {

    // collect mapping of Solr field to type
    Map<String, String> fieldTypeMap = new HashMap<String, String>();
    for (String field : fields) {

      if (fieldTypeMap.containsKey(field)) continue;

      // Hit Solr Schema API to get field type for field
      String fieldUrl = solrBaseUrl + collection + "/schema/fields/" + field;
      try {
        String fieldType = null;
        try {
          Map<String, Object> fieldMeta =
              SolrJsonSupport.getJson(SolrJsonSupport.getHttpClient(), fieldUrl, 2);
          fieldType = SolrJsonSupport.asString("/field/type", fieldMeta);
        } catch (SolrException solrExc) {
          int errCode = solrExc.code();
          if (errCode == 404) {
            int lio = field.lastIndexOf('_');
            if (lio != -1) {
              // see if the field is a dynamic field
              String dynField = "*" + field.substring(lio);

              fieldType = fieldTypeMap.get(dynField);
              if (fieldType == null) {
                String dynamicFieldsUrl =
                    solrBaseUrl + collection + "/schema/dynamicfields/" + dynField;
                try {
                  Map<String, Object> dynFieldMeta =
                      SolrJsonSupport.getJson(SolrJsonSupport.getHttpClient(), dynamicFieldsUrl, 2);
                  fieldType = SolrJsonSupport.asString("/dynamicField/type", dynFieldMeta);
                  fieldTypeMap.put(dynField, fieldType);
                } catch (Exception exc) {
                  // just ignore this and throw the outer exc
                  exc.printStackTrace();
                  throw solrExc;
                }
              }
            }
          }
        }

        if (fieldType == null) {
          log.warn("Can't figure out field type for field: " + field);
          continue;
        }

        String fieldTypeUrl = solrBaseUrl + collection + "/schema/fieldtypes/" + fieldType;
        Map<String, Object> fieldTypeMeta =
            SolrJsonSupport.getJson(SolrJsonSupport.getHttpClient(), fieldTypeUrl, 2);
        String fieldTypeClass = SolrJsonSupport.asString("/fieldType/class", fieldTypeMeta);

        // map all the other fields for this type to speed up the schema analysis
        List<String> otherFields = SolrJsonSupport.asList("/fieldType/fields", fieldTypeMeta);
        for (String other : otherFields) fieldTypeMap.put(other, fieldTypeClass);

        fieldTypeMap.put(field, fieldTypeClass);

      } catch (Exception exc) {
        log.warn("Can't get field type for field " + field + " due to: " + exc);
      }
    }

    return fieldTypeMap;
  }
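A sketch of driving the schema lookup; the base URL, collection and field names are placeholders.

  String[] fields = {"id", "title_s", "price_f"};
  Map<String, String> fieldTypes =
      getFieldTypes(fields, "http://localhost:8983/solr/", "collection1");
  // Maps each field (and any other field sharing its type) to the field type's class name,
  // e.g. id -> org.apache.solr.schema.StrField.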
  private void doRetriesIfNeeded() {
    // NOTE: retries will be forwarded to a single url

    List<Error> errors = new ArrayList<>(this.errors);
    errors.addAll(clients.getErrors());
    List<Error> resubmitList = new ArrayList<>();

    for (Error err : errors) {
      try {
        String oldNodeUrl = err.req.node.getUrl();

        // if there is a retry url, we want to retry...
        boolean isRetry = err.req.node.checkRetry();

        boolean doRetry = false;
        int rspCode = err.statusCode;

        if (testing_errorHook != null) Diagnostics.call(testing_errorHook, err.e);

        // this can happen in certain situations such as close
        if (isRetry) {
          if (rspCode == 404 || rspCode == 403 || rspCode == 503) {
            doRetry = true;
          }

          // if it's a connect exception, lets try again
          if (err.e instanceof SolrServerException) {
            if (((SolrServerException) err.e).getRootCause() instanceof ConnectException) {
              doRetry = true;
            }
          }

          if (err.e instanceof ConnectException) {
            doRetry = true;
          }

          if (err.req.retries < maxRetriesOnForward && doRetry) {
            err.req.retries++;

            SolrException.log(
                SolrCmdDistributor.log,
                "forwarding update to "
                    + oldNodeUrl
                    + " failed - retrying ... retries: "
                    + err.req.retries
                    + " "
                    + err.req.cmdString
                    + " params:"
                    + err.req.uReq.getParams()
                    + " rsp:"
                    + rspCode,
                err.e);
            try {
              Thread.sleep(retryPause);
            } catch (InterruptedException e) {
              Thread.currentThread().interrupt();
              log.warn(null, e);
            }

            resubmitList.add(err);
          } else {
            allErrors.add(err);
          }
        } else {
          allErrors.add(err);
        }
      } catch (Exception e) {
        // continue on
        log.error("Unexpected Error while doing request retries", e);
      }
    }

    clients.clearErrors();
    this.errors.clear();
    for (Error err : resubmitList) {
      submit(err.req, false);
    }

    if (resubmitList.size() > 0) {
      blockAndDoRetries();
    }
  }
Example #26
  public static QueryResponse querySolr(
      SolrClient solrServer,
      SolrQuery solrQuery,
      int startIndex,
      String cursorMark,
      StreamingResponseCallback callback)
      throws SolrServerException {
    QueryResponse resp = null;
    try {
      if (cursorMark != null) {
        solrQuery.setStart(0);
        solrQuery.set("cursorMark", cursorMark);
      } else {
        solrQuery.setStart(startIndex);
      }

      if (callback != null) {
        resp = solrServer.queryAndStreamResponse(solrQuery, callback);
      } else {
        resp = solrServer.query(solrQuery);
      }
    } catch (Exception exc) {

      log.error("Query [" + solrQuery + "] failed due to: " + exc);

      // re-try once in the event of a communications error with the server
      Throwable rootCause = SolrException.getRootCause(exc);
      boolean wasCommError =
          (rootCause instanceof ConnectException
              || rootCause instanceof IOException
              || rootCause instanceof SocketException);
      if (wasCommError) {
        try {
          Thread.sleep(2000L);
        } catch (InterruptedException ie) {
          Thread.currentThread().interrupt();
        }

        try {
          if (callback != null) {
            resp = solrServer.queryAndStreamResponse(solrQuery, callback);
          } else {
            resp = solrServer.query(solrQuery);
          }
        } catch (Exception excOnRetry) {
          if (excOnRetry instanceof SolrServerException) {
            throw (SolrServerException) excOnRetry;
          } else {
            throw new SolrServerException(excOnRetry);
          }
        }
      } else {
        if (exc instanceof SolrServerException) {
          throw (SolrServerException) exc;
        } else {
          throw new SolrServerException(exc);
        }
      }
    }

    return resp;
  }
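A cursor-based paging loop built on this helper; the client, query and page size are placeholders, the SolrServerException declared by the helper is assumed to propagate, and the loop relies on Solr's standard nextCursorMark contract (the last page is reached when the cursor stops advancing).

  SolrQuery query = new SolrQuery("*:*");
  query.setRows(100);
  query.setSort("id", SolrQuery.ORDER.asc); // cursors require a sort that includes the uniqueKey

  String cursorMark = CursorMarkParams.CURSOR_MARK_START; // "*"
  while (true) {
    QueryResponse resp = querySolr(solrClient, query, 0, cursorMark, null);
    // ... consume resp.getResults() ...
    String nextCursorMark = resp.getNextCursorMark();
    if (cursorMark.equals(nextCursorMark)) break; // no more pages
    cursorMark = nextCursorMark;
  }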
  public void testFlowWithEmpty() throws Exception {
    // reused state
    Map<String, Exception> failures = null;
    Collection<String> cores = null;
    Exception fail = null;

    init("empty_flow");

    // solr.xml
    File solrXml = new File(solrHome, "solr.xml");
    FileUtils.write(solrXml, EMPTY_SOLR_XML, IOUtils.CHARSET_UTF_8.toString());

    // ----
    // init the CoreContainer
    cc.load(solrHome.getAbsolutePath(), solrXml);

    // check that we have the cores we expect
    cores = cc.getCoreNames();
    assertNotNull("core names is null", cores);
    assertEquals("wrong number of cores", 0, cores.size());

    // check that we have the failures we expect
    failures = cc.getCoreInitFailures();
    assertNotNull("core failures is a null map", failures);
    assertEquals("wrong number of core failures", 0, failures.size());

    // -----
    // try to add a collection with a path that doesn't exist
    final CoreDescriptor bogus = new CoreDescriptor(cc, "bogus", "bogus_path");
    try {
      ignoreException(Pattern.quote("bogus_path"));
      cc.create(bogus);
      fail("bogus inst dir failed to trigger exception from create");
    } catch (SolrException e) {
      assertTrue(
          "init exception doesn't mention bogus dir: " + e.getCause().getCause().getMessage(),
          0 < e.getCause().getCause().getMessage().indexOf("bogus_path"));
    }

    // check that we have the cores we expect
    cores = cc.getCoreNames();
    assertNotNull("core names is null", cores);
    assertEquals("wrong number of cores", 0, cores.size());

    // check that we have the failures we expect
    failures = cc.getCoreInitFailures();
    assertNotNull("core failures is a null map", failures);
    assertEquals("wrong number of core failures", 1, failures.size());
    fail = failures.get("bogus");
    assertNotNull("null failure for test core", fail);
    assertTrue(
        "init failure doesn't mention problem: " + fail.getCause().getMessage(),
        0 < fail.getCause().getMessage().indexOf("bogus_path"));

    // check that we get null accessing a non-existent core
    assertNull(cc.getCore("does_not_exist"));
    // check that we get a 500 accessing the core with an init failure
    try {
      SolrCore c = cc.getCore("bogus");
      fail("Failed to get Exception on accessing core with init failure");
    } catch (SolrException ex) {
      assertEquals(500, ex.code());
      // double wrapped
      String cause = ex.getCause().getCause().getMessage();
      assertTrue(
          "getCore() ex cause doesn't mention init fail: " + cause,
          0 < cause.indexOf("bogus_path"));
    }

    // let the test end here, with some recorded failures, and let cleanUp()
    // verify that there is no problem shutting down CoreContainer with known
    // SolrCore failures
  }
  public void testFlowBadFromStart() throws Exception {

    // reused state
    Map<String, Exception> failures = null;
    Collection<String> cores = null;
    Exception fail = null;

    init("bad_flow");

    // start with two collections: one valid, and one broken
    File solrXml = new File(solrHome, "solr.xml");
    FileUtils.write(solrXml, BAD_SOLR_XML, IOUtils.CHARSET_UTF_8.toString());

    // our "ok" collection
    FileUtils.copyFile(
        getFile("solr/collection1/conf/solrconfig-basic.xml"),
        FileUtils.getFile(solrHome, "col_ok", "conf", "solrconfig.xml"));
    FileUtils.copyFile(
        getFile("solr/collection1/conf/schema-minimal.xml"),
        FileUtils.getFile(solrHome, "col_ok", "conf", "schema.xml"));

    // our "bad" collection
    ignoreException(Pattern.quote("DummyMergePolicy"));
    FileUtils.copyFile(
        getFile("solr/collection1/conf/bad-mp-solrconfig.xml"),
        FileUtils.getFile(solrHome, "col_bad", "conf", "solrconfig.xml"));
    FileUtils.copyFile(
        getFile("solr/collection1/conf/schema-minimal.xml"),
        FileUtils.getFile(solrHome, "col_bad", "conf", "schema.xml"));

    // -----
    // init the  CoreContainer with the mix of ok/bad cores
    cc.load(solrHome.getAbsolutePath(), solrXml);

    // check that we have the cores we expect
    cores = cc.getCoreNames();
    assertNotNull("core names is null", cores);
    assertEquals("wrong number of cores", 1, cores.size());
    assertTrue("col_ok not found", cores.contains("col_ok"));

    // check that we have the failures we expect
    failures = cc.getCoreInitFailures();
    assertNotNull("core failures is a null map", failures);
    assertEquals("wrong number of core failures", 1, failures.size());
    fail = failures.get("col_bad");
    assertNotNull("null failure for test core", fail);
    assertTrue(
        "init failure doesn't mention problem: " + fail.getMessage(),
        0 < fail.getMessage().indexOf("DummyMergePolicy"));

    // check that we get null accessing a non-existent core
    assertNull(cc.getCore("does_not_exist"));
    // check that we get a 500 accessing the core with an init failure
    try {
      SolrCore c = cc.getCore("col_bad");
      fail("Failed to get Exception on accessing core with init failure");
    } catch (SolrException ex) {
      assertEquals(500, ex.code());
      // double wrapped
      String cause = ex.getCause().getCause().getMessage();
      assertTrue(
          "getCore() ex cause doesn't mention init fail: " + cause,
          0 < cause.indexOf("DummyMergePolicy"));
    }

    // -----
    // "fix" the bad collection
    FileUtils.copyFile(
        getFile("solr/collection1/conf/solrconfig-basic.xml"),
        FileUtils.getFile(solrHome, "col_bad", "conf", "solrconfig.xml"));
    final CoreDescriptor fixed = new CoreDescriptor(cc, "col_bad", "col_bad");
    cc.register("col_bad", cc.create(fixed), false);

    // check that we have the cores we expect
    cores = cc.getCoreNames();
    assertNotNull("core names is null", cores);
    assertEquals("wrong number of cores", 2, cores.size());
    assertTrue("col_ok not found", cores.contains("col_ok"));
    assertTrue("col_bad not found", cores.contains("col_bad"));

    // check that we have the failures we expect
    failures = cc.getCoreInitFailures();
    assertNotNull("core failures is a null map", failures);
    assertEquals("wrong number of core failures", 0, failures.size());

    // -----
    // try to add a collection with a path that doesn't exist
    final CoreDescriptor bogus = new CoreDescriptor(cc, "bogus", "bogus_path");
    try {
      ignoreException(Pattern.quote("bogus_path"));
      cc.create(bogus);
      fail("bogus inst dir failed to trigger exception from create");
    } catch (SolrException e) {
      assertTrue(
          "init exception doesn't mention bogus dir: " + e.getCause().getCause().getMessage(),
          0 < e.getCause().getCause().getMessage().indexOf("bogus_path"));
    }

    // check that we have the cores we expect
    cores = cc.getCoreNames();
    assertNotNull("core names is null", cores);
    assertEquals("wrong number of cores", 2, cores.size());
    assertTrue("col_ok not found", cores.contains("col_ok"));
    assertTrue("col_bad not found", cores.contains("col_bad"));

    // check that we have the failures we expect
    failures = cc.getCoreInitFailures();
    assertNotNull("core failures is a null map", failures);
    assertEquals("wrong number of core failures", 1, failures.size());
    fail = failures.get("bogus");
    assertNotNull("null failure for test core", fail);
    assertTrue(
        "init failure doesn't mention problem: " + fail.getCause().getMessage(),
        0 < fail.getCause().getMessage().indexOf("bogus_path"));

    // check that we get null accessing a non-existent core
    assertNull(cc.getCore("does_not_exist"));
    // check that we get a 500 accessing the core with an init failure
    try {
      SolrCore c = cc.getCore("bogus");
      fail("Failed to get Exception on accessing core with init failure");
    } catch (SolrException ex) {
      assertEquals(500, ex.code());
      // double wrapped
      String cause = ex.getCause().getCause().getMessage();
      assertTrue(
          "getCore() ex cause doesn't mention init fail: " + cause,
          0 < cause.indexOf("bogus_path"));
    }

    // -----
    // register bogus as an alias for col_ok and confirm failure goes away
    cc.register("bogus", cc.getCore("col_ok"), false);

    // check that we have the cores we expect
    cores = cc.getCoreNames();
    assertNotNull("core names is null", cores);
    assertEquals("wrong number of cores", 3, cores.size());
    assertTrue("col_ok not found", cores.contains("col_ok"));
    assertTrue("col_bad not found", cores.contains("col_bad"));
    assertTrue("bogus not found", cores.contains("bogus"));

    // check that we have the failures we expect
    failures = cc.getCoreInitFailures();
    assertNotNull("core failures is a null map", failures);
    assertEquals("wrong number of core failures", 0, failures.size());

    // -----
    // break col_bad's config and try to RELOAD to add failure

    final long col_bad_old_start = getCoreStartTime(cc, "col_bad");

    FileUtils.write(
        FileUtils.getFile(solrHome, "col_bad", "conf", "solrconfig.xml"),
        "This is giberish, not valid XML <",
        IOUtils.CHARSET_UTF_8.toString());

    try {
      ignoreException(Pattern.quote("SAX"));
      cc.reload("col_bad");
      fail("corrupt solrconfig.xml failed to trigger exception from reload");
    } catch (SolrException e) {
      assertTrue(
          "We're supposed to have a wrapped SAXParserException here, but we don't",
          e.getCause() instanceof SAXParseException);
      SAXParseException se = (SAXParseException) e.getCause();
      assertTrue(
          "reload exception doesn't refer to slrconfig.xml " + se.getSystemId(),
          0 < se.getSystemId().indexOf("solrconfig.xml"));
    }

    assertEquals(
        "Failed core reload should not have changed start time",
        col_bad_old_start,
        getCoreStartTime(cc, "col_bad"));

    // check that we have the cores we expect
    cores = cc.getCoreNames();
    assertNotNull("core names is null", cores);
    assertEquals("wrong number of cores", 3, cores.size());
    assertTrue("col_ok not found", cores.contains("col_ok"));
    assertTrue("col_bad not found", cores.contains("col_bad"));
    assertTrue("bogus not found", cores.contains("bogus"));

    // check that we have the failures we expect
    failures = cc.getCoreInitFailures();
    assertNotNull("core failures is a null map", failures);
    assertEquals("wrong number of core failures", 1, failures.size());
    fail = failures.get("col_bad");
    assertNotNull("null failure for test core", fail);
    assertTrue("init failure isn't SAXParseException", fail instanceof SAXParseException);
    assertTrue(
        "init failure doesn't mention problem: " + fail.toString(),
        0 < ((SAXParseException) fail).getSystemId().indexOf("solrconfig.xml"));

    // ----
    // fix col_bad's config (again) and RELOAD to fix failure
    FileUtils.copyFile(
        getFile("solr/collection1/conf/solrconfig-basic.xml"),
        FileUtils.getFile(solrHome, "col_bad", "conf", "solrconfig.xml"));
    cc.reload("col_bad");

    assertTrue(
        "Core reload should have changed start time",
        col_bad_old_start < getCoreStartTime(cc, "col_bad"));

    // check that we have the cores we expect
    cores = cc.getCoreNames();
    assertNotNull("core names is null", cores);
    assertEquals("wrong number of cores", 3, cores.size());
    assertTrue("col_ok not found", cores.contains("col_ok"));
    assertTrue("col_bad not found", cores.contains("col_bad"));
    assertTrue("bogus not found", cores.contains("bogus"));

    // check that we have the failures we expect
    failures = cc.getCoreInitFailures();
    assertNotNull("core failures is a null map", failures);
    assertEquals("wrong number of core failures", 0, failures.size());
  }
Example #29
  public String _format(LoggingEvent event) {
    String message = (String) event.getMessage();
    if (message == null) {
      message = "";
    }
    StringBuilder sb = new StringBuilder(message.length() + 80);

    long now = event.timeStamp;
    long timeFromStart = now - startTime;
    long timeSinceLast = now - lastTime;
    lastTime = now;
    String shortClassName =
        getShortClassName(
            event.getLocationInformation().getClassName(),
            event.getLocationInformation().getMethodName());

    /**
     * * sb.append(timeFromStart).append(' ').append(timeSinceLast); sb.append(' ');
     * sb.append(record.getSourceClassName()).append('.').append( record.getSourceMethodName());
     * sb.append(' '); sb.append(record.getLevel()); *
     */
    SolrRequestInfo requestInfo = SolrRequestInfo.getRequestInfo();
    SolrQueryRequest req = requestInfo == null ? null : requestInfo.getReq();
    SolrCore core = req == null ? null : req.getCore();
    ZkController zkController = null;
    CoreInfo info = null;

    if (core != null) {
      info = coreInfoMap.get(core.hashCode());
      if (info == null) {
        info = new CoreInfo();
        info.shortId = "C" + Integer.toString(CoreInfo.maxCoreNum++);
        coreInfoMap.put(core.hashCode(), info);

        if (sb.length() == 0) sb.append("ASYNC ");
        sb.append(" NEW_CORE " + info.shortId);
        sb.append(" name=" + core.getName());
        sb.append(" " + core);
      }

      zkController = core.getCoreDescriptor().getCoreContainer().getZkController();
      if (zkController != null) {
        if (info.url == null) {
          info.url = zkController.getBaseUrl() + "/" + core.getName();
          sb.append(" url=" + info.url + " node=" + zkController.getNodeName());
        }

        Map<String, Object> coreProps = getReplicaProps(zkController, core);
        if (info.coreProps == null || !coreProps.equals(info.coreProps)) {
          info.coreProps = coreProps;
          final String corePropsString =
              "coll:"
                  + core.getCoreDescriptor().getCloudDescriptor().getCollectionName()
                  + " core:"
                  + core.getName()
                  + " props:"
                  + coreProps;
          sb.append(" " + info.shortId + "_STATE=" + corePropsString);
        }
      }
    }

    if (sb.length() > 0) sb.append('\n');
    sb.append(timeFromStart);

    // sb.append("\nL").append(record.getSequenceNumber()); // log number is
    // useful for sequencing when looking at multiple parts of a log file, but
    // ms since start should be fine.
    appendThread(sb, event);

    appendMDC(sb);

    // todo: should be able to get port from core container for non zk tests

    if (info != null) {
      sb.append(' ').append(info.shortId); // core
    }

    if (shortClassName.length() > 0) {
      sb.append(' ').append(shortClassName);
    }

    if (event.getLevel() != Level.INFO) {
      sb.append(' ').append(event.getLevel());
    }

    sb.append(' ');
    appendMultiLineString(sb, message);
    ThrowableInformation thInfo = event.getThrowableInformation();
    if (thInfo != null) {
      Throwable th = event.getThrowableInformation().getThrowable();
      if (th != null) {
        sb.append(' ');
        String err = SolrException.toStr(th);
        String ignoredMsg = SolrException.doIgnore(th, err);
        if (ignoredMsg != null) {
          sb.append(ignoredMsg);
        } else {
          sb.append(err);
        }
      }
    }

    sb.append('\n');

    /**
     * * Isn't core specific... prob better logged from zkController if (info != null) {
     * ClusterState clusterState = zkController.getClusterState(); if (info.clusterState !=
     * clusterState) { // something has changed in the matrix... sb.append(zkController.getBaseUrl()
     * + " sees new ClusterState:"); } } *
     */
    return sb.toString();
  }
 private static boolean shouldRetry(Exception exc) {
   Throwable rootCause = SolrException.getRootCause(exc);
   return (rootCause instanceof ConnectException || rootCause instanceof SocketException);
 }
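A sketch of the single-retry pattern this predicate supports; the indexing call and back-off are placeholders, and the enclosing method is assumed to declare throws Exception.

 try {
   solrClient.add(doc);
 } catch (Exception exc) {
   if (shouldRetry(exc)) {
     Thread.sleep(2000L); // brief back-off, then one more attempt for connection-level failures
     solrClient.add(doc);
   } else {
     throw exc; // anything other than a connect/socket problem propagates immediately
   }
 }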