@Before public void setUp() throws Exception { String zpath = System.getProperty("java.io.tmpdir") + "/ZeppelinTest_" + System.currentTimeMillis(); zeppelinDir = new File(zpath); zeppelinDir.mkdirs(); new File(zeppelinDir, "conf").mkdirs(); notebooksDir = Joiner.on(File.separator).join(zpath, "notebook"); File notebookDir = new File(notebooksDir); notebookDir.mkdirs(); String testNoteDir = Joiner.on(File.separator).join(notebooksDir, TEST_NOTE_ID); FileUtils.copyDirectory( new File(Joiner.on(File.separator).join("src", "test", "resources", TEST_NOTE_ID)), new File(testNoteDir)); System.setProperty(ConfVars.ZEPPELIN_HOME.getVarName(), zeppelinDir.getAbsolutePath()); System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_DIR.getVarName(), notebookDir.getAbsolutePath()); System.setProperty( ConfVars.ZEPPELIN_INTERPRETERS.getVarName(), "org.apache.zeppelin.interpreter.mock.MockInterpreter1,org.apache.zeppelin.interpreter.mock.MockInterpreter2"); System.setProperty( ConfVars.ZEPPELIN_NOTEBOOK_STORAGE.getVarName(), "org.apache.zeppelin.notebook.repo.GitNotebookRepo"); MockInterpreter1.register("mock1", "org.apache.zeppelin.interpreter.mock.MockInterpreter1"); MockInterpreter2.register("mock2", "org.apache.zeppelin.interpreter.mock.MockInterpreter2"); conf = ZeppelinConfiguration.create(); }
/** Processes a sample with equal content buckets to check that the partitioning of the stream works correctly. */ public void testSP_small_sample_with_proportional_huge_buffersize() throws IOException { int sampleSize = 5; /* 5 * 20 == 100 */ int bufferSize = 100; final String str = new StringBuilder() .append(StringUtils.repeat("A", 20)) .append(StringUtils.repeat("B", 20)) .append(StringUtils.repeat("C", 20)) .append(StringUtils.repeat("D", 20)) .append(StringUtils.repeat("E", 20)) .toString(); OutputStream out = new ByteArrayOutputStream(); TestableRGStreamProcessor rgsp = new TestableRGStreamProcessor(sampleSize, out, bufferSize); assertEquals( "read size of `process` must match the input size", str.length(), rgsp.process(bufferedString(str))); assertEquals("ABCDE", out.toString()); /* each bucket contains only a single distinct char because the input repeats each char in blocks that exactly match the partition size */ Map<Integer, List<Character>> buckets = rgsp.getBuckets(); assertTrue(Joiner.on("").join(buckets.get(0)).matches("^A+$")); assertTrue(Joiner.on("").join(buckets.get(1)).matches("^B+$")); assertTrue(Joiner.on("").join(buckets.get(2)).matches("^C+$")); assertTrue(Joiner.on("").join(buckets.get(3)).matches("^D+$")); assertTrue(Joiner.on("").join(buckets.get(4)).matches("^E+$")); /* each bucket holds less content here because the buffer size is the maximum possible */ assertEquals(1, buckets.get(0).size()); }
/** Checks that the modulo parts are picked up correctly. */ public void testSP_bigger_buffer_unproportional_size() throws IOException { int sampleSize = 5; int bufferSize = 8; final String str = "AABBCCDE"; OutputStream out = new ByteArrayOutputStream(); TestableRGStreamProcessor rgsp = new TestableRGStreamProcessor(sampleSize, out, bufferSize); assertEquals( "read size of `process` must match the input size", str.length(), rgsp.process(bufferedString(str))); assertEquals("ABCDE", out.toString()); /* each bucket contains only one distinct char */ Map<Integer, List<Character>> buckets = rgsp.getBuckets(); assertTrue(Joiner.on("").join(buckets.get(0)).matches("^A+$")); assertTrue(Joiner.on("").join(buckets.get(1)).matches("^B+$")); assertTrue(Joiner.on("").join(buckets.get(2)).matches("^C+$")); assertTrue(Joiner.on("").join(buckets.get(3)).matches("^D+$")); assertTrue(Joiner.on("").join(buckets.get(4)).matches("^E+$")); /* here the buckets hold more content because our buffer size was small */ assertEquals(1, buckets.get(0).size()); assertEquals(1, buckets.get(1).size()); assertEquals(1, buckets.get(2).size()); assertEquals(1, buckets.get(3).size()); assertEquals(1, buckets.get(4).size()); }
protected String format( ELEMENT element, Iterable<ELEMENT> children, String separator, boolean needWrap, boolean needParenthesis, int maxChildren) { List<String> childStrs2 = Lists.newArrayList(); int width2 = 0; for (ELEMENT child : children) { String childStr = format(child, true); childStrs2.add(childStr); width2 += childStr.length(); if (childStr.contains("\n")) needWrap = true; } if (childStrs2.size() > maxChildren) needWrap = true; if (width2 > AUTO_WRAP_CHARS) needWrap = true; String cardinality = getCardinality(element); if (needWrap) { for (int i = 0; i < childStrs2.size(); i++) childStrs2.set(i, childStrs2.get(i).replaceAll("\\n", "\n" + INDENT)); String body = Joiner.on(separator + "\n" + INDENT).join(childStrs2); return "(\n" + INDENT + body + "\n)" + (cardinality == null ? "" : cardinality); } else { if (cardinality != null && childStrs2.size() > 1) needParenthesis = true; String body = Joiner.on(separator).join(childStrs2); if (needParenthesis) return "(" + body + ")" + (cardinality == null ? "" : cardinality); return body; } }
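// Sketch (not part of the `format` method above): the wrapping branch works because Joiner accepts
// an arbitrary separator string, so a separator that embeds a newline plus indentation yields one
// element per line. INDENT and the child strings below are hypothetical stand-ins for the fields used above.
import com.google.common.base.Joiner;
import java.util.Arrays;
import java.util.List;

class WrappedJoinSketch {
  private static final String INDENT = "  ";

  public static void main(String[] args) {
    List<String> children = Arrays.asList("id INTEGER", "name TEXT", "payload BLOB");
    // The separator carries the line break and the indent for every element after the first.
    String body = Joiner.on("," + "\n" + INDENT).join(children);
    System.out.println("(\n" + INDENT + body + "\n)");
    // (
    //   id INTEGER,
    //   name TEXT,
    //   payload BLOB
    // )
  }
}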
public static ModifyServiceResponseType modifyService(final ModifyServiceType request) throws Exception { final ModifyServiceResponseType reply = request.getReply(); try { if (NamedTransition.INSTANCE.apply(request)) { reply.markWinning(); } else { Component.State nextState = Component.State.valueOf(request.getState().toUpperCase()); ServiceConfiguration config = findService(request.getName()); Topology.transition(nextState).apply(config).get(); reply.markWinning(); } } catch (Exception ex) { Exceptions.maybeInterrupted(ex); throw new EucalyptusCloudException( "Failed to execute request transition: " + request.getState() + "\nDue to:\n" + Throwables.getRootCause(ex).getMessage() + "\nPossible arguments are: \n" + "TRANSITIONS\n\t" + Joiner.on("\n\t").join(Topology.Transitions.values()) + "\nSTATES\n\t" + Joiner.on("\n\t").join(Component.State.values()), ex); } return reply; }
private static void addElement( List<String> lines, Patch patch, Map<String, RevFeatureType> featureTypes) throws IOException { String[] headerTokens = lines.get(0).split("\t"); if (headerTokens.length == 4 || headerTokens.length == 3) { /* feature or feature type modification */ if (lines.size() == 1) { /* feature type */ FeatureTypeDiff diff = new FeatureTypeDiff( headerTokens[0], ObjectId.valueOf(headerTokens[1]), ObjectId.valueOf(headerTokens[2])); patch.addAlteredTree(diff); } else { /* feature */ String element = Joiner.on("\n").join(lines.subList(1, lines.size())); ByteArrayInputStream stream; stream = new ByteArrayInputStream(element.getBytes(Charsets.UTF_8)); String operation = headerTokens[0].trim(); if (operation.equals("M")) { String fullPath = headerTokens[1].trim(); String oldMetadataId = headerTokens[2].trim(); String newMetadataId = headerTokens[3].trim(); RevFeatureType newRevFeatureType = featureTypes.get(newMetadataId); RevFeatureType oldRevFeatureType = featureTypes.get(oldMetadataId); Map<PropertyDescriptor, AttributeDiff> map = Maps.newHashMap(); for (int i = 1; i < lines.size(); i++) { addDifference(lines.get(i), map, oldRevFeatureType, newRevFeatureType); } FeatureDiff featureDiff = new FeatureDiff(fullPath, map, oldRevFeatureType, newRevFeatureType); patch.addModifiedFeature(featureDiff); } else if (operation.equals("A") || operation.equals("R")) { String fullPath = headerTokens[1].trim(); String featureTypeId = headerTokens[2].trim(); RevFeatureType revFeatureType; revFeatureType = featureTypes.get(featureTypeId); FeatureBuilder featureBuilder = new FeatureBuilder(revFeatureType); RevFeature revFeature = (RevFeature) serializer.read(null, stream); Feature feature = featureBuilder.build(NodeRef.nodeFromPath(fullPath), revFeature); if (operation.equals("R")) { patch.addRemovedFeature(fullPath, feature, revFeatureType); } else { patch.addAddedFeature(fullPath, feature, revFeatureType); } } else { throw new IllegalArgumentException("Wrong patch content: " + lines.get(0)); } } } else if (headerTokens.length == 1) { /* feature type definition */ String element = Joiner.on("\n").join(lines); ByteArrayInputStream stream = new ByteArrayInputStream(element.getBytes(Charsets.UTF_8)); String[] tokens = lines.get(1).split("\t"); RevFeatureType featureType = (RevFeatureType) serializer.read(null, stream); featureTypes.put(featureType.getId().toString(), featureType); } else { throw new IllegalArgumentException("Wrong patch content: " + lines.get(0)); } }
@Test public void plagiarismCheckerServiceCheckTest() throws IOException, ParseException { String indexRoot = "index"; List<ContentAnalyzerType> contentAnalyzersList = Lists.newArrayList( ContentAnalyzerType.SimpleContentAnalizerWithSimpleTokenizer, ContentAnalyzerType.BagOfWordsContentAnalizerWithOpenNLPTokenizer); IndexBuilderTest.setupIndex(indexRoot, contentAnalyzersList); String[] args = { "--articleRepositoryFolders", Joiner.on(',').join(ArticleRepositoryTestUtil.FOLDERS), "--contentAnalyzers", Joiner.on(',').join(contentAnalyzersList), "--indexPaths", indexRoot }; PlagiarismCheckerService.setupContext(args); PlagiarismCheckerService plagiarismCheckerService = new PlagiarismCheckerService(); for (int i = 0; i < ArticleRepositoryTestUtil.ARTICLES.length; ++i) { for (int j = 0; j < ArticleRepositoryTestUtil.ARTICLES[i].length; ++j) { Assert.assertEquals( Lists.newArrayList( new CheckResult( i, j, ArticleRepositoryTestUtil.ARTICLES[i][j], contentAnalyzersList)), Lists.newArrayList( plagiarismCheckerService.check(ArticleRepositoryTestUtil.ARTICLES[i][j]))); } } IndexBuilderTest.deleteIndex(indexRoot, contentAnalyzersList); }
private Collection<Entry> parseValue(String key, Object value) { if (value instanceof Boolean) { return Collections.singleton(Entry.create(key, value)); } else if (value instanceof Number) { return Collections.singleton(Entry.create(key, value)); } else if (value instanceof String) { return Collections.singleton(Entry.create(key, value)); } else if (value instanceof Map) { if (flatten) { return Collections.singleton( Entry.create( key, Joiner.on(listSeparator).withKeyValueSeparator(kvSeparator).join((Map) value))); } else { @SuppressWarnings("unchecked") final Map<String, Object> map = (Map<String, Object>) value; final List<Entry> result = new ArrayList<>(); for (Map.Entry<String, Object> entry : map.entrySet()) { result.addAll(parseValue(key + keySeparator + entry.getKey(), entry.getValue())); } return result; } } else if (value instanceof List) { final List values = (List) value; return Collections.singleton(Entry.create(key, Joiner.on(listSeparator).join(values))); } else if (value == null) { return Collections.singleton(Entry.create(key, "null")); } else { LOG.debug("Unknown type \"{}\" in key \"{}\"", value.getClass(), key); return Collections.emptySet(); } }
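// Sketch of the MapJoiner used by the `flatten` branch above: withKeyValueSeparator turns a Joiner
// into a MapJoiner whose join(Map) renders key/value pairs. The "," and "=" separators here are
// illustrative assumptions, not the configured listSeparator/kvSeparator values.
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableMap;
import java.util.Map;

class MapJoinerSketch {
  public static void main(String[] args) {
    Map<String, Object> nested = ImmutableMap.of("status", 200, "method", "GET");
    // ImmutableMap preserves insertion order, so the output is deterministic.
    String flat = Joiner.on(",").withKeyValueSeparator("=").join(nested);
    System.out.println(flat); // status=200,method=GET
  }
}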
private void compileProtos() throws MojoExecutionException { List<String> args = Lists.newArrayList(); args.add("--proto_path=" + protoSourceDirectory); args.add("--java_out=" + generatedSourceDirectory); if (noOptions) { args.add("--no_options"); } if (enumOptions != null && enumOptions.length > 0) { args.add("--enum_options=" + Joiner.on(',').join(enumOptions)); } if (registryClass != null) { args.add("--registry_class=" + registryClass); } if (roots != null && roots.length > 0) { args.add("--roots=" + Joiner.on(',').join(roots)); } Collections.addAll(args, protoFiles); getLog().info("Invoking wire compiler with arguments:"); getLog().info(Joiner.on('\n').join(args)); try { /* TODO(shawn): we don't have a great programmatic interface to the compiler. Not all exceptions should result in MojoFailureExceptions (i.e. bugs in this plugin that invoke the compiler incorrectly). */ WireCompiler.main(args.toArray(new String[args.size()])); /* Add the directory into which generated sources are placed as a compiled source root. */ project.addCompileSourceRoot(generatedSourceDirectory); } catch (Exception e) { throw new MojoExecutionException("Wire Plugin: Failure compiling proto sources.", e); } }
/** {@inheritDoc} */ @Override public void onMatching( String docUri, Type type, SortedSet<Annotation> goldAnnos, SortedSet<Annotation> sysAnnos) { if (goldAnnos.size() == 1 && sysAnnos.size() == 1) { Annotation goldAnno = goldAnnos.iterator().next(); Annotation sysAnno = sysAnnos.iterator().next(); if (goldAnno.getBegin() == sysAnno.getBegin() && goldAnno.getEnd() == sysAnno.getEnd()) { printRow( type.getShortName(), "Exact", goldAnno.getCoveredText(), String.valueOf(goldAnno.getBegin()), sysAnno.getCoveredText(), String.valueOf(sysAnno.getBegin()), docUri); return; } } printRow( type.getShortName(), "Partial", Joiner.on(" /// ").join(transform(goldAnnos, annoToTxt)), Joiner.on(", ").join(transform(goldAnnos, annoToOffset)), Joiner.on(" /// ").join(transform(sysAnnos, annoToTxt)), Joiner.on(", ").join(transform(sysAnnos, annoToOffset)), docUri); }
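// Sketch of the join-over-a-transformed-view idiom used in the printRow call above: Guava's
// Iterables.transform produces a lazy view and Joiner consumes it directly, so no intermediate
// list is built. The Span class is a made-up stand-in for the Annotation type.
import com.google.common.base.Joiner;
import com.google.common.collect.Iterables;
import java.util.Arrays;
import java.util.List;

class JoinTransformedSketch {
  static final class Span {
    final int begin;
    Span(int begin) { this.begin = begin; }
  }

  public static void main(String[] args) {
    List<Span> spans = Arrays.asList(new Span(3), new Span(17), new Span(42));
    String offsets = Joiner.on(", ").join(Iterables.transform(spans, s -> String.valueOf(s.begin)));
    System.out.println(offsets); // 3, 17, 42
  }
}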
/** Find all {@code .thrift} files in the given directory. */ private ImmutableSet<File> findThriftFilesInDirectory(File directory) throws IOException { checkNotNull(directory); checkArgument(directory.isDirectory(), "%s is not a directory", directory); List<File> thriftFilesInDirectory = getFiles(directory, Joiner.on(",").join(includes), Joiner.on(",").join(excludes)); return ImmutableSet.copyOf(thriftFilesInDirectory); }
/** Asserts that when compiling with the given compiler options, {@code original} is transformed into {@code compiled}. If {@code warning} is non-null, we will also check whether the given warning type was emitted. */ private void test(String[] original, String[] compiled, DiagnosticType warning) { Compiler compiler = compile(original); if (warning == null) { assertEquals( "Expected no warnings or errors\n" + "Errors: \n" + Joiner.on("\n").join(compiler.getErrors()) + "\nWarnings: \n" + Joiner.on("\n").join(compiler.getWarnings()), 0, compiler.getErrors().length + compiler.getWarnings().length); } else { assertEquals(1, compiler.getWarnings().length); assertEquals(warning, compiler.getWarnings()[0].getType()); } Node root = compiler.getRoot().getLastChild(); if (useStringComparison) { assertEquals(Joiner.on("").join(compiled), compiler.toSource()); } else { Node expectedRoot = parse(compiled); String explanation = expectedRoot.checkTreeEquals(root); assertNull( "\nExpected: " + compiler.toSource(expectedRoot) + "\nResult: " + compiler.toSource(root) + "\n" + explanation, explanation); } }
@Override public String toSql() { StringBuilder strBuilder = new StringBuilder(); if (withClause_ != null) strBuilder.append(withClause_.toSql() + " "); strBuilder.append("INSERT "); if (overwrite_) { strBuilder.append("OVERWRITE "); } else { strBuilder.append("INTO "); } strBuilder.append("TABLE " + originalTableName_); if (columnPermutation_ != null) { strBuilder.append("("); strBuilder.append(Joiner.on(", ").join(columnPermutation_)); strBuilder.append(")"); } if (partitionKeyValues_ != null) { List<String> values = Lists.newArrayList(); for (PartitionKeyValue pkv : partitionKeyValues_) { values.add( pkv.getColName() + (pkv.getValue() != null ? ("=" + pkv.getValue().toSql()) : "")); } strBuilder.append(" PARTITION (" + Joiner.on(", ").join(values) + ")"); } if (planHints_ != null) { strBuilder.append(" " + ToSqlUtils.getPlanHintsSql(planHints_)); } if (!needsGeneratedQueryStatement_) { strBuilder.append(" " + queryStmt_.toSql()); } return strBuilder.toString(); }
private String summarize(RevWalk rw, List<CodeReviewCommit> merged) throws IOException { if (merged.size() == 1) { CodeReviewCommit c = merged.get(0); rw.parseBody(c); return String.format("Merge \"%s\"", c.getShortMessage()); } LinkedHashSet<String> topics = new LinkedHashSet<>(4); for (CodeReviewCommit c : merged) { if (!Strings.isNullOrEmpty(c.change().getTopic())) { topics.add(c.change().getTopic()); } } if (topics.size() == 1) { return String.format("Merge changes from topic '%s'", Iterables.getFirst(topics, null)); } else if (topics.size() > 1) { return String.format("Merge changes from topics '%s'", Joiner.on("', '").join(topics)); } else { return String.format( "Merge changes %s%s", Joiner.on(',') .join( Iterables.transform( Iterables.limit(merged, 5), new Function<CodeReviewCommit, String>() { @Override public String apply(CodeReviewCommit in) { return in.change().getKey().abbreviate(); } })), merged.size() > 5 ? ", ..." : ""); } }
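// Sketch of the quoting idiom in the multi-topic branch above: the separator "', '" plus a pair of
// literal quotes in the format string renders every element single-quoted. The topic names are
// invented for the example.
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableSet;
import java.util.Set;

class QuotedListSketch {
  public static void main(String[] args) {
    Set<String> topics = ImmutableSet.of("auth", "ui", "ci");
    System.out.println(
        String.format("Merge changes from topics '%s'", Joiner.on("', '").join(topics)));
    // Merge changes from topics 'auth', 'ui', 'ci'
  }
}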
private static int fingerprint( final NetworkInfoSource source, final List<com.eucalyptus.cluster.Cluster> clusters, final Set<String> dirtyPublicAddresses, final String networkConfiguration) { final HashFunction hashFunction = goodFastHash(32); final Hasher hasher = hashFunction.newHasher(); final Funnel<VersionedNetworkView> versionedItemFunnel = new Funnel<VersionedNetworkView>() { @Override public void funnel(final VersionedNetworkView o, final PrimitiveSink primitiveSink) { primitiveSink.putString(o.getId(), StandardCharsets.UTF_8); primitiveSink.putChar('='); primitiveSink.putInt(o.getVersion()); } }; for (final Map.Entry<String, Iterable<? extends VersionedNetworkView>> entry : source.getView().entrySet()) { hasher.putString(entry.getKey(), StandardCharsets.UTF_8); for (final VersionedNetworkView item : entry.getValue()) { hasher.putObject(item, versionedItemFunnel); } } hasher.putString( Joiner.on(',').join(Sets.newTreeSet(Iterables.transform(clusters, HasName.GET_NAME))), StandardCharsets.UTF_8); hasher.putString( Joiner.on(',').join(Sets.newTreeSet(dirtyPublicAddresses)), StandardCharsets.UTF_8); hasher.putInt(networkConfiguration.hashCode()); return hasher.hash().asInt(); }
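// Sketch of why the fingerprint above joins Sets.newTreeSet(...) rather than the raw collections:
// hash-based sets have no defined iteration order, so joining them directly would feed an unstable
// string into the hasher. Sorting first makes the fingerprint deterministic. The cluster names are invented.
import com.google.common.base.Joiner;
import com.google.common.collect.Sets;
import java.util.Set;

class DeterministicJoinSketch {
  public static void main(String[] args) {
    Set<String> clusterNames = Sets.newHashSet("zone-b", "zone-a", "zone-c");
    // Same output regardless of the HashSet's internal ordering.
    System.out.println(Joiner.on(',').join(Sets.newTreeSet(clusterNames))); // zone-a,zone-b,zone-c
  }
}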
@Test public void testTable() { JavaFileObject source = JavaFileObjects.forSourceString( "test.Test", Joiner.on('\n') .join( "package test;", "import Table;", "import Key;", "@Table", "public class Test {", " @Key long id;", " String name;", "}")); JavaFileObject expectedSource = JavaFileObjects.forSourceString( "test/Test$$Dao", Joiner.on('\n') .join( "// Generated code from Cying-ORM. Do not modify!", "package test;", "import android.content.ContentValues;", "import android.database.Cursor;", "import BaseDao;", "public class Test$$Dao extends BaseDao<Test> {", " private static String SQL=\"CREATE TABLE test (id INTEGER PRIMARY KEY AUTOINCREMENT,name TEXT);\";", " static {", " saveSQL(SQL);", " }", " @Override protected Test cursorToEntity(Cursor cursor) {", " Test entity=new Test();", " entity.id=cursor.getLong(cursor.getColumnIndex(\"id\"));", " entity.name=cursor.getString(cursor.getColumnIndex(\"name\"));", " return entity;", " }", " @Override protected ContentValues entityToValues(Test entity) {", " ContentValues values=new ContentValues();", " values.put(\"name\",entity.name);", " return values;", " }", " @Override public String getTableName() {", " return \"test\";", " }", " @Override public String getTableSQL() { return SQL; }", " @Override public String getIdentityName() {", " return \"id\";", " }", " @Override public long getIdentity(Test entity) {", " return entity.id;", " }", "}")); Truth.assertAbout(javaSource()) .that(source) .processedWith(new ORMProcessor()) .compilesWithoutError() .and() .generatesSources(expectedSource); }
@Test public void testNetworkNodeUsingYaml() throws BackendException, InterruptedException { ElasticsearchRunner esr = new ElasticsearchRunner(".", "networkNodeUsingYaml.yml"); esr.start(); ModifiableConfiguration config = GraphDatabaseConfiguration.buildGraphConfiguration(); config.set(INTERFACE, ElasticSearchSetup.NODE.toString(), INDEX_NAME); config.set( INDEX_CONF_FILE, Joiner.on(File.separator).join("target", "test-classes", "es_cfg_nodeclient.yml"), INDEX_NAME); Configuration indexConfig = config.restrictTo(INDEX_NAME); IndexProvider idx = new ElasticSearchIndex(indexConfig); simpleWriteAndQuery(idx); idx.close(); config = GraphDatabaseConfiguration.buildGraphConfiguration(); config.set(INTERFACE, ElasticSearchSetup.NODE.toString(), INDEX_NAME); config.set(HEALTH_REQUEST_TIMEOUT, "5s", INDEX_NAME); config.set( INDEX_CONF_FILE, Joiner.on(File.separator).join("target", "test-classes", "es_cfg_bogus_nodeclient.yml"), INDEX_NAME); indexConfig = config.restrictTo(INDEX_NAME); Throwable failure = null; try { idx = new ElasticSearchIndex(indexConfig); } catch (Throwable t) { failure = t; } /* idx.close(); */ Assert.assertNotNull("ES client failed to throw exception on connection failure", failure); esr.stop(); }
@Override public String toString() { try { return toStringHelper(this) .add( "Event types:", Joiner.on(',') .join( Iterables.transform( getTypes(), new Function<Integer, String>() { @Override public String apply(final Integer type) { return EventType.valueOf(type).getName(); } }))) .add("Event properties:", Joiner.on(',').join(eventProperties)) .add("Path:", getPath()) .add("Date: ", getDate()) .add("Info:", getInfo()) .toString(); } catch (final RepositoryException e) { throw propagate(e); } }
@Test public void testOverride() throws IOException { Reader readerA = new StringReader(Joiner.on('\n').join("[cache]", " mode = dir,cassandra")); Reader readerB = new StringReader(Joiner.on('\n').join("[cache]", " mode =")); // Verify that no exception is thrown when a definition is overridden. BuckConfig.createFromReaders(ImmutableList.of(readerA, readerB)); }
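// Sketch of the fixture idiom shared by several of these tests: Joiner.on('\n').join(String...)
// keeps a multi-line literal readable without manual "\n" concatenation. The ini content below is
// invented, not taken from BuckConfig.
import com.google.common.base.Joiner;

class MultilineFixtureSketch {
  public static void main(String[] args) {
    String ini = Joiner.on('\n').join(
        "[cache]",
        "  mode = dir,cassandra",
        "  dir = buck-cache");
    System.out.println(ini);
  }
}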
private void changeMetadata(final String content, final Upload upload) { final Map<String, Object> params = new HashMap<>(METADATA_PARAMS_SIZE); params.putAll(getMetadataSocial(upload)); params.putAll(getMetadataMonetization(content, upload)); params.putAll(getMetadataMetadata(upload)); params.putAll(getMetadataPermissions(upload)); System.out.println(Joiner.on(MODIFIED_SEPERATOR).skipNulls().join(params.keySet())); params.put("modified_fields", Joiner.on(MODIFIED_SEPERATOR).skipNulls().join(params.keySet())); params.put("creator_share_feeds", "yes"); final String token = extractor(content, "var session_token = \"", "\""); params.put("session_token", token); params.put("action_edit_video", "1"); try { final HttpResponse<String> response = Unirest.post( String.format( "https://www.youtube.com/metadata_ajax?video_id=%s", upload.getVideoid())) .fields(params) .asString(); LOGGER.info(response.getBody()); } catch (final Exception e) { LOGGER.warn("Metadata not set", e); } }
@Test public void methodWithMultipleIds() { JavaFileObject source = JavaFileObjects.forSourceString( "test.Test", Joiner.on('\n') .join( "package test;", "import android.app.Activity;", "import android.view.View;", "import butterknife.OnClick;", "public class Test extends Activity {", " @OnClick({1, 2, 3}) void click() {}", "}")); JavaFileObject expectedSource = JavaFileObjects.forSourceString( "test/Test$$ViewInjector", Joiner.on('\n') .join( "package test;", "import android.view.View;", "import butterknife.ButterKnife.Finder;", "import butterknife.ButterKnife.Injector;", "public class Test$$ViewInjector<T extends test.Test> implements Injector<T> {", " @Override public void inject(final Finder finder, final T target, Object source) {", " View view;", " view = finder.findRequiredView(source, 1, \"method 'click'\");", " view.setOnClickListener(new butterknife.internal.DebouncingOnClickListener() {", " @Override public void doClick(android.view.View p0) {", " target.click();", " }", " });", " view = finder.findRequiredView(source, 2, \"method 'click'\");", " view.setOnClickListener(new butterknife.internal.DebouncingOnClickListener() {", " @Override public void doClick(android.view.View p0) {", " target.click();", " }", " });", " view = finder.findRequiredView(source, 3, \"method 'click'\");", " view.setOnClickListener(new butterknife.internal.DebouncingOnClickListener() {", " @Override public void doClick(android.view.View p0) {", " target.click();", " }", " });", " }", " @Override public void reset(T target) {", " }", "}")); ASSERT .about(javaSource()) .that(source) .processedWith(butterknifeProcessors()) .compilesWithoutError() .and() .generatesSources(expectedSource); }
private String streetAddress(Object name, Object descriptiveNumber, Object orientationNumber) { return Joiner.on(" ") .skipNulls() .join( asStringOrNull(name), Joiner.on("/") .skipNulls() .join(asStringOrNull(descriptiveNumber), asStringOrNull(orientationNumber))); }
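// Sketch of the skipNulls() behaviour the address builder above relies on: null parts are simply
// dropped, so a missing orientation number leaves no dangling "/" separator. The values are invented.
// One subtlety: if both numbers are null the inner join yields "" (not null), so the outer
// skipNulls still appends it and the result keeps a trailing space.
import com.google.common.base.Joiner;

class SkipNullsSketch {
  public static void main(String[] args) {
    String number = Joiner.on("/").skipNulls().join(12, null); // "12"
    String address = Joiner.on(" ").skipNulls().join("Main Street", number);
    System.out.println(address); // Main Street 12
  }
}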
private String buildCreateCommand() { StringBuilder result = new StringBuilder(CCM_COMMAND + " create"); result.append(" " + clusterName); result.append(" -i" + IP_PREFIX); result.append(" " + cassandraInstallArgs); if (nodes.length > 0) result.append(" -n " + Joiner.on(":").join(nodes)); if (startOptions.length > 0) result.append(" " + Joiner.on(" ").join(startOptions)); return result.toString(); }
@Override public String call() throws IOException { if (host != null) { InetAddress address = InetAddress.getByName(host); return Joiner.on('\n').join(_exportFile.exportsFor(address)); } else { return Joiner.on('\n').join(_exportFile.getExports()); } }
/** MarkdownTxtmark can detect plain text links and produce HTML with links wrapped correctly. @throws Exception If there is some problem inside */ @Test @SuppressWarnings("PMD.AvoidInstantiatingObjectsInLoops") public void detectsLinks() throws Exception { final String[][] texts = { new String[] { "<a href=\"http://_google_.com\">g</a>", "<p><a href=\"http://_google_.com\">g</a></p>", }, new String[] { "http://foo.com", "<p><a href=\"http://foo.com\">http://foo.com</a></p>", }, new String[] { "(http://foo?com)", "<p>(<a href=\"http://foo?com\">http://foo?com</a>)</p>", }, new String[] { "(http://foo#com)", "<p>(<a href=\"http://foo#com\">http://foo#com</a>)</p>", }, new String[] { "(https://a?b=c)", "<p>(<a href=\"https://a?b=c\">https://a?b=c</a>)</p>", }, new String[] { "[foo](http://foo)", "<p><a href=\"http://foo\">foo</a></p>", }, new String[] { "[http://bar.com](http://bar.com)", "<p><a href=\"http://bar.com\">http://bar.com</a></p>", }, new String[] { "[http://googl.com]", "<p>[<a href=\"http://googl.com\">http://googl.com</a>]</p>", }, new String[] { "[google](http://www.google.com)", "<p><a href=\"http://www.google.com\">google</a></p>", }, new String[] { Joiner.on(MarkdownTxtmarkTest.EOL) .join("http://yahoo.com", "http://bar.com [http://af.com](http://af.com) end"), Joiner.on(MarkdownTxtmarkTest.EOL) .join( /* @checkstyle LineLengthCheck (2 lines) */ "<p><a href=\"http://yahoo.com\">http://yahoo.com</a><br />", "<a href=\"http://bar.com\">http://bar.com</a> <a href=\"http://af.com\">http://af.com</a> end</p>"), }, new String[] { "![logo] (http://img.qulice.com/logo.svg)", /* @checkstyle LineLengthCheck (1 line) */ "<p><img src=\"http://img.qulice.com/logo.svg\" alt=\"logo\" /></p>", }, new String[] { "![logo](http://img.qulice.com/pict.svg)", /* @checkstyle LineLengthCheck (1 line) */ "<p><img src=\"http://img.qulice.com/pict.svg\" alt=\"logo\" /></p>", }, }; for (final String[] pair : texts) { MatcherAssert.assertThat( new MarkdownTxtmark().html(pair[0]).trim(), Matchers.equalTo(pair[1])); } }
private static String printMethodCall( Class<?> clazz, String method, boolean isStatic, List<String> args) { if (isStatic) { return String.format( "%s.%s(%s)", clazz.getCanonicalName(), method, Joiner.on(',').join(args)); } else { return String.format( "%s.%s(%s)", args.get(0), method, Joiner.on(',').join(args.subList(1, args.size()))); } }
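// Sketch of the receiver-versus-arguments split above: for an instance call, args.get(0) is the
// receiver and only the tail of the list is joined into the parameter list. The values are made up.
import com.google.common.base.Joiner;
import java.util.Arrays;
import java.util.List;

class SubListJoinSketch {
  public static void main(String[] args) {
    List<String> callArgs = Arrays.asList("myList", "1", "\"x\"");
    String rendered = String.format("%s.%s(%s)",
        callArgs.get(0), "add", Joiner.on(',').join(callArgs.subList(1, callArgs.size())));
    System.out.println(rendered); // myList.add(1,"x")
  }
}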
@Override public Void visitWindow(WindowNode node, Void context) { printNode( node, "Window", format( "partition by = %s|order by = %s", Joiner.on(", ").join(node.getPartitionBy()), Joiner.on(", ").join(node.getOrderBy())), NODE_COLORS.get(NodeType.WINDOW)); return node.getSource().accept(this, context); }
@Override public void reportInvalidOptions(EventHandler reporter, BuildOptions buildOptions) { if (!Collections.disjoint(translationFlags, J2OBJC_BLACKLISTED_TRANSLATION_FLAGS)) { String errorMsg = String.format( INVALID_TRANSLATION_FLAGS_MSG_TEMPLATE, Joiner.on(",").join(translationFlags), Joiner.on(",").join(J2OBJC_BLACKLISTED_TRANSLATION_FLAGS)); reporter.handle(Event.error(errorMsg)); } }
@Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append(Joiner.on(", ").join(keys.values())); if (!columns.isEmpty()) sb.append(", ").append(Joiner.on(", ").join(columns.values())); sb.append(" => "); if (value != null) sb.append(value.name); if (!metadata.isEmpty()) sb.append("{").append(Joiner.on(", ").join(metadata.values())).append(" }"); return sb.toString(); }
PluginFilter(Settings settings, AnalysisMode mode) { if (settings.hasKey(CoreProperties.BATCH_INCLUDE_PLUGINS)) { whites.addAll(Arrays.asList(settings.getStringArray(CoreProperties.BATCH_INCLUDE_PLUGINS))); } if (settings.hasKey(CoreProperties.BATCH_EXCLUDE_PLUGINS)) { blacks.addAll(Arrays.asList(settings.getStringArray(CoreProperties.BATCH_EXCLUDE_PLUGINS))); } if (mode.isPreview()) { /* These default values are not supported by Settings because the class CorePlugin is not loaded yet. */ if (settings.hasKey(CoreProperties.DRY_RUN_INCLUDE_PLUGINS)) { LOG.warn( MessageFormat.format( PROPERTY_IS_DEPRECATED_MSG, CoreProperties.DRY_RUN_INCLUDE_PLUGINS, CoreProperties.PREVIEW_INCLUDE_PLUGINS)); whites.addAll( propertyValues( settings, CoreProperties.DRY_RUN_INCLUDE_PLUGINS, CoreProperties.PREVIEW_INCLUDE_PLUGINS_DEFAULT_VALUE)); } else { whites.addAll( propertyValues( settings, CoreProperties.PREVIEW_INCLUDE_PLUGINS, CoreProperties.PREVIEW_INCLUDE_PLUGINS_DEFAULT_VALUE)); } if (settings.hasKey(CoreProperties.DRY_RUN_EXCLUDE_PLUGINS)) { LOG.warn( MessageFormat.format( PROPERTY_IS_DEPRECATED_MSG, CoreProperties.DRY_RUN_EXCLUDE_PLUGINS, CoreProperties.PREVIEW_EXCLUDE_PLUGINS)); blacks.addAll( propertyValues( settings, CoreProperties.DRY_RUN_EXCLUDE_PLUGINS, CoreProperties.PREVIEW_EXCLUDE_PLUGINS_DEFAULT_VALUE)); } else { blacks.addAll( propertyValues( settings, CoreProperties.PREVIEW_EXCLUDE_PLUGINS, CoreProperties.PREVIEW_EXCLUDE_PLUGINS_DEFAULT_VALUE)); } } if (!whites.isEmpty()) { LOG.info("Include plugins: " + Joiner.on(", ").join(whites)); } if (!blacks.isEmpty()) { LOG.info("Exclude plugins: " + Joiner.on(", ").join(blacks)); } }