/**
 * Builds a throwaway Zeppelin home under {@code java.io.tmpdir}, seeds its notebook
 * directory with the checked-in test note, and points the Zeppelin system properties
 * (home, notebook dir, interpreters, Git-backed notebook storage) at it before
 * creating the configuration.
 */
@Before
public void setUp() throws Exception {
  // Timestamped path so repeated runs do not collide on leftover directories.
  String zpath = System.getProperty("java.io.tmpdir") + "/ZeppelinTest_" + System.currentTimeMillis();
  zeppelinDir = new File(zpath);
  zeppelinDir.mkdirs();
  new File(zeppelinDir, "conf").mkdirs();
  notebooksDir = Joiner.on(File.separator).join(zpath, "notebook");
  File notebookDir = new File(notebooksDir);
  notebookDir.mkdirs();
  // Copy the fixture note from test resources into the fresh notebook dir.
  String testNoteDir = Joiner.on(File.separator).join(notebooksDir, TEST_NOTE_ID);
  FileUtils.copyDirectory(
      new File(Joiner.on(File.separator).join("src", "test", "resources", TEST_NOTE_ID)),
      new File(testNoteDir));
  System.setProperty(ConfVars.ZEPPELIN_HOME.getVarName(), zeppelinDir.getAbsolutePath());
  System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_DIR.getVarName(), notebookDir.getAbsolutePath());
  System.setProperty(
      ConfVars.ZEPPELIN_INTERPRETERS.getVarName(),
      "org.apache.zeppelin.interpreter.mock.MockInterpreter1,org.apache.zeppelin.interpreter.mock.MockInterpreter2");
  // This suite exercises the Git-backed notebook repository specifically.
  System.setProperty(
      ConfVars.ZEPPELIN_NOTEBOOK_STORAGE.getVarName(),
      "org.apache.zeppelin.notebook.repo.GitNotebookRepo");
  MockInterpreter1.register("mock1", "org.apache.zeppelin.interpreter.mock.MockInterpreter1");
  MockInterpreter2.register("mock2", "org.apache.zeppelin.interpreter.mock.MockInterpreter2");
  // Created last so it picks up all the system properties set above.
  conf = ZeppelinConfiguration.create();
}
/**
 * Renders this event as a human-readable string: event type names (resolved via
 * {@code EventType.valueOf}), properties, path, date and info.
 *
 * <p>A checked {@code RepositoryException} from the repository accessors is
 * rethrown unchecked via {@code propagate} because {@code toString()} cannot
 * declare checked exceptions.
 */
@Override
public String toString() {
  try {
    return toStringHelper(this)
        .add(
            "Event types:",
            Joiner.on(',')
                .join(
                    Iterables.transform(
                        getTypes(),
                        // Maps each integer type code to its EventType display name.
                        new Function<Integer, String>() {
                          @Override
                          public String apply(final Integer type) {
                            return EventType.valueOf(type).getName();
                          }
                        })))
        .add("Event properties:", Joiner.on(',').join(eventProperties))
        .add("Path:", getPath())
        .add("Date: ", getDate())
        .add("Info:", getInfo())
        .toString();
  } catch (final RepositoryException e) {
    throw propagate(e);
  }
}
/**
 * Converts a decoded value into a collection of flat {@link Entry} items.
 *
 * <p>Scalars (Boolean/Number/String) map to a single entry. Maps either get
 * flattened into one joined "k=v" string (when {@code flatten} is set) or are
 * recursed into with {@code keySeparator}-joined key paths. Lists are joined
 * with {@code listSeparator}. {@code null} becomes the literal string "null";
 * any other type is logged at debug level and dropped.
 *
 * @param key   the (possibly composite) key for the value
 * @param value the decoded value; may be null
 * @return entries derived from the value; empty for unsupported types
 */
private Collection<Entry> parseValue(String key, Object value) {
  if (value instanceof Boolean) {
    return Collections.singleton(Entry.create(key, value));
  } else if (value instanceof Number) {
    return Collections.singleton(Entry.create(key, value));
  } else if (value instanceof String) {
    return Collections.singleton(Entry.create(key, value));
  } else if (value instanceof Map) {
    if (flatten) {
      // Collapse the whole map into one "k<kv>v<sep>k<kv>v" string entry.
      return Collections.singleton(
          Entry.create(
              key,
              Joiner.on(listSeparator).withKeyValueSeparator(kvSeparator).join((Map<?, ?>) value)));
    } else {
      @SuppressWarnings("unchecked")
      final Map<String, Object> map = (Map<String, Object>) value;
      final List<Entry> result = new ArrayList<>();
      // Recurse with the child key appended to the current key path.
      for (Map.Entry<String, Object> entry : map.entrySet()) {
        result.addAll(parseValue(key + keySeparator + entry.getKey(), entry.getValue()));
      }
      return result;
    }
  } else if (value instanceof List) {
    // Bounded wildcard instead of a raw List: Joiner accepts Iterable<?>.
    final List<?> values = (List<?>) value;
    return Collections.singleton(Entry.create(key, Joiner.on(listSeparator).join(values)));
  } else if (value == null) {
    // instanceof is false for null, so this branch is reachable.
    return Collections.singleton(Entry.create(key, "null"));
  } else {
    LOG.debug("Unknown type \"{}\" in key \"{}\"", value.getClass(), key);
    return Collections.emptySet();
  }
}
/**
 * process a sample with equal content buckets to see if the partitioning of the stream works well
 */
public void testSP_small_sample_with_proportional_huge_buffersize() throws IOException {
  int sampleSize = 5;
  // 5 * 20 == 100
  int bufferSize = 100;
  // Five equal runs of 20 chars each, so each sample bucket maps to one letter.
  final String str =
      new StringBuilder()
          .append(StringUtils.repeat("A", 20))
          .append(StringUtils.repeat("B", 20))
          .append(StringUtils.repeat("C", 20))
          .append(StringUtils.repeat("D", 20))
          .append(StringUtils.repeat("E", 20))
          .toString();
  OutputStream out = new ByteArrayOutputStream();
  TestableRGStreamProcessor rgsp = new TestableRGStreamProcessor(sampleSize, out, bufferSize);
  assertEquals(
      "read size of `process` must match the input size",
      str.length(),
      rgsp.process(bufferedString(str)));
  assertEquals("ABCDE", out.toString());
  // each bucket contains only the same char as we repeated the string in the exact matches size
  Map<Integer, List<Character>> buckets = rgsp.getBuckets();
  assertTrue(Joiner.on("").join(buckets.get(0)).matches("^A+$"));
  assertTrue(Joiner.on("").join(buckets.get(1)).matches("^B+$"));
  assertTrue(Joiner.on("").join(buckets.get(2)).matches("^C+$"));
  assertTrue(Joiner.on("").join(buckets.get(3)).matches("^D+$"));
  assertTrue(Joiner.on("").join(buckets.get(4)).matches("^E+$"));
  // here we have less bucket content, cause our buffersize the maximum possible
  // NOTE(review): only bucket 0's size is asserted here, unlike the sibling test
  // which asserts all five — presumably intentional, but worth confirming.
  assertEquals(1, buckets.get(0).size());
}
/** check that the modulo parts are picked up correctly */
public void testSP_bigger_buffer_unproportinal_size() throws IOException {
  int sampleSize = 5;
  // Buffer (8) is not a multiple of sampleSize (5): exercises the modulo path.
  int bufferSize = 8;
  final String str = "AABBCCDE";
  OutputStream out = new ByteArrayOutputStream();
  TestableRGStreamProcessor rgsp = new TestableRGStreamProcessor(sampleSize, out, bufferSize);
  assertEquals(
      "read size of `process` must match the input size",
      str.length(),
      rgsp.process(bufferedString(str)));
  assertEquals("ABCDE", out.toString());
  // each bucket contains only the same char as we repeated the string in the exact matches size
  Map<Integer, List<Character>> buckets = rgsp.getBuckets();
  assertTrue(Joiner.on("").join(buckets.get(0)).matches("^A+$"));
  assertTrue(Joiner.on("").join(buckets.get(1)).matches("^B+$"));
  assertTrue(Joiner.on("").join(buckets.get(2)).matches("^C+$"));
  assertTrue(Joiner.on("").join(buckets.get(3)).matches("^D+$"));
  assertTrue(Joiner.on("").join(buckets.get(4)).matches("^E+$"));
  // here we have a bigger bucket content, cause our buffersize was small
  // NOTE(review): the comment above says "bigger" yet every bucket is asserted
  // to hold exactly one element — the comment looks stale; confirm against the
  // processor's sampling behavior before relying on it.
  assertEquals(1, buckets.get(0).size());
  assertEquals(1, buckets.get(1).size());
  assertEquals(1, buckets.get(2).size());
  assertEquals(1, buckets.get(3).size());
  assertEquals(1, buckets.get(4).size());
}
/**
 * Formats an element's children either on one line or wrapped across indented
 * lines, depending on the hints given and on the rendered width/content.
 *
 * <p>Wrapping is forced when any child renders with an embedded newline, when
 * there are more than {@code maxChildren} children, or when the combined width
 * exceeds {@code AUTO_WRAP_CHARS}. A cardinality suffix (from
 * {@code getCardinality}) is appended after the closing parenthesis when present.
 *
 * @param element         the element whose cardinality decorates the result
 * @param children        children to render and join
 * @param separator       separator placed between child strings
 * @param needWrap        caller hint to force multi-line output
 * @param needParenthesis caller hint to force parentheses in single-line output
 * @param maxChildren     child count above which wrapping is forced
 */
protected String format(
    ELEMENT element,
    Iterable<ELEMENT> children,
    String separator,
    boolean needWrap,
    boolean needParenthesis,
    int maxChildren) {
  List<String> childStrs2 = Lists.newArrayList();
  int width2 = 0;
  for (ELEMENT child : children) {
    String childStr = format(child, true);
    childStrs2.add(childStr);
    width2 += childStr.length();
    // A multi-line child forces the whole group onto multiple lines.
    if (childStr.contains("\n")) needWrap = true;
  }
  if (childStrs2.size() > maxChildren) needWrap = true;
  if (width2 > AUTO_WRAP_CHARS) needWrap = true;
  String cardinality = getCardinality(element);
  if (needWrap) {
    // Re-indent nested newlines so children line up one indent level deeper.
    for (int i = 0; i < childStrs2.size(); i++)
      childStrs2.set(i, childStrs2.get(i).replaceAll("\\n", "\n" + INDENT));
    String body = Joiner.on(separator + "\n" + INDENT).join(childStrs2);
    return "(\n" + INDENT + body + "\n)" + (cardinality == null ? "" : cardinality);
  } else {
    // A cardinality on multiple children needs parentheses to bind correctly.
    if (cardinality != null && childStrs2.size() > 1) needParenthesis = true;
    String body = Joiner.on(separator).join(childStrs2);
    if (needParenthesis) return "(" + body + ")" + (cardinality == null ? "" : cardinality);
    return body;
  }
}
/**
 * Applies the requested service state transition and marks the reply as winning
 * on success.
 *
 * <p>Named transitions are tried first; otherwise the request's state string is
 * resolved to a {@code Component.State} and applied through the topology.
 *
 * @param request the modify-service request carrying name and target state
 * @return the populated reply
 * @throws EucalyptusCloudException wrapping any transition failure, with the
 *     valid transitions and states listed in the message
 */
public static ModifyServiceResponseType modifyService(final ModifyServiceType request)
    throws Exception {
  final ModifyServiceResponseType reply = request.getReply();
  try {
    if (NamedTransition.INSTANCE.apply(request)) {
      reply.markWinning();
    } else {
      Component.State nextState = Component.State.valueOf(request.getState().toUpperCase());
      ServiceConfiguration config = findService(request.getName());
      Topology.transition(nextState).apply(config).get();
      reply.markWinning();
    }
  } catch (Exception ex) {
    // Restore the interrupt flag if the failure was an interruption.
    Exceptions.maybeInterrupted(ex);
    throw new EucalyptusCloudException(
        "Failed to execute request transition: "
            + request.getState()
            + "\nDue to:\n"
            + Throwables.getRootCause(ex).getMessage()
            + "\nPossible arguments are: \n"
            + "TRANSITIONS\n\t"
            + Joiner.on("\n\t").join(Topology.Transitions.values())
            // Newline added so "STATES" does not run into the last transition name.
            + "\nSTATES\n\t"
            + Joiner.on("\n\t").join(Component.State.values()),
        ex);
  }
  return reply;
}
private static void addElement( List<String> lines, Patch patch, Map<String, RevFeatureType> featureTypes) throws IOException { String[] headerTokens = lines.get(0).split("\t"); if (headerTokens.length == 4 || headerTokens.length == 3) { // feature or feature type // modified // modification if (lines.size() == 1) { // feature type FeatureTypeDiff diff = new FeatureTypeDiff( headerTokens[0], ObjectId.valueOf(headerTokens[1]), ObjectId.valueOf(headerTokens[2])); patch.addAlteredTree(diff); } else { // feature String element = Joiner.on("\n").join(lines.subList(1, lines.size())); ByteArrayInputStream stream; stream = new ByteArrayInputStream(element.getBytes(Charsets.UTF_8)); String operation = headerTokens[0].trim(); if (operation.equals("M")) { String fullPath = headerTokens[1].trim(); String oldMetadataId = headerTokens[2].trim(); String newMetadataId = headerTokens[3].trim(); RevFeatureType newRevFeatureType = featureTypes.get(newMetadataId); RevFeatureType oldRevFeatureType = featureTypes.get(oldMetadataId); Map<PropertyDescriptor, AttributeDiff> map = Maps.newHashMap(); for (int i = 1; i < lines.size(); i++) { addDifference(lines.get(i), map, oldRevFeatureType, newRevFeatureType); } FeatureDiff featureDiff = new FeatureDiff(fullPath, map, oldRevFeatureType, newRevFeatureType); patch.addModifiedFeature(featureDiff); } else if (operation.equals("A") || operation.equals("R")) { String fullPath = headerTokens[1].trim(); String featureTypeId = headerTokens[2].trim(); RevFeatureType revFeatureType; revFeatureType = featureTypes.get(featureTypeId); FeatureBuilder featureBuilder = new FeatureBuilder(revFeatureType); RevFeature revFeature = (RevFeature) serializer.read(null, stream); Feature feature = featureBuilder.build(NodeRef.nodeFromPath(fullPath), revFeature); if (operation.equals("R")) { patch.addRemovedFeature(fullPath, feature, revFeatureType); } else { patch.addAddedFeature(fullPath, feature, revFeatureType); } } else { throw new 
IllegalArgumentException("Wrong patch content: " + lines.get(0)); } } } else if (headerTokens.length == 1) { // feature type definition String element = Joiner.on("\n").join(lines); ByteArrayInputStream stream = new ByteArrayInputStream(element.getBytes(Charsets.UTF_8)); String[] tokens = lines.get(1).split("\t"); RevFeatureType featureType = (RevFeatureType) serializer.read(null, stream); featureTypes.put(featureType.getId().toString(), featureType); } else { throw new IllegalArgumentException("Wrong patch content: " + lines.get(0)); } }
/**
 * Builds a CREATE TABLE statement for the given schema/table from two groups of
 * column definitions. Offset columns, when present, also form the primary key.
 *
 * @param schemaName   target schema
 * @param tableName    target table
 * @param offsetFields offset column name -> SQL type (become the PK)
 * @param otherFields  remaining column name -> SQL type
 * @return the formatted CREATE statement
 */
protected static String getCreateStatement(
    String schemaName,
    String tableName,
    Map<String, String> offsetFields,
    Map<String, String> otherFields) {
  final List<String> definitions = new ArrayList<>();
  // Offset columns first, then the rest — "name type" per column.
  offsetFields.forEach((name, type) -> definitions.add(name + " " + type));
  otherFields.forEach((name, type) -> definitions.add(name + " " + type));
  if (!offsetFields.isEmpty()) {
    definitions.add("PRIMARY KEY(" + COMMA_SPACE_JOINER.join(offsetFields.keySet()) + ")");
  }
  final String createQuery =
      String.format(
          CREATE_STATEMENT_TEMPLATE, schemaName, tableName, COMMA_SPACE_JOINER.join(definitions));
  LOG.info("Created Query : " + createQuery);
  return createQuery;
}
/**
 * End-to-end check of the plagiarism checker service: builds an index over the
 * test article repository with two analyzer types, then verifies every article
 * is reported as a match against itself, and finally deletes the index.
 */
@Test
public void plagiarismCheckeServiceCheckTest() throws IOException, ParseException {
  String indexRoot = "index";
  List<ContentAnalyzerType> contentAnalizersList =
      Lists.newArrayList(
          ContentAnalyzerType.SimpleContentAnalizerWithSimpleTokenizer,
          ContentAnalyzerType.BagOfWordsContentAnalizerWithOpenNLPTokenizer);
  IndexBuilderTest.setupIndex(indexRoot, contentAnalizersList);
  // CLI-style argument vector the service context expects.
  String[] args = {
    "--articleRepositoryFolders",
    Joiner.on(',').join(ArticleRepositoryTestUtil.FOLDERS),
    "--contentAnalyzers",
    Joiner.on(',').join(contentAnalizersList),
    "--indexPaths",
    indexRoot
  };
  PlagiarismCheckerService.setupContext(args);
  PlagiarismCheckerService plagiarismCheckeService = new PlagiarismCheckerService();
  // Each article checked against the index must match exactly itself.
  for (int i = 0; i < ArticleRepositoryTestUtil.ARTICLES.length; ++i) {
    for (int j = 0; j < ArticleRepositoryTestUtil.ARTICLES[i].length; ++j) {
      Assert.assertEquals(
          Lists.newArrayList(
              new CheckResult(
                  i, j, ArticleRepositoryTestUtil.ARTICLES[i][j], contentAnalizersList)),
          Lists.newArrayList(
              plagiarismCheckeService.check(ArticleRepositoryTestUtil.ARTICLES[i][j])));
    }
  }
  // Cleanup so subsequent runs start from a fresh index.
  IndexBuilderTest.deleteIndex(indexRoot, contentAnalizersList);
}
/**
 * Verifies the source and target table schemas match column-for-column.
 *
 * <p>Removes every column present in both lists (from both sides, via the
 * iterator to avoid concurrent modification); anything left over on either
 * side is a mismatch and triggers a RuntimeException listing the leftovers.
 * NOTE(review): this mutates the lists returned by the metadata objects —
 * assumes they are safe to modify; confirm getColumnMetaDatas() returns a copy.
 */
private void compareTableMetaData() {
  if (this.srcTableMetaData == null) {
    throw new RuntimeException("src table meta is null");
  }
  if (this.tgtTableMetaData == null) {
    throw new RuntimeException("tgt table meta is null");
  }
  List<ColumnMetaData> srcColumnMetaDatas = srcTableMetaData.getColumnMetaDatas();
  List<ColumnMetaData> tgtColumnMetaDatas = tgtTableMetaData.getColumnMetaDatas();
  Iterator<ColumnMetaData> metaDataIterator = srcColumnMetaDatas.iterator();
  while (metaDataIterator.hasNext()) {
    ColumnMetaData rowData = metaDataIterator.next();
    int index = tgtColumnMetaDatas.indexOf(rowData);
    if (index > -1) {
      // Matched on both sides: drop from both so only differences remain.
      metaDataIterator.remove();
      tgtColumnMetaDatas.remove(index);
    }
  }
  if (CollectionUtils.isNotEmpty(srcColumnMetaDatas)
      || CollectionUtils.isNotEmpty(tgtColumnMetaDatas)) {
    String errorInfo =
        String.format(
            "src table meta [%s] not equal tgt meta [%s]",
            joiner.join(srcColumnMetaDatas), joiner.join(tgtColumnMetaDatas));
    throw new RuntimeException(errorInfo);
  }
}
/**
 * Starts an embedded Elasticsearch node, verifies a node-client index configured
 * from a YAML file works for a simple write/query, then verifies that pointing
 * the index at a bogus YAML config makes construction fail.
 */
@Test
public void testNetworkNodeUsingYaml() throws BackendException, InterruptedException {
  ElasticsearchRunner esr = new ElasticsearchRunner(".", "networkNodeUsingYaml.yml");
  esr.start();
  ModifiableConfiguration config = GraphDatabaseConfiguration.buildGraphConfiguration();
  config.set(INTERFACE, ElasticSearchSetup.NODE.toString(), INDEX_NAME);
  config.set(
      INDEX_CONF_FILE,
      Joiner.on(File.separator).join("target", "test-classes", "es_cfg_nodeclient.yml"),
      INDEX_NAME);
  Configuration indexConfig = config.restrictTo(INDEX_NAME);
  IndexProvider idx = new ElasticSearchIndex(indexConfig);
  simpleWriteAndQuery(idx);
  idx.close();
  // Negative case: a bogus config file must make index construction throw.
  config = GraphDatabaseConfiguration.buildGraphConfiguration();
  config.set(INTERFACE, ElasticSearchSetup.NODE.toString(), INDEX_NAME);
  // Short timeout so the failure is detected quickly.
  config.set(HEALTH_REQUEST_TIMEOUT, "5s", INDEX_NAME);
  config.set(
      INDEX_CONF_FILE,
      Joiner.on(File.separator).join("target", "test-classes", "es_cfg_bogus_nodeclient.yml"),
      INDEX_NAME);
  indexConfig = config.restrictTo(INDEX_NAME);
  Throwable failure = null;
  try {
    idx = new ElasticSearchIndex(indexConfig);
  } catch (Throwable t) {
    failure = t;
  }
  // idx.close();
  Assert.assertNotNull("ES client failed to throw exception on connection failure", failure);
  esr.stop();
}
/**
 * Inserts the given entity by reflecting over its declared fields and binding
 * each as a named parameter in a generated INSERT statement.
 *
 * <p>Static and compiler-synthetic fields (e.g. {@code serialVersionUID},
 * {@code this$0}) are skipped — they are not table columns and previously
 * corrupted the generated SQL.
 *
 * @param t entity to persist; field names are assumed to match column names
 * @throws Exception on reflection or database failure (rolls back the tx)
 */
@Override
@Transactional(rollbackFor = Exception.class)
public void insert(T t) throws Exception {
  List<String> columns = new ArrayList<String>();
  List<String> values = new ArrayList<String>();
  MapSqlParameterSource namedParameters = new MapSqlParameterSource();
  for (Field field : t.getClass().getDeclaredFields()) {
    // Skip non-column fields: statics and synthetics are class-level artifacts.
    if (java.lang.reflect.Modifier.isStatic(field.getModifiers()) || field.isSynthetic()) {
      continue;
    }
    String fieldName = field.getName();
    field.setAccessible(true);
    Object value = field.get(t);
    columns.add(fieldName);
    values.add(String.format(":%s", fieldName));
    namedParameters.addValue(fieldName, value);
  }
  StringBuilder sql = new StringBuilder();
  sql.append("INSERT INTO ").append(table);
  sql.append(" (").append(comma.join(columns)).append(")");
  sql.append(" VALUES");
  sql.append(" (").append(comma.join(values)).append(")");
  namedJdbcTemplate.update(sql.toString(), namedParameters);
  logger.info(sql);
}
/**
 * Pushes updated video metadata to YouTube's metadata_ajax endpoint.
 *
 * <p>Collects social/monetization/metadata/permission parameter maps, records
 * which fields changed in "modified_fields", attaches the session token scraped
 * from the page content, and POSTs everything. Failures are logged, not thrown.
 *
 * @param content page HTML used to extract the session token
 * @param upload  the upload whose metadata is being changed
 */
private void changeMetadata(final String content, final Upload upload) {
  final Map<String, Object> params = new HashMap<>(METADATA_PARAMS_SIZE);
  params.putAll(getMetadataSocial(upload));
  params.putAll(getMetadataMonetization(content, upload));
  params.putAll(getMetadataMetadata(upload));
  params.putAll(getMetadataPermissions(upload));
  // (Removed a stray System.out.println debug leftover that dumped the key set.)
  params.put("modified_fields", Joiner.on(MODIFIED_SEPERATOR).skipNulls().join(params.keySet()));
  params.put("creator_share_feeds", "yes");
  // Session token is embedded in the page as: var session_token = "...";
  final String token = extractor(content, "var session_token = \"", "\"");
  params.put("session_token", token);
  params.put("action_edit_video", "1");
  try {
    final HttpResponse<String> response =
        Unirest.post(
                String.format(
                    "https://www.youtube.com/metadata_ajax?video_id=%s", upload.getVideoid()))
            .fields(params)
            .asString();
    LOGGER.info(response.getBody());
  } catch (final Exception e) {
    // Best-effort update: log and continue rather than failing the upload.
    LOGGER.warn("Metadata not set", e);
  }
}
/**
 * Asserts that when compiling with the given compiler options, {@code original} is transformed
 * into {@code compiled}. If {@code warning} is non-null, we will also check if the given warning
 * type was emitted.
 */
private void test(String[] original, String[] compiled, DiagnosticType warning) {
  Compiler compiler = compile(original);
  if (warning == null) {
    assertEquals(
        "Expected no warnings or errors\n"
            + "Errors: \n"
            + Joiner.on("\n").join(compiler.getErrors())
            // Newline added so "Warnings:" does not run into the last error line.
            + "\nWarnings: \n"
            + Joiner.on("\n").join(compiler.getWarnings()),
        0,
        compiler.getErrors().length + compiler.getWarnings().length);
  } else {
    // Exactly one warning of the expected type.
    assertEquals(1, compiler.getWarnings().length);
    assertEquals(warning, compiler.getWarnings()[0].getType());
  }
  Node root = compiler.getRoot().getLastChild();
  if (useStringComparison) {
    // Compare emitted source text directly.
    assertEquals(Joiner.on("").join(compiled), compiler.toSource());
  } else {
    // Compare ASTs; checkTreeEquals returns an explanation string on mismatch.
    Node expectedRoot = parse(compiled);
    String explanation = expectedRoot.checkTreeEquals(root);
    assertNull(
        "\nExpected: "
            + compiler.toSource(expectedRoot)
            + "\nResult: "
            + compiler.toSource(root)
            + "\n"
            + explanation,
        explanation);
  }
}
/** {@inheritDoc} */
@Override
public void onMatching(
    String docUri, Type type, SortedSet<Annotation> goldAnnos, SortedSet<Annotation> sysAnnos) {
  // One-to-one match with identical spans is reported as "Exact".
  if (goldAnnos.size() == 1 && sysAnnos.size() == 1) {
    Annotation goldAnno = goldAnnos.iterator().next();
    Annotation sysAnno = sysAnnos.iterator().next();
    if (goldAnno.getBegin() == sysAnno.getBegin() && goldAnno.getEnd() == sysAnno.getEnd()) {
      printRow(
          type.getShortName(),
          "Exact",
          goldAnno.getCoveredText(),
          String.valueOf(goldAnno.getBegin()),
          sysAnno.getCoveredText(),
          String.valueOf(sysAnno.getBegin()),
          docUri);
      return;
    }
  }
  // Anything else (multiple annotations or differing spans) is "Partial";
  // texts and offsets are joined so each cell lists all annotations.
  printRow(
      type.getShortName(),
      "Partial",
      Joiner.on(" /// ").join(transform(goldAnnos, annoToTxt)),
      Joiner.on(", ").join(transform(goldAnnos, annoToOffset)),
      Joiner.on(" /// ").join(transform(sysAnnos, annoToTxt)),
      Joiner.on(", ").join(transform(sysAnnos, annoToOffset)),
      docUri);
}
/**
 * Renders this INSERT statement back to SQL text: optional WITH clause,
 * OVERWRITE/INTO, target table, optional column permutation, optional PARTITION
 * clause, optional plan hints, and the source query unless it is generated.
 */
@Override
public String toSql() {
  StringBuilder strBuilder = new StringBuilder();
  if (withClause_ != null) strBuilder.append(withClause_.toSql() + " ");
  strBuilder.append("INSERT ");
  if (overwrite_) {
    strBuilder.append("OVERWRITE ");
  } else {
    strBuilder.append("INTO ");
  }
  // Use the name as originally written by the user, not the resolved name.
  strBuilder.append("TABLE " + originalTableName_);
  if (columnPermutation_ != null) {
    strBuilder.append("(");
    strBuilder.append(Joiner.on(", ").join(columnPermutation_));
    strBuilder.append(")");
  }
  if (partitionKeyValues_ != null) {
    List<String> values = Lists.newArrayList();
    for (PartitionKeyValue pkv : partitionKeyValues_) {
      // Dynamic partition keys have no value and are emitted as bare names.
      values.add(
          pkv.getColName() + (pkv.getValue() != null ? ("=" + pkv.getValue().toSql()) : ""));
    }
    strBuilder.append(" PARTITION (" + Joiner.on(", ").join(values) + ")");
  }
  if (planHints_ != null) {
    strBuilder.append(" " + ToSqlUtils.getPlanHintsSql(planHints_));
  }
  if (!needsGeneratedQueryStatement_) {
    strBuilder.append(" " + queryStmt_.toSql());
  }
  return strBuilder.toString();
}
/**
 * Builds the CREATE TABLE statement backing the given index.
 *
 * <p>Each column is typed NONE; the joiner supplies " NONE," between columns and
 * the template's trailing " NONE" covers the final column.
 *
 * @param indexName index whose backing table is being created
 * @param columns   column names for the table
 * @return the CREATE TABLE SQL text
 */
private String createIndexTableStatementForIndex(String indexName, List<String> columns) {
  String columnList = Joiner.on(" NONE,").skipNulls().join(columns);
  return String.format(
      "CREATE TABLE %s ( %s NONE )", IndexManager.tableNameForIndex(indexName), columnList);
}
/**
 * Consumes one received event from the expected-event list, or fails the test
 * with a message listing the still-expected events.
 *
 * <p>Synchronized on {@code this} because the expected-event list is shared
 * with the threads that register expectations. Matching is by identity
 * ({@code ==}), and removal goes through the iterator to stay safe while
 * iterating.
 */
private void assertEqualsNicely(final NextEvent received) {
  synchronized (this) {
    boolean foundIt = false;
    final Iterator<NextEvent> it = nextExpectedEvent.iterator();
    while (it.hasNext()) {
      final NextEvent ev = it.next();
      if (ev == received) {
        it.remove();
        foundIt = true;
        log.debug("Found expected event {}. Yeah!", received);
        break;
      }
    }
    if (!foundIt) {
      // Log and fail with the same diagnostic so both channels carry context.
      log.error(
          "Received unexpected event "
              + received
              + "; remaining expected events ["
              + SPACE_JOINER.join(nextExpectedEvent)
              + "]");
      failed(
          "TestApiListener [ApiListenerStatus]: Received unexpected event "
              + received
              + "; remaining expected events ["
              + SPACE_JOINER.join(nextExpectedEvent)
              + "]");
    }
  }
}
@Test public void testOverride() throws IOException { Reader readerA = new StringReader(Joiner.on('\n').join("[cache]", " mode = dir,cassandra")); Reader readerB = new StringReader(Joiner.on('\n').join("[cache]", " mode =")); // Verify that no exception is thrown when a definition is overridden. BuckConfig.createFromReaders(ImmutableList.of(readerA, readerB)); }
/**
 * Computes a 32-bit fingerprint over the current network state so changes can
 * be detected cheaply.
 *
 * <p>Folds in: every versioned item from the info source (id + version),
 * cluster names and dirty public addresses (both sorted into TreeSets so the
 * hash is order-independent), and the network configuration's hashCode.
 */
private static int fingerprint(
    final NetworkInfoSource source,
    final List<com.eucalyptus.cluster.Cluster> clusters,
    final Set<String> dirtyPublicAddresses,
    final String networkConfiguration) {
  final HashFunction hashFunction = goodFastHash(32);
  final Hasher hasher = hashFunction.newHasher();
  // Funnels each versioned item as "id=version" into the hash stream.
  final Funnel<VersionedNetworkView> versionedItemFunnel =
      new Funnel<VersionedNetworkView>() {
        @Override
        public void funnel(final VersionedNetworkView o, final PrimitiveSink primitiveSink) {
          primitiveSink.putString(o.getId(), StandardCharsets.UTF_8);
          primitiveSink.putChar('=');
          primitiveSink.putInt(o.getVersion());
        }
      };
  for (final Map.Entry<String, Iterable<? extends VersionedNetworkView>> entry :
      source.getView().entrySet()) {
    hasher.putString(entry.getKey(), StandardCharsets.UTF_8);
    for (final VersionedNetworkView item : entry.getValue()) {
      hasher.putObject(item, versionedItemFunnel);
    }
  }
  // TreeSet sorts the names so iteration order cannot perturb the fingerprint.
  hasher.putString(
      Joiner.on(',').join(Sets.newTreeSet(Iterables.transform(clusters, HasName.GET_NAME))),
      StandardCharsets.UTF_8);
  hasher.putString(
      Joiner.on(',').join(Sets.newTreeSet(dirtyPublicAddresses)), StandardCharsets.UTF_8);
  hasher.putInt(networkConfiguration.hashCode());
  return hasher.hash().asInt();
}
/**
 * Find all {@code .thrift} files in the given directory.
 *
 * @param directory directory to scan; must exist and be a directory
 * @return immutable set of matching files per the configured include/exclude patterns
 * @throws IOException if the directory scan fails
 */
private ImmutableSet<File> findThriftFilesInDirectory(File directory) throws IOException {
  checkNotNull(directory);
  checkArgument(directory.isDirectory(), "%s is not a directory", directory);
  // getFiles takes comma-separated include/exclude pattern strings.
  final String includePatterns = Joiner.on(",").join(includes);
  final String excludePatterns = Joiner.on(",").join(excludes);
  return ImmutableSet.copyOf(getFiles(directory, includePatterns, excludePatterns));
}
/**
 * Invokes the Wire compiler over the configured proto files, writing generated
 * Java into {@code generatedSourceDirectory} and registering that directory as
 * a compile source root on success.
 *
 * @throws MojoExecutionException wrapping any compiler failure
 */
private void compileProtos() throws MojoExecutionException {
  List<String> args = Lists.newArrayList();
  args.add("--proto_path=" + protoSourceDirectory);
  args.add("--java_out=" + generatedSourceDirectory);
  if (noOptions) {
    args.add("--no_options");
  }
  if (enumOptions != null && enumOptions.length > 0) {
    args.add("--enum_options=" + Joiner.on(',').join(enumOptions));
  }
  if (registryClass != null) {
    args.add("--registry_class=" + registryClass);
  }
  if (roots != null && roots.length > 0) {
    args.add("--roots=" + Joiner.on(',').join(roots));
  }
  args.addAll(java.util.Arrays.asList());
  Collections.addAll(args, protoFiles);
  getLog().info("Invoking wire compiler with arguments:");
  getLog().info(Joiner.on('\n').join(args));
  try {
    // TODO(shawn) we don't have a great programmatic interface to the compiler.
    // Not all exceptions should result in MojoFailureExceptions (i.e. bugs in this plugin that
    // invoke the compiler incorrectly).
    WireCompiler.main(args.toArray(new String[args.size()]));
    // Add the directory into which generated sources are placed as a compiled source root.
    project.addCompileSourceRoot(generatedSourceDirectory);
  } catch (Exception e) {
    throw new MojoExecutionException("Wire Plugin: Failure compiling proto sources.", e);
  }
}
/**
 * Compiles a minimal {@code @Table}-annotated class through the ORM processor
 * and checks the generated DAO source matches the expected output.
 *
 * <p>NOTE(review): the expected generated source below appears to be missing
 * semicolons after the SQL constant and the values.put(...) line, and the
 * expected file name says "ViewBinder" while the class is "Test$$Dao" —
 * presumably these mirror the processor's actual output; confirm before
 * "fixing" either string.
 */
@Test
public void testTable() {
  JavaFileObject source =
      JavaFileObjects.forSourceString(
          "test.Test",
          Joiner.on('\n')
              .join(
                  "package test;",
                  "import Table;",
                  "import Key;",
                  "@Table",
                  "public class Test {",
                  " @Key long id;",
                  " String name;",
                  "}"));
  JavaFileObject expectedSource =
      JavaFileObjects.forSourceString(
          "test/Test$$ViewBinder",
          Joiner.on('\n')
              .join(
                  "// Generated code from Cying-ORM. Do not modify!",
                  "package test;",
                  "import android.content.ContentValues;",
                  "import android.database.Cursor;",
                  "import BaseDao;",
                  "public class Test$$Dao extends BaseDao<Test> {",
                  " private static String SQL=\"CREATE TABLE test (id INTEGER PRIMARY KEY AUTOINCREMENT,name TEXT);\"",
                  " static {",
                  " saveSQL(SQL);",
                  " }",
                  " @Override protected Test cursorToEntity(Cursor cursor) {",
                  " Test entity=new Test();",
                  " entity.id=cursor.getLong(cursor.getColumnIndex(\"id\"));",
                  " entity.name=cursor.getString(cursor.getColumnIndex(\"name\"));",
                  " return entity;",
                  " }",
                  " @Override protected ContentValues entityToValues(Test entity) {",
                  " ContentValues values=new ContentValues();",
                  " values.put(\"name\",entity.name)",
                  " return values;",
                  " }",
                  " @Override public String getTableName() {",
                  " return \"test\";",
                  " }",
                  " @Override public String getTableSQL() { return SQL; }",
                  " @Override public String getIdentityName() {",
                  " return \"id\";",
                  " }",
                  " @Override public long getIdentity(Test entity) {",
                  " return entity.id;",
                  " }",
                  "}"));
  Truth.assertAbout(javaSource())
      .that(source)
      .processedWith(new ORMProcessor())
      .compilesWithoutError()
      .and()
      .generatesSources(expectedSource);
}
/**
 * Builds a merge-commit summary line for the given merged changes.
 *
 * <p>Single change: quotes its short message. One shared topic: names the
 * topic. Multiple topics: lists them. Otherwise: lists up to five abbreviated
 * change keys, with ", ..." appended when more were merged.
 */
private String summarize(RevWalk rw, List<CodeReviewCommit> merged) throws IOException {
  if (merged.size() == 1) {
    CodeReviewCommit c = merged.get(0);
    // parseBody is required before getShortMessage is valid.
    rw.parseBody(c);
    return String.format("Merge \"%s\"", c.getShortMessage());
  }
  // LinkedHashSet keeps first-seen topic order for the multi-topic message.
  LinkedHashSet<String> topics = new LinkedHashSet<>(4);
  for (CodeReviewCommit c : merged) {
    if (!Strings.isNullOrEmpty(c.change().getTopic())) {
      topics.add(c.change().getTopic());
    }
  }
  if (topics.size() == 1) {
    return String.format("Merge changes from topic '%s'", Iterables.getFirst(topics, null));
  } else if (topics.size() > 1) {
    return String.format("Merge changes from topics '%s'", Joiner.on("', '").join(topics));
  } else {
    return String.format(
        "Merge changes %s%s",
        Joiner.on(',')
            .join(
                Iterables.transform(
                    // Cap the listing at five change keys.
                    Iterables.limit(merged, 5),
                    new Function<CodeReviewCommit, String>() {
                      @Override
                      public String apply(CodeReviewCommit in) {
                        return in.change().getKey().abbreviate();
                      }
                    })),
        merged.size() > 5 ? ", ..." : "");
  }
}
/** Using Joiner together with a StringBuilder (appendTo). */
@Test
public void testJoiner04() {
  Joiner joiner = Joiner.on(",").useForNull("missing");
  StringBuilder sb = new StringBuilder();
  // appendTo writes the joined elements into the existing builder.
  joiner.appendTo(sb, lists);
  System.out.println(sb.toString());
  // NOTE(review): the expected value depends on the shared `lists` fixture
  // (which evidently contains blank entries and a null) — confirm against it.
  Assert.assertEquals("AAA, ,,missing,BBB,CCC", sb.toString());
}
/**
 * Stores the available virtual datacenter ids on the target as a
 * comma-separated string; null ids within the list are skipped, and a null or
 * empty list is stored as the empty string.
 */
private void setAvailableVirtualDatacenters(final List<Integer> ids) {
  final String csv =
      (ids == null || ids.isEmpty()) ? "" : Joiner.on(",").skipNulls().join(ids);
  target.setAvailableVirtualDatacenters(csv);
}
/**
 * Builds the CREATE INDEX statement for the given index's backing table.
 *
 * <p>The SQL index is named "<table>_index" and covers the given columns
 * (nulls in the list are skipped).
 *
 * @param indexName index whose SQL index is being created
 * @param columns   columns the SQL index should cover
 * @return the CREATE INDEX SQL text
 */
private String createIndexIndexStatementForIndex(String indexName, List<String> columns) {
  final String tableName = IndexManager.tableNameForIndex(indexName);
  final String columnList = Joiner.on(",").skipNulls().join(columns);
  return String.format(
      "CREATE INDEX %s ON %s ( %s )", tableName.concat("_index"), tableName, columnList);
}
/**
 * skipNulls and useForNull cannot be used together: Guava's Joiner is
 * immutable, and calling useForNull() on a joiner already configured with
 * skipNulls() throws UnsupportedOperationException. The original code also
 * discarded useForNull's return value, which would have had no effect even if
 * it did not throw.
 */
@Test(expected = UnsupportedOperationException.class)
public void testJoiner03() {
  Joiner joiner = Joiner.on(",").skipNulls();
  // Throws UnsupportedOperationException — the two null policies are exclusive.
  joiner.useForNull("missing");
}
/**
 * Parses a player line of the form "LastName, First [Middle...] POS TEAM" into
 * a {@link PlayerInfo}.
 *
 * <p>The trailing two space-separated tokens are the MLB team and position; the
 * tokens are now removed by position (last index) rather than by value —
 * {@code List.remove(Object)} deletes the FIRST equal occurrence, which could
 * corrupt the name when a first-name token happened to equal the team or
 * position string.
 *
 * @param id           player id used in failure reports
 * @param playerString raw line to parse
 * @throws FailedPlayer if the line does not match the expected shape
 */
private static PlayerInfo parseFromString(int id, String playerString) throws FailedPlayer {
  PlayerInfo playerInfo = new PlayerInfo();
  String[] commaParts = playerString.split(", ");
  if (commaParts.length != 2) {
    throw new FailedPlayer(id, "Found player without exactly one comma.");
  }
  playerInfo.lastName = commaParts[0];
  String remainingString = commaParts[1];
  List<String> spaceParts = Lists.newArrayList(remainingString.split(" "));
  int numParts = spaceParts.size();
  if (numParts < 3) {
    throw new FailedPlayer(
        id,
        "Found player with fewer than 3 symbols after the comma: '"
            + remainingString
            + "', Player "
            + playerString);
  }
  // Pop the trailing tokens by index: team last, then position.
  playerInfo.MLBTeam = spaceParts.remove(spaceParts.size() - 1);
  playerInfo.Position = spaceParts.remove(spaceParts.size() - 1);
  if (playerInfo.MLBTeam.length() < 2) {
    throw new FailedPlayer(
        id,
        "Incorrect team name '"
            + playerInfo.MLBTeam
            + "', from remainder string '"
            + remainingString
            + "'");
  }
  if (playerInfo.Position.length() < 1) {
    throw new FailedPlayer(
        id,
        "Incorrect position '"
            + playerInfo.Position
            + "', from remainder string '"
            + remainingString
            + "'");
  }
  if (spaceParts.size() < 1) {
    throw new FailedPlayer(id, "Found no parts remaining in the first name piece.");
  }
  // Whatever remains is the (possibly multi-token) first name.
  playerInfo.firstName = Joiner.on(" ").join(spaceParts);
  return playerInfo;
}