@Before public void setUp() { if (!Boolean.parseBoolean(System.getProperty("test.solr.verbose"))) { java.util.logging.Logger.getLogger("org.apache.solr") .setLevel(java.util.logging.Level.SEVERE); Utils.setLog4jLogLevel(org.apache.log4j.Level.WARN); } testDataParentPath = System.getProperty("test.data.path"); testConfigFname = System.getProperty("test.config.file"); // System.out.println("-----testDataParentPath = "+testDataParentPath); }
/**
 * One-time suite bootstrap. Decides between an in-process HBase mini cluster and a
 * real (optionally secured) cluster based on the "cluster.type" / "cluster.secured"
 * system properties, creates the shared admin/connection handles, and disables the
 * balancer for the duration of the suite (previous state kept in
 * {@code previousBalancerRunning} for restoration at teardown).
 *
 * @throws Exception if cluster startup or connection setup fails
 */
@BeforeClass
public static void setUpOnce() throws Exception {
  // Constant-first comparisons: a missing property now means "real, unsecured
  // cluster" instead of an NPE before the suite even starts.
  miniCluster = "mini".equals(System.getProperty("cluster.type"));
  securedCluster = "true".equals(System.getProperty("cluster.secured"));
  System.out.println("realCluster - " + !miniCluster);
  System.out.println("securedCluster - " + securedCluster);
  Util.setLoggingThreshold("ERROR");
  if (miniCluster) {
    if (hbase == null) {
      hbase = new HBaseTestingUtility();
      conf = hbase.getConfiguration();
      // Very generous timeouts so slow CI machines don't kill long-running tests.
      conf.set("zookeeper.session.timeout", "3600000");
      conf.set("dfs.client.socket-timeout", "3600000");
      // startMiniCluster is identical in both branches; hoisted out of the if.
      hbase.startMiniCluster(RS_COUNT);
      if (securedCluster) {
        hbase.waitTableEnabled(AccessControlLists.ACL_TABLE_NAME, 30000L);
        admin = new HBaseAdminWrapper(conf);
      } else {
        admin = hbase.getHBaseAdmin();
      }
    }
  } else {
    if (admin == null) {
      final String argsFileName =
          securedCluster
              ? "../../testClusterRealSecured.args"
              : "../../testClusterRealNonSecured.args";
      if (!Util.isFile(argsFileName)) {
        throw new IllegalStateException(
            "You have to define args file " + argsFileName + " for tests.");
      }
      String[] testArgs = {argsFileName};
      Args args = new TestArgs(testArgs);
      admin = HBaseClient.getAdmin(args);
      conf = admin.getConfiguration();
      RS_COUNT = getServerNameList().size();
    }
  }
  previousBalancerRunning = admin.setBalancerRunning(false, true);
  hConnection = HConnectionManager.createConnection(conf);
  USER_RW = User.createUserForTesting(conf, "rwuser", new String[0]);
}
/**
 * Demo client: connects to the AF_UNIX socket created by the matching server demo
 * under java.io.tmpdir, reads one greeting, sends one reply, and exits.
 *
 * <p>Fixes over the previous version: socket and streams are closed via
 * try-with-resources even when an I/O error occurs mid-conversation, and an EOF
 * from the server (read() == -1) no longer triggers a StringIndexOutOfBoundsException.
 *
 * @param args unused
 * @throws IOException if the connection fails (a hint is printed first) or I/O errors occur
 */
public static void main(String[] args) throws IOException {
  final File socketFile =
      new File(new File(System.getProperty("java.io.tmpdir")), "junixsocket-test.sock");
  try (AFUNIXSocket sock = AFUNIXSocket.newInstance()) {
    try {
      sock.connect(new AFUNIXSocketAddress(socketFile));
    } catch (AFUNIXSocketException e) {
      System.out.println("Cannot connect to server. Have you started it?");
      System.out.flush();
      throw e;
    }
    System.out.println("Connected");
    try (InputStream is = sock.getInputStream();
        OutputStream os = sock.getOutputStream()) {
      byte[] buf = new byte[128];
      int read = is.read(buf);
      // Guard against EOF: read is -1 when the server closed without sending.
      System.out.println("Server says: " + (read >= 0 ? new String(buf, 0, read) : "<EOF>"));
      System.out.println("Replying to server...");
      os.write("Hello Server".getBytes());
      os.flush();
    }
  }
  System.out.println("End of communication.");
}
/**
 * Demo server: binds an AF_UNIX socket under java.io.tmpdir and, until interrupted,
 * accepts one client at a time, sends a greeting, and echoes the client's reply.
 *
 * <p>Fixes over the previous version: the server socket, each accepted connection,
 * and its streams are closed via try-with-resources, so a failing client no longer
 * leaks descriptors or leaves the socket file bound; an EOF from the client
 * (read() == -1) no longer triggers a StringIndexOutOfBoundsException.
 *
 * @param args unused
 * @throws IOException on bind/accept/stream failures
 */
public static void main(String[] args) throws IOException {
  final File socketFile =
      new File(new File(System.getProperty("java.io.tmpdir")), "junixsocket-test.sock");
  try (AFUNIXServerSocket server = AFUNIXServerSocket.newInstance()) {
    server.bind(new AFUNIXSocketAddress(socketFile));
    System.out.println("server: " + server);
    while (!Thread.interrupted()) {
      System.out.println("Waiting for connection...");
      try (Socket sock = server.accept();
          InputStream is = sock.getInputStream();
          OutputStream os = sock.getOutputStream()) {
        System.out.println("Connected: " + sock);
        System.out.println("Saying hello to client " + os);
        os.write("Hello, dear Client".getBytes());
        os.flush();
        byte[] buf = new byte[128];
        int read = is.read(buf);
        // Guard against EOF: read is -1 when the client closed without replying.
        System.out.println(
            "Client's response: " + (read >= 0 ? new String(buf, 0, read) : "<EOF>"));
      }
    }
  }
}
public void execute(WorkerProcessContext workerProcessContext) { // Check environment assertThat(System.getProperty("test.system.property"), equalTo("value")); assertThat(System.getenv().get("TEST_ENV_VAR"), equalTo("value")); // Check ClassLoaders ClassLoader antClassLoader = Project.class.getClassLoader(); ClassLoader thisClassLoader = getClass().getClassLoader(); ClassLoader systemClassLoader = ClassLoader.getSystemClassLoader(); assertThat(antClassLoader, not(sameInstance(systemClassLoader))); assertThat(thisClassLoader, not(sameInstance(systemClassLoader))); assertThat(antClassLoader.getParent(), equalTo(systemClassLoader.getParent())); try { assertThat( thisClassLoader.loadClass(Project.class.getName()), sameInstance((Object) Project.class)); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } // Send some messages TestListenerInterface sender = workerProcessContext.getServerConnection().addOutgoing(TestListenerInterface.class); sender.send("message 1", 1); sender.send("message 2", 2); }
/**
 * JUnit {@link RunListener} that appends one line per test lifecycle event to the
 * file named by the "junit.output.file" system property. The writer is opened when
 * the run starts and closed when the run finishes.
 */
public class JUnitListener extends RunListener {
  private PrintWriter pw;
  // Output destination; resolved once when the listener is instantiated.
  private String outputFile = System.getProperty("junit.output.file");

  public void testRunStarted(Description description) throws Exception {
    // NOTE(review): assumes testRunStarted always fires before the other callbacks;
    // otherwise pw is still null and they would NPE — confirm with the runner used.
    pw = new PrintWriter(new FileWriter(outputFile));
    pw.println("testRunStarted");
  }

  public void testRunFinished(Result result) throws Exception {
    pw.println("testRunFinished");
    pw.close();
  }

  public void testStarted(Description description) throws Exception {
    pw.println("testStarted " + description.getDisplayName());
  }

  public void testFinished(Description description) throws Exception {
    pw.println("testFinished " + description.getDisplayName());
  }

  public void testFailure(Failure failure) throws Exception {
    pw.println("testFailure " + failure.getDescription().getDisplayName());
  }

  public void testAssumptionFailure(Failure failure) {
    // NOTE(review): unlike every other callback this uses print (no newline) and a
    // different message style — presumably intentional for whatever parses the file;
    // confirm before "fixing".
    pw.print("ASSUMPTION FAILURE");
  }

  public void testIgnored(Description description) throws Exception {
    pw.println("testIgnored " + description.getDisplayName());
  }
}
@Test public void canFollowLogfile() throws IOException { File tempFile = File.createTempFile("commons-io", "", new File(System.getProperty("java.io.tmpdir"))); tempFile.deleteOnExit(); System.out.println("Temp file = " + tempFile.getAbsolutePath()); PrintStream log = new PrintStream(tempFile); LogfileFollower follower = new LogfileFollower(tempFile); List<String> lines; // Empty file: lines = follower.newLines(); assertEquals(0, lines.size()); // Write two lines: log.println("Line 1"); log.println("Line 2"); lines = follower.newLines(); assertEquals(2, lines.size()); assertEquals("Line 2", lines.get(1)); // Write one more line: log.println("Line 3"); lines = follower.newLines(); assertEquals(1, lines.size()); assertEquals("Line 3", lines.get(0)); // Write one and a half line and finish later: log.println("Line 4"); log.print("Line 5 begin"); lines = follower.newLines(); assertEquals(1, lines.size()); // End last line and start a new one: log.println(" end"); log.print("Line 6 begin"); lines = follower.newLines(); assertEquals(1, lines.size()); assertEquals("Line 5 begin end", lines.get(0)); // End last line: log.println(" end"); lines = follower.newLines(); assertEquals(1, lines.size()); assertEquals("Line 6 begin end", lines.get(0)); // A line only missing a newline: log.print("Line 7"); lines = follower.newLines(); assertEquals(0, lines.size()); log.println(); lines = follower.newLines(); assertEquals(1, lines.size()); assertEquals("Line 7", lines.get(0)); // Delete: log.close(); lines = follower.newLines(); assertEquals(0, lines.size()); }
private static DatabaseEngine databaseEngine() { String databaseEngine = System.getProperty(DATABASE_ENGINE_SYSTEM_PARAMETER); DatabaseEngine engine = databaseEngine != null ? DatabaseEngine.valueOf(databaseEngine) : DatabaseEngine.mysql; // logger.info("Using database enigine: " + engine); return engine; }
/**
 * Shuts down the test harness, and makes the best attempt possible to delete dataDir, unless the
 * system property "solr.test.leavedatadir" is set.
 */
@Override
public void tearDown() throws Exception {
  log.info("####TEARDOWN_START " + getTestName());
  // Only clear the factory override if we were the ones who set it in setUp.
  if (factoryProp == null) {
    System.clearProperty("solr.directoryFactory");
  }
  if (h != null) {
    h.close();
  }
  String skip = System.getProperty("solr.test.leavedatadir");
  if (skip != null && !skip.trim().isEmpty()) {
    System.err.println(
        "NOTE: per solr.test.leavedatadir, dataDir will not be removed: "
            + dataDir.getAbsolutePath());
  } else if (!recurseDelete(dataDir)) {
    // Best effort only: report, don't fail the test run over leftover files.
    System.err.println(
        "!!!! WARNING: best effort to remove " + dataDir.getAbsolutePath() + " FAILED !!!!!");
  }
  resetExceptionIgnores();
  super.tearDown();
}
/**
 * Tests sane default for local directory if directory no longer exists.
 *
 * @throws Exception
 */
@Test
public void test08RestoreCanRevertToDefaultLocalIfLocalDirIsRemoved() throws Exception {
  // Remove the previously-used local directory, then reconnect.
  assertTrue(testDir.delete());
  session.disconnect();
  session.restore();
  // With its old directory gone, the session must fall back to user.dir.
  String defaultLocalDir = System.getProperty("user.dir");
  assertTrue(session.getLocalDirectory().equals(defaultLocalDir));
}
/**
 * Opens the given file for scanning, with tokens delimited by the platform line
 * separator (i.e. the scanner yields whole lines).
 *
 * <p>If the file is missing the stack trace is printed and the {@code scanner}
 * field is left unassigned by this constructor — callers will then fail on first
 * use; presumably this best-effort behavior is intentional.
 */
KatScanner(String fileName) {
  try {
    Scanner lineScanner = new Scanner(new File(fileName));
    // System.lineSeparator() returns exactly the "line.separator" property value.
    lineScanner.useDelimiter(System.lineSeparator());
    scanner = lineScanner;
  } catch (FileNotFoundException e) {
    e.printStackTrace();
  }
}
@Before public void setup() throws IOException { // Avoiding "restx.shell.home_IS_UNDEFINED/ dir due to logs this.initialRestxShellHomeValue = System.getProperty("restx.shell.home"); if (this.initialRestxShellHomeValue == null) { System.setProperty("restx.shell.home", workDirectory.newFolder(".restx").getAbsolutePath()); } }
@Before public void setUp() throws Exception { lastTestFailed = false; // hope for the best, but set to true in asserts that fail // new output dir for each test tmpdir = new File( System.getProperty("java.io.tmpdir"), getClass().getSimpleName() + "-" + System.currentTimeMillis()) .getAbsolutePath(); // tmpdir = "/tmp"; }
/** * Tests creating file in appropriate local directory. * * @throws Exception unexpected */ @Test public void test04CanCreateFileInNewLocalDirectory() throws Exception { session.changeLocalDirectory(System.getProperty("user.dir")); testDir = new File(session.getLocalDirectory(), "goodSessionTestDir"); testDir.mkdir(); testDir.deleteOnExit(); session.changeLocalDirectory(testDir.getPath()); File testfile = new File(session.getLocalDirectory(), "goodSessionTest04"); testfile.createNewFile(); testfile.deleteOnExit(); assertTrue(testfile.getAbsolutePath().contains(testDir.getAbsolutePath())); // Force deletion now so we can delete containing directory later. assertTrue(testfile.delete()); }
/**
 * Reads a program source file into a single string: trailing whitespace is stripped
 * from every line, lines are joined with the platform line separator, and leading/
 * trailing whitespace of the whole result is trimmed.
 *
 * <p>Fixes over the previous version: the scanner is managed by try-with-resources
 * (no dummy {@code new Scanner("")} placeholder needed just so finally could close
 * something), and the separator is appended via a second {@code append} instead of
 * string concatenation inside the loop.
 *
 * @param path filesystem path of the file to read
 * @return the normalized program text, or "" if the file was not found
 *     (an error is printed to stderr in that case)
 */
public static String readProgramFromFile(String path) {
  StringBuilder programString = new StringBuilder();
  String newLine = System.lineSeparator();
  try (Scanner scanner = new Scanner(new FileReader(path))) {
    while (scanner.hasNextLine()) {
      // Strip trailing whitespace per line; leading indentation is preserved.
      programString
          .append(scanner.nextLine().replaceAll("\\p{javaWhitespace}+$", ""))
          .append(newLine);
    }
  } catch (FileNotFoundException e) {
    System.err.println(
        "readProgramFromFile: " + "The file with the provided path was not found.");
  }
  return programString.toString().trim();
}
@Before public void setUp() throws Exception { rootDirectory = new File( System.getProperty("java.io.tmpdir"), "libgdx/" + String.valueOf(System.nanoTime())); sourceDirectory = new File(rootDirectory, "source"); sourceDirectory.mkdirs(); targetDirectory = new File(rootDirectory, "target"); targetDirectory.mkdirs(); logger = new PrintWriterTreeLogger(); // when(generator.buildComposerFactory(packageName, className)) // .thenReturn(composerFactory); expectSourceValue(sourceDirectory.getAbsolutePath()); expectOutputValue(targetDirectory.getAbsolutePath()); expectEmptyProperty("gdx.assetfilterclass"); warDirectory = new File("war"); }
/**
 * Initializes things your test might need
 *
 * <ul>
 *   <li>Creates a dataDir in the "java.io.tmpdir"
 *   <li>initializes the TestHarness h using this data directory, and getSchemaPath()
 *   <li>initializes the LocalRequestFactory lrf using sensible defaults.
 * </ul>
 */
@Override
public void setUp() throws Exception {
  super.setUp();
  log.info("####SETUP_START " + getTestName());
  ignoreException("ignore_exception");
  // Default to the RAM-backed directory factory unless the caller pinned one;
  // remembering the original value lets tearDown restore it.
  factoryProp = System.getProperty("solr.directoryFactory");
  if (factoryProp == null) {
    System.setProperty("solr.directoryFactory", "solr.RAMDirectoryFactory");
  }
  dataDir = new File(TEMP_DIR, getClass().getName() + "-" + System.currentTimeMillis());
  dataDir.mkdirs();
  String configFile = getSolrConfigFile();
  System.setProperty("solr.solr.home", getSolrHome());
  if (configFile != null) {
    // Reuse the value fetched above instead of calling getSolrConfigFile() a second
    // time (an override could in principle return a different value on each call).
    solrConfig = TestHarness.createConfig(configFile);
    h = new TestHarness(dataDir.getAbsolutePath(), solrConfig, getSchemaFile());
    lrf = h.getRequestFactory("standard", 0, 20, CommonParams.VERSION, "2.2");
  }
  log.info("####SETUP_END " + getTestName());
}
/**
 * Suite-level setup: locates the chromedriver binary that the build unpacked into
 * target/chromedriver (with the .exe suffix on Windows), starts a verbose driver
 * service on a free port, and creates the shared ChromeDriver instance.
 *
 * @throws RuntimeException if the chromedriver executable is missing
 */
@BeforeClass
public static void createDriver() {
  // Windows builds ship chromedriver.exe; every other OS uses a bare binary.
  String executableName = "chromedriver";
  String osName = System.getProperty("os.name").toLowerCase(Locale.US);
  if (osName.contains("windows")) {
    executableName += ".exe";
  }
  File chromeDriver = new File("target/chromedriver/" + executableName);
  if (!chromeDriver.exists()) {
    throw new RuntimeException("chromedriver could not be located!");
  }
  ChromeDriverService service =
      new ChromeDriverService.Builder()
          .withVerbose(true)
          .usingAnyFreePort()
          .usingDriverExecutable(chromeDriver)
          .build();
  driver = new ChromeDriver(service);
}
public class VFSRepositoryGitFileSystemTest { // TODO change it to generic independent path private static final String REPOSITORY_ROOT = "designer-playground"; private static final String VFS_REPOSITORY_ROOT = "git://" + REPOSITORY_ROOT; private static final String USERNAME = "******"; private static final String PASSWORD = "******"; private static final String ORIGIN_URL = "https://github.com/mswiderski/designer-playground.git"; private static final String FETCH_COMMAND = "?fetch"; private JbpmProfileImpl profile; private static String gitLocalClone = System.getProperty("java.io.tmpdir") + File.separator + "git-repo"; private static Map<String, String> env = new HashMap<String, String>(); private static int counter = 0; private RepositoryDescriptor descriptor; private VFSFileSystemProducer producer; @BeforeClass public static void prepare() { env.put("username", USERNAME); env.put("password", PASSWORD); env.put("origin", ORIGIN_URL); env.put("fetch.cmd", FETCH_COMMAND); System.setProperty("org.kie.nio.git.dir", gitLocalClone); } @AfterClass public static void cleanup() { System.clearProperty("org.kie.nio.git.dir"); } @Before public void setup() { profile = new JbpmProfileImpl(); producer = new VFSFileSystemProducer(); env.put("repository.root", VFS_REPOSITORY_ROOT + counter); env.put("repository.globaldir", "/global"); descriptor = producer.produceFileSystem(env); } private void deleteFiles(File directory) { for (File file : directory.listFiles()) { if (file.isDirectory()) { deleteFiles(file); } file.delete(); } } @After public void teardown() { File repo = new File(gitLocalClone); if (repo.exists()) { deleteFiles(repo); } repo.delete(); repo = new File(".niogit"); if (repo.exists()) { deleteFiles(repo); } repo.delete(); counter++; } @Test public void testListDirectories() { Repository repository = new VFSRepository(producer.getIoService()); ((VFSRepository) repository).setDescriptor(descriptor); boolean rootFolderExists = 
repository.directoryExists("/processes"); assertTrue(rootFolderExists); Collection<Directory> directories = repository.listDirectories("/"); assertNotNull(directories); assertEquals(3, directories.size()); directories = repository.listDirectories("/processes"); assertNotNull(directories); assertEquals(1, directories.size()); } @Test public void testCreateDirectory() { Repository repository = new VFSRepository(producer.getIoService()); ((VFSRepository) repository).setDescriptor(descriptor); boolean rootFolderExists = repository.directoryExists("/test"); assertFalse(rootFolderExists); Directory directoryId = repository.createDirectory("/test"); assertNotNull(directoryId); assertEquals("test", directoryId.getName()); assertEquals("/", directoryId.getLocation()); assertNotNull(directoryId.getUniqueId()); rootFolderExists = repository.directoryExists("/test"); assertTrue(rootFolderExists); } @Test public void testDirectoryExists() { Repository repository = new VFSRepository(producer.getIoService()); ((VFSRepository) repository).setDescriptor(descriptor); boolean rootFolderExists = repository.directoryExists("/test"); assertFalse(rootFolderExists); Directory directoryId = repository.createDirectory("/test"); assertNotNull(directoryId); assertEquals("test", directoryId.getName()); assertEquals("/", directoryId.getLocation()); assertNotNull(directoryId.getUniqueId()); rootFolderExists = repository.directoryExists("/test"); assertTrue(rootFolderExists); AssetBuilder builder = AssetBuilderFactory.getAssetBuilder(Asset.AssetType.Byte); builder.content("simple content".getBytes()).type("png").name("test").location("/test"); String id = repository.createAsset(builder.getAsset()); assertNotNull(id); boolean assetPathShouldNotExists = repository.directoryExists("/test/test.png"); assertFalse(assetPathShouldNotExists); } @Test public void testDeleteDirectory() { Repository repository = new VFSRepository(producer.getIoService()); ((VFSRepository) 
repository).setDescriptor(descriptor); boolean rootFolderExists = repository.directoryExists("/test"); assertFalse(rootFolderExists); Directory directoryId = repository.createDirectory("/test"); assertNotNull(directoryId); rootFolderExists = repository.directoryExists("/test"); assertTrue(rootFolderExists); boolean deleted = repository.deleteDirectory("/test", true); assertTrue(deleted); rootFolderExists = repository.directoryExists("/test"); assertFalse(rootFolderExists); } @Test public void testDeleteNonEmptyDirectory() { Repository repository = new VFSRepository(producer.getIoService()); ((VFSRepository) repository).setDescriptor(descriptor); boolean rootFolderExists = repository.directoryExists("/test"); assertFalse(rootFolderExists); Directory directoryId = repository.createDirectory("/test/nested"); assertNotNull(directoryId); rootFolderExists = repository.directoryExists("/test"); assertTrue(rootFolderExists); rootFolderExists = repository.directoryExists("/test/nested"); assertTrue(rootFolderExists); boolean deleted = repository.deleteDirectory("/test", false); assertTrue(deleted); rootFolderExists = repository.directoryExists("/test"); assertFalse(rootFolderExists); } @Test public void testListAsset() { Repository repository = new VFSRepository(producer.getIoService()); ((VFSRepository) repository).setDescriptor(descriptor); boolean rootFolderExists = repository.directoryExists("/processes"); assertTrue(rootFolderExists); Collection<Asset> assets = repository.listAssets("/processes"); assertNotNull(assets); assertEquals(2, assets.size()); } @Test public void testListSingleTextAsset() { Repository repository = new VFSRepository(producer.getIoService()); ((VFSRepository) repository).setDescriptor(descriptor); boolean rootFolderExists = repository.directoryExists("/processes"); assertTrue(rootFolderExists); Collection<Asset> assets = repository.listAssets("/processes"); assertNotNull(assets); assertEquals(2, assets.size()); Asset<String> asset = 
assets.iterator().next(); assertEquals("bpmn2", asset.getAssetType()); assertEquals("BPMN2-ScriptTask.bpmn2", asset.getFullName()); assertEquals("BPMN2-ScriptTask", asset.getName()); assertEquals("/processes", asset.getAssetLocation()); } @Test public void testListSingleBinaryAsset() { Repository repository = new VFSRepository(producer.getIoService()); ((VFSRepository) repository).setDescriptor(descriptor); boolean rootFolderExists = repository.directoryExists("/images"); assertTrue(rootFolderExists); Collection<Asset> assets = repository.listAssets("/images"); assertNotNull(assets); assertEquals(1, assets.size()); Asset<String> asset = assets.iterator().next(); assertEquals("png", asset.getAssetType()); assertEquals("release-process.png", asset.getFullName()); assertEquals("release-process", asset.getName()); assertEquals("/images", asset.getAssetLocation()); } @Test public void testListNestedSingleTextAsset() { Repository repository = new VFSRepository(producer.getIoService()); ((VFSRepository) repository).setDescriptor(descriptor); boolean rootFolderExists = repository.directoryExists("/processes/nested"); assertTrue(rootFolderExists); Collection<Asset> assets = repository.listAssets("/processes/nested"); assertNotNull(assets); assertEquals(1, assets.size()); Asset<String> asset = assets.iterator().next(); assertEquals("bpmn2", asset.getAssetType()); assertEquals("BPMN2-UserTask.bpmn2", asset.getFullName()); assertEquals("BPMN2-UserTask", asset.getName()); assertEquals("/processes/nested", asset.getAssetLocation()); } @Test public void testLoadAssetFromPath() throws NoSuchFileException { Repository repository = new VFSRepository(producer.getIoService()); ((VFSRepository) repository).setDescriptor(descriptor); Asset<String> asset = repository.loadAssetFromPath("/processes/BPMN2-ScriptTask.bpmn2"); assertEquals("bpmn2", asset.getAssetType()); assertEquals("BPMN2-ScriptTask", asset.getName()); assertEquals("BPMN2-ScriptTask.bpmn2", asset.getFullName()); 
assertEquals("/processes", asset.getAssetLocation()); assertNotNull(asset.getAssetContent()); System.out.print(asset.getUniqueId()); } @Test public void testStoreSingleBinaryAsset() throws NoSuchFileException { Repository repository = new VFSRepository(producer.getIoService()); ((VFSRepository) repository).setDescriptor(descriptor); Collection<Asset> assets = repository.listAssets("/"); assertNotNull(assets); assertEquals(0, assets.size()); AssetBuilder builder = AssetBuilderFactory.getAssetBuilder(Asset.AssetType.Byte); builder.content("simple content".getBytes()).type("png").name("test").location("/"); String id = repository.createAsset(builder.getAsset()); assertNotNull(id); Asset<byte[]> asset = repository.loadAsset(id); assertEquals("png", asset.getAssetType()); assertEquals("test", asset.getName()); assertEquals("test.png", asset.getFullName()); assertEquals("/", asset.getAssetLocation()); assertFalse(asset.getAssetContent().length == 0); } @Test public void testStoreSingleBinaryAssetSpaceInName() throws NoSuchFileException { Repository repository = new VFSRepository(producer.getIoService()); ((VFSRepository) repository).setDescriptor(descriptor); Collection<Asset> assets = repository.listAssets("/"); assertNotNull(assets); assertEquals(0, assets.size()); AssetBuilder builder = AssetBuilderFactory.getAssetBuilder(Asset.AssetType.Byte); builder.content("simple content".getBytes()).type("png").name("test asset").location("/"); String id = repository.createAsset(builder.getAsset()); assertNotNull(id); Asset<byte[]> asset = repository.loadAsset(id); assertEquals("png", asset.getAssetType()); assertEquals("test asset", asset.getName()); assertEquals("test asset.png", asset.getFullName()); assertEquals("/", asset.getAssetLocation()); assertFalse(asset.getAssetContent().length == 0); } @Test public void testStoreSingleTextAsset() throws NoSuchFileException { Repository repository = new VFSRepository(producer.getIoService()); ((VFSRepository) 
repository).setDescriptor(descriptor); Collection<Asset> assets = repository.listAssets("/"); assertNotNull(assets); assertEquals(0, assets.size()); AssetBuilder builder = AssetBuilderFactory.getAssetBuilder(Asset.AssetType.Text); builder.content("simple content").type("txt").name("test").location("/"); String id = repository.createAsset(builder.getAsset()); assertNotNull(id); Asset<String> asset = repository.loadAsset(id); assertEquals("txt", asset.getAssetType()); assertEquals("test", asset.getName()); assertEquals("test.txt", asset.getFullName()); assertEquals("/", asset.getAssetLocation()); assertEquals("simple content", asset.getAssetContent()); } @Test public void testAssetExists() throws NoSuchFileException { Repository repository = new VFSRepository(producer.getIoService()); ((VFSRepository) repository).setDescriptor(descriptor); Collection<Asset> assets = repository.listAssets("/"); assertNotNull(assets); for (Asset aset : assets) { System.out.println(aset.getAssetLocation() + " " + aset.getFullName()); } assertEquals(0, assets.size()); AssetBuilder builder = AssetBuilderFactory.getAssetBuilder(Asset.AssetType.Text); builder.content("simple content").type("txt").name("test").location("/"); String id = repository.createAsset(builder.getAsset()); assertNotNull(id); boolean assetExists = repository.assetExists(id); assertTrue(assetExists); } @Test public void testListAssetsRecursively() { Repository repository = new VFSRepository(producer.getIoService()); ((VFSRepository) repository).setDescriptor(descriptor); AssetBuilder builder = AssetBuilderFactory.getAssetBuilder(Asset.AssetType.Text); builder.content("simple content").type("bpmn2").name("process").location("/1/2/3/4/5/6"); String id = repository.createAsset(builder.getAsset()); Collection<Asset> foundAsset = repository.listAssetsRecursively("/", new FilterByExtension("bpmn2")); assertNotNull(foundAsset); assertEquals(4, foundAsset.size()); } @Test public void testUpdateAsset() throws NoSuchFileException 
{ Repository repository = new VFSRepository(producer.getIoService()); ((VFSRepository) repository).setDescriptor(descriptor); AssetBuilder builder = AssetBuilderFactory.getAssetBuilder(Asset.AssetType.Text); builder.content("simple content").type("bpmn2").name("process").location("/"); String id = repository.createAsset(builder.getAsset()); Collection<Asset> foundAsset = repository.listAssets("/", new FilterByExtension("bpmn2")); assertNotNull(foundAsset); assertEquals(1, foundAsset.size()); builder.content("updated content").uniqueId(id); id = repository.updateAsset(builder.getAsset(), "", ""); foundAsset = repository.listAssetsRecursively("/", new FilterByFileName("process.bpmn2")); assertNotNull(foundAsset); assertEquals(1, foundAsset.size()); String content = ((Asset<String>) repository.loadAsset(id)).getAssetContent(); assertNotNull(content); assertEquals("updated content", content); } @Test public void testDeleteAsset() throws NoSuchFileException { Repository repository = new VFSRepository(producer.getIoService()); ((VFSRepository) repository).setDescriptor(descriptor); AssetBuilder builder = AssetBuilderFactory.getAssetBuilder(Asset.AssetType.Text); builder.content("simple content").type("bpmn2").name("process").location("/"); String id = repository.createAsset(builder.getAsset()); Collection<Asset> foundAsset = repository.listAssets("/", new FilterByExtension("bpmn2")); assertNotNull(foundAsset); assertEquals(1, foundAsset.size()); boolean assetExistsBeforeDelete = repository.assetExists(id); assertTrue(assetExistsBeforeDelete); boolean deleted = repository.deleteAsset(id); assertTrue(deleted); boolean assetExists = repository.assetExists(id); assertFalse(assetExists); } @Test public void testDeleteAssetFromPath() throws NoSuchFileException { Repository repository = new VFSRepository(producer.getIoService()); ((VFSRepository) repository).setDescriptor(descriptor); AssetBuilder builder = AssetBuilderFactory.getAssetBuilder(Asset.AssetType.Text); 
builder.content("simple content").type("bpmn2").name("process").location("/"); String id = repository.createAsset(builder.getAsset()); Collection<Asset> foundAsset = repository.listAssets("/", new FilterByExtension("bpmn2")); assertNotNull(foundAsset); assertEquals(1, foundAsset.size()); boolean assetExistsBeforeDelete = repository.assetExists(id); assertTrue(assetExistsBeforeDelete); boolean deleted = repository.deleteAssetFromPath("/process.bpmn2"); assertTrue(deleted); boolean assetExists = repository.assetExists(id); assertFalse(assetExists); } @Test public void testCopyAsset() throws NoSuchFileException { Repository repository = new VFSRepository(producer.getIoService()); ((VFSRepository) repository).setDescriptor(descriptor); AssetBuilder builder = AssetBuilderFactory.getAssetBuilder(Asset.AssetType.Text); builder.content("simple content").type("bpmn2").name("process").location("/source"); String id = repository.createAsset(builder.getAsset()); Collection<Asset> foundAsset = repository.listAssets("/source", new FilterByExtension("bpmn2")); assertNotNull(foundAsset); assertEquals(1, foundAsset.size()); boolean assetExistsBeforeDelete = repository.assetExists(id); assertTrue(assetExistsBeforeDelete); boolean copied = repository.copyAsset(id, "/target"); assertTrue(copied); foundAsset = repository.listAssets("/target", new FilterByExtension("bpmn2")); assertNotNull(foundAsset); assertEquals(1, foundAsset.size()); boolean assetExists = repository.assetExists("/target/process.bpmn2"); assertTrue(assetExists); } @Ignore // git based vfs does not yet support move @Test public void testMoveAsset() throws NoSuchFileException { Repository repository = new VFSRepository(producer.getIoService()); ((VFSRepository) repository).setDescriptor(descriptor); AssetBuilder builder = AssetBuilderFactory.getAssetBuilder(Asset.AssetType.Text); builder.content("simple content").type("bpmn2").name("process").location("/source"); String id = repository.createAsset(builder.getAsset()); 
// NOTE(review): this chunk begins inside a move-asset test whose opening lines are
// above the visible region; the statements below are its verification tail.
// The asset was created under /source and is moved (un-renamed) to /target.
    Collection<Asset> foundAsset = repository.listAssets("/source", new FilterByExtension("bpmn2"));
    assertNotNull(foundAsset);
    assertEquals(1, foundAsset.size());
    boolean sourceAssetExists = repository.assetExists(id);
    assertTrue(sourceAssetExists);
    // Third argument null keeps the original file name during the move.
    boolean copied = repository.moveAsset(id, "/target", null);
    assertTrue(copied);
    foundAsset = repository.listAssets("/target", new FilterByExtension("bpmn2"));
    assertNotNull(foundAsset);
    assertEquals(1, foundAsset.size());
    boolean assetExists = repository.assetExists("/target/process.bpmn2");
    assertTrue(assetExists);
    // The source location must no longer contain the asset after the move.
    foundAsset = repository.listAssets("/source", new FilterByExtension("bpmn2"));
    assertNotNull(foundAsset);
    assertEquals(0, foundAsset.size());
    sourceAssetExists = repository.assetExists(id);
    assertFalse(sourceAssetExists);
  }

  /** Moves an asset to another folder and renames it in the same operation. */
  @Ignore // git based vfs does not yet support move
  @Test
  public void testMoveAndRenameAsset() throws NoSuchFileException {
    Repository repository = new VFSRepository(producer.getIoService());
    ((VFSRepository) repository).setDescriptor(descriptor);
    AssetBuilder builder = AssetBuilderFactory.getAssetBuilder(Asset.AssetType.Text);
    builder.content("simple content").type("bpmn2").name("process").location("/source");
    String id = repository.createAsset(builder.getAsset());
    Collection<Asset> foundAsset = repository.listAssets("/source", new FilterByExtension("bpmn2"));
    assertNotNull(foundAsset);
    assertEquals(1, foundAsset.size());
    boolean sourceAssetExists = repository.assetExists(id);
    assertTrue(sourceAssetExists);
    // Move to /target and rename in a single call.
    boolean copied = repository.moveAsset(id, "/target", "renamed.bpmn2");
    assertTrue(copied);
    foundAsset = repository.listAssets("/target", new FilterByExtension("bpmn2"));
    assertNotNull(foundAsset);
    assertEquals(1, foundAsset.size());
    boolean assetExists = repository.assetExists("/target/renamed.bpmn2");
    assertTrue(assetExists);
    foundAsset = repository.listAssets("/source", new FilterByExtension("bpmn2"));
    assertNotNull(foundAsset);
    assertEquals(0, foundAsset.size());
    sourceAssetExists = repository.assetExists(id);
    assertFalse(sourceAssetExists);
  }

  /** Renames an asset in place: same folder, new file name. */
  @Ignore // git based vfs does not yet support move
  @Test
  public void testRenameAsset() throws NoSuchFileException {
    Repository repository = new VFSRepository(producer.getIoService());
    ((VFSRepository) repository).setDescriptor(descriptor);
    AssetBuilder builder = AssetBuilderFactory.getAssetBuilder(Asset.AssetType.Text);
    builder.content("simple content").type("bpmn2").name("process").location("/source");
    String id = repository.createAsset(builder.getAsset());
    Collection<Asset> foundAsset = repository.listAssets("/source", new FilterByExtension("bpmn2"));
    assertNotNull(foundAsset);
    assertEquals(1, foundAsset.size());
    boolean sourceAssetExists = repository.assetExists(id);
    assertTrue(sourceAssetExists);
    // A "move" into the same folder with a new name is a rename.
    boolean copied = repository.moveAsset(id, "/source", "renamed.bpmn2");
    assertTrue(copied);
    foundAsset = repository.listAssets("/source", new FilterByExtension("bpmn2"));
    assertNotNull(foundAsset);
    assertEquals(1, foundAsset.size());
    boolean assetExists = repository.assetExists("/source/renamed.bpmn2");
    assertTrue(assetExists);
    // The folder still holds exactly one bpmn2 asset (the renamed one)...
    foundAsset = repository.listAssets("/source", new FilterByExtension("bpmn2"));
    assertNotNull(foundAsset);
    assertEquals(1, foundAsset.size());
    // ...and the old id no longer resolves.
    sourceAssetExists = repository.assetExists(id);
    assertFalse(sourceAssetExists);
  }

  /** Copies a directory with its assets; the source directory must remain. */
  // disabling this test for now
  // @Test
  public void testCopyDirectory() throws NoSuchFileException {
    Repository repository = new VFSRepository(producer.getIoService());
    ((VFSRepository) repository).setDescriptor(descriptor);
    Directory sourceDir = repository.createDirectory("/source");
    AssetBuilder builder = AssetBuilderFactory.getAssetBuilder(Asset.AssetType.Text);
    builder.content("simple content").type("bpmn2").name("process").location("/source");
    String id = repository.createAsset(builder.getAsset());
    Collection<Asset> foundAsset = repository.listAssets("/source", new FilterByExtension("bpmn2"));
    assertNotNull(foundAsset);
    assertEquals(1, foundAsset.size());
    boolean assetExistsBeforeDelete = repository.assetExists(id);
    assertTrue(assetExistsBeforeDelete);
    boolean copied = repository.copyDirectory("/source", "/target");
    assertTrue(copied);
    // The copy lands under /target/source, mirroring the source folder name.
    foundAsset = repository.listAssets("/target/source", new FilterByExtension("bpmn2"));
    assertNotNull(foundAsset);
    assertEquals(1, foundAsset.size());
    boolean assetExists = repository.assetExists("/target/source/process.bpmn2");
    assertTrue(assetExists);
    // Copy (unlike move) leaves the original directory in place.
    boolean copiedDirectoryExists = repository.directoryExists("/source");
    assertTrue(copiedDirectoryExists);
  }

  /** Moves a directory with its assets; the source directory must disappear. */
  @Ignore // git based vfs does not yet support move
  @Test
  public void testMoveDirectory() throws NoSuchFileException {
    Repository repository = new VFSRepository(producer.getIoService());
    ((VFSRepository) repository).setDescriptor(descriptor);
    Directory sourceDir = repository.createDirectory("/source");
    AssetBuilder builder = AssetBuilderFactory.getAssetBuilder(Asset.AssetType.Text);
    builder.content("simple content").type("bpmn2").name("process").location("/source");
    String id = repository.createAsset(builder.getAsset());
    Collection<Asset> foundAsset = repository.listAssets("/source", new FilterByExtension("bpmn2"));
    assertNotNull(foundAsset);
    assertEquals(1, foundAsset.size());
    boolean assetExistsBeforeDelete = repository.assetExists(id);
    assertTrue(assetExistsBeforeDelete);
    boolean copied = repository.moveDirectory("/source", "/target", null);
    assertTrue(copied);
    foundAsset = repository.listAssets("/target/source", new FilterByExtension("bpmn2"));
    assertNotNull(foundAsset);
    assertEquals(1, foundAsset.size());
    boolean assetExists = repository.assetExists("/target/source/process.bpmn2");
    assertTrue(assetExists);
    // Move (unlike copy) removes the original directory.
    boolean movedDirectoryExists = repository.directoryExists("/source");
    assertFalse(movedDirectoryExists);
  }

  /** Moves an empty directory to the repository root under a new name. */
  @Ignore // git based vfs does not yet support move
  @Test
  public void testMoveEmptyDirectory() throws NoSuchFileException {
    Repository repository = new VFSRepository(producer.getIoService());
    ((VFSRepository) repository).setDescriptor(descriptor);
    Directory sourceDir = repository.createDirectory("/source");
    boolean directoryExists = repository.directoryExists(sourceDir.getLocation() + sourceDir.getName());
    assertTrue(directoryExists);
    Collection<Asset> foundAsset = repository.listAssets("/source", new FilterByExtension("bpmn2"));
    assertNotNull(foundAsset);
    assertEquals(0, foundAsset.size());
    // Move /source to the root, renaming it to "target".
    boolean copied = repository.moveDirectory("/source", "/", "target");
    assertTrue(copied);
    boolean movedDirectoryExists = repository.directoryExists("/source");
    assertFalse(movedDirectoryExists);
    movedDirectoryExists = repository.directoryExists("/target");
    assertTrue(movedDirectoryExists);
    foundAsset = repository.listAssets("/target", new FilterByExtension("bpmn2"));
    assertNotNull(foundAsset);
    assertEquals(0, foundAsset.size());
  }

  /**
   * Creating the global directory for a new project installs the 25 form-template
   * ("fw") assets; a second invocation must be a no-op (no duplicates and no
   * FileAlreadyExistsException escaping).
   */
  @Test
  public void testCreateGlobalDirOnNewProject() throws FileAlreadyExistsException {
    VFSRepository repository = new VFSRepository(producer.getIoService());
    repository.setDescriptor(descriptor);
    Directory testProjectDir = repository.createDirectory("/mytestproject");
    final KieProject mockProject = mock(KieProject.class);
    when(mockProject.getRootPath())
        .thenReturn(
            Paths.convert(
                producer
                    .getIoService()
                    .get(URI.create(decodeUniqueId(testProjectDir.getUniqueId())))));
    NewProjectEvent event = mock(NewProjectEvent.class);
    when(event.getProject()).thenReturn(mockProject);
    repository.createGlobalDirOnNewProject(event);
    boolean globalDirectoryExists = repository.directoryExists("/mytestproject/global");
    assertTrue(globalDirectoryExists);
    Collection<Asset> foundFormTemplates =
        repository.listAssets("/mytestproject/global", new FilterByExtension("fw"));
    assertNotNull(foundFormTemplates);
    assertEquals(25, foundFormTemplates.size());
    // call again to try to trigger FileAlreadyExistsException
    repository.createGlobalDirOnNewProject(event);
    boolean globalDirectoryStillExists = repository.directoryExists("/mytestproject/global");
    assertTrue(globalDirectoryStillExists);
    // no new files or copies were added
    Collection<Asset> foundFormTemplatesAfterSecondCall =
        repository.listAssets("/mytestproject/global", new FilterByExtension("fw"));
    assertNotNull(foundFormTemplatesAfterSecondCall);
    assertEquals(25, foundFormTemplatesAfterSecondCall.size());
  }

  /**
   * Decodes a (possibly Base64-encoded) asset unique id back into an encoded URI
   * string. Falls through to encoding the raw id when it is not Base64 or when
   * UTF-8 decoding fails (UTF-8 is always present on the JVM, so that catch
   * branch is effectively unreachable).
   */
  private String decodeUniqueId(String uniqueId) {
    if (Base64Backport.isBase64(uniqueId)) {
      byte[] decoded = Base64.decodeBase64(uniqueId);
      try {
        String uri = new String(decoded, "UTF-8");
        return UriUtils.encode(uri);
      } catch (UnsupportedEncodingException e) {
        // Intentionally ignored: fall through and encode the raw id instead.
      }
    }
    return UriUtils.encode(uniqueId);
  }
}
public class JbpmPreprocessingUnitVFSGitTest { // TODO change it to generic independent path private static final String REPOSITORY_ROOT = "designer-playground"; private static final String VFS_REPOSITORY_ROOT = "git://" + REPOSITORY_ROOT; private static final String USERNAME = "******"; private static final String PASSWORD = "******"; private static final String ORIGIN_URL = "https://github.com/mswiderski/designer-playground.git"; private static final String FETCH_COMMAND = "?fetch"; private JbpmProfileImpl profile; private static String gitLocalClone = System.getProperty("java.io.tmpdir") + "git-repo"; private static Map<String, String> env = new HashMap<String, String>(); private static int counter = -100; private RepositoryDescriptor descriptor; private VFSFileSystemProducer producer = new VFSFileSystemProducer(); @BeforeClass public static void prepare() { env.put("username", USERNAME); env.put("password", PASSWORD); env.put("origin", ORIGIN_URL); env.put("fetch.cmd", FETCH_COMMAND); System.setProperty("org.kie.nio.git.dir", gitLocalClone); } @AfterClass public static void cleanup() { System.clearProperty("org.kie.nio.git.dir"); } @Before public void setup() { profile = new JbpmProfileImpl(); producer = new VFSFileSystemProducer(); env.put("repository.root", VFS_REPOSITORY_ROOT); env.put("repository.globaldir", "/global"); descriptor = producer.produceFileSystem(env); } private void deleteFiles(File directory) { for (File file : directory.listFiles()) { if (file.isDirectory()) { deleteFiles(file); } file.delete(); } } @After public void teardown() { File repo = new File(gitLocalClone); if (repo.exists()) { deleteFiles(repo); } repo.delete(); repo = new File(".niogit"); if (repo.exists()) { deleteFiles(repo); } repo.delete(); counter++; } @Test public void testProprocess() { Repository repository = new VFSRepository(producer.getIoService()); ((VFSRepository) repository).setDescriptor(descriptor); profile.setRepository(repository); // prepare folders that will 
be used repository.createDirectory("/myprocesses"); repository.createDirectory("/global"); // prepare process asset that will be used to preprocess AssetBuilder builder = AssetBuilderFactory.getAssetBuilder(Asset.AssetType.Text); builder.content("bpmn2 content").type("bpmn2").name("process").location("/myprocesses"); String uniqueId = repository.createAsset(builder.getAsset()); // create instance of preprocessing unit JbpmPreprocessingUnit preprocessingUnitVFS = new JbpmPreprocessingUnit(new TestServletContext(), "/", null); // setup parameters Map<String, String> params = new HashMap<String, String>(); params.put("uuid", uniqueId); // run preprocess preprocessingUnitVFS.preprocess( new TestHttpServletRequest(params), null, new TestIDiagramProfile(repository), null); // validate results Collection<Asset> globalAssets = repository.listAssets("/global"); assertNotNull(globalAssets); assertEquals(30, globalAssets.size()); repository.assetExists("/global/backboneformsinclude.fw"); repository.assetExists("/global/backbonejsinclude.fw"); repository.assetExists("/global/cancelbutton.fw"); repository.assetExists("/global/checkbox.fw"); repository.assetExists("/global/customeditors.json"); repository.assetExists("/global/div.fw"); repository.assetExists("/global/dropdownmenu.fw"); repository.assetExists("/global/fieldset.fw"); repository.assetExists("/global/form.fw"); repository.assetExists("/global/handlebarsinclude.fw"); repository.assetExists("/global/htmlbasepage.fw"); repository.assetExists("/global/image.fw"); repository.assetExists("/global/jqueryinclude.fw"); repository.assetExists("/global/jquerymobileinclude.fw"); repository.assetExists("/global/link.fw"); repository.assetExists("/global/mobilebasepage.fw"); repository.assetExists("/global/orderedlist.fw"); repository.assetExists("/global/passwordfield.fw"); repository.assetExists("/global/radiobutton.fw"); repository.assetExists("/global/script.fw"); repository.assetExists("/global/submitbutton.fw"); 
repository.assetExists("/global/table.fw"); repository.assetExists("/global/textarea.fw"); repository.assetExists("/global/textfield.fw"); repository.assetExists("/global/themes.json"); repository.assetExists("/global/unorderedlist.fw"); repository.assetExists("/global/defaultemailicon.gif"); repository.assetExists("/global/defaultlogicon.gif"); repository.assetExists("/global/defaultservicenodeicon.png"); repository.assetExists("/global/.gitignore"); Collection<Asset> defaultStuff = repository.listAssets("/myprocesses"); assertNotNull(defaultStuff); assertEquals(3, defaultStuff.size()); repository.assetExists("/myprocesses/WorkDefinitions.wid"); // this is the process asset that was created for the test but let's check it anyway repository.assetExists("/myprocesses/process.bpmn2"); repository.assetExists("/myprocesses/.gitignore"); } }
@Before public void setUp() { webUrl = System.getProperty("integration.url"); webClient = new WebClient(); }
/** Context for type related api.meta tests. */
public class TypeUniverse {

  /** Unsafe instance obtained via getUnsafe() or, failing that, via reflection. */
  public final Unsafe unsafe;

  /**
   * Host Java specification version (e.g. 1.8, 9, 11).
   * FIX(review): parseDouble instead of Double.valueOf — the field is a primitive
   * double, so valueOf only boxed and immediately unboxed.
   */
  public static final double JAVA_VERSION =
      Double.parseDouble(System.getProperty("java.specification.version"));

  public final MetaAccessProvider metaAccess;
  public final ConstantReflectionProvider constantReflection;
  public final SnippetReflectionProvider snippetReflection;

  /** Transitive closure of classes reachable from the seed set in the constructor. */
  public final Collection<Class<?>> classes = new HashSet<>();
  /** Cache of component type -> array type; filled by addClass and getArrayClass. */
  public final Map<Class<?>, Class<?>> arrayClasses = new HashMap<>();
  /** Sample constants: Constant's public static final fields plus ad-hoc values. */
  public final List<Constant> constants = new ArrayList<>();

  public TypeUniverse() {
    Providers providers = Graal.getRequiredCapability(RuntimeProvider.class).getHostBackend().getProviders();
    metaAccess = providers.getMetaAccess();
    constantReflection = providers.getConstantReflection();
    snippetReflection = Graal.getRequiredCapability(SnippetReflectionProvider.class);
    Unsafe theUnsafe = null;
    try {
      theUnsafe = Unsafe.getUnsafe();
    } catch (Exception e) {
      // getUnsafe() rejects untrusted callers; fall back to reading the
      // singleton field reflectively.
      try {
        Field theUnsafeField = Unsafe.class.getDeclaredField("theUnsafe");
        theUnsafeField.setAccessible(true);
        theUnsafe = (Unsafe) theUnsafeField.get(null);
      } catch (Exception e1) {
        throw (InternalError) new InternalError("unable to initialize unsafe").initCause(e1);
      }
    }
    unsafe = theUnsafe;
    // Seed set: primitives plus a representative slice of the JDK collections.
    Class<?>[] initialClasses = {
      void.class,
      boolean.class,
      byte.class,
      short.class,
      char.class,
      int.class,
      float.class,
      long.class,
      double.class,
      Object.class,
      Class.class,
      ClassLoader.class,
      String.class,
      Serializable.class,
      Cloneable.class,
      Test.class,
      TestMetaAccessProvider.class,
      List.class,
      Collection.class,
      Map.class,
      Queue.class,
      HashMap.class,
      LinkedHashMap.class,
      IdentityHashMap.class,
      AbstractCollection.class,
      AbstractList.class,
      ArrayList.class
    };
    for (Class<?> c : initialClasses) {
      addClass(c);
    }
    // Harvest every public static final Constant declared on Constant itself.
    for (Field f : Constant.class.getDeclaredFields()) {
      int mods = f.getModifiers();
      if (f.getType() == Constant.class
          && Modifier.isPublic(mods)
          && Modifier.isStatic(mods)
          && Modifier.isFinal(mods)) {
        try {
          Constant c = (Constant) f.get(null);
          if (c != null) {
            constants.add(c);
          }
        } catch (Exception ignored) {
          // FIX(review): deliberately best-effort — skip constants that cannot
          // be read reflectively rather than failing universe construction.
        }
      }
    }
    // One 42-element array constant per non-array, non-void class in the universe.
    for (Class<?> c : classes) {
      if (c != void.class && !c.isArray()) {
        constants.add(snippetReflection.forObject(Array.newInstance(c, 42)));
      }
    }
    constants.add(snippetReflection.forObject(new ArrayList<>()));
    constants.add(snippetReflection.forObject(new IdentityHashMap<>()));
    constants.add(snippetReflection.forObject(new LinkedHashMap<>()));
    constants.add(snippetReflection.forObject(new TreeMap<>()));
    constants.add(snippetReflection.forObject(new ArrayDeque<>()));
    constants.add(snippetReflection.forObject(new LinkedList<>()));
    constants.add(snippetReflection.forObject("a string"));
    constants.add(snippetReflection.forObject(42));
    constants.add(snippetReflection.forObject(String.class));
    constants.add(snippetReflection.forObject(String[].class));
  }

  /** Returns (and caches) the array class whose component type is {@code componentType}. */
  public synchronized Class<?> getArrayClass(Class<?> componentType) {
    Class<?> arrayClass = arrayClasses.get(componentType);
    if (arrayClass == null) {
      arrayClass = Array.newInstance(componentType, 0).getClass();
      arrayClasses.put(componentType, arrayClass);
    }
    return arrayClass;
  }

  /** Number of array dimensions of {@code c} (0 for non-array types). */
  public static int dimensions(Class<?> c) {
    if (c.getComponentType() != null) {
      return 1 + dimensions(c.getComponentType());
    }
    return 0;
  }

  /**
   * Adds {@code c} and everything reachable from it: superclass, interfaces,
   * nested classes, method signature types, and its array type (up to two
   * dimensions deep).
   */
  private void addClass(Class<?> c) {
    if (classes.add(c)) {
      if (c.getSuperclass() != null) {
        addClass(c.getSuperclass());
      }
      for (Class<?> sc : c.getInterfaces()) {
        addClass(sc);
      }
      for (Class<?> dc : c.getDeclaredClasses()) {
        addClass(dc);
      }
      for (Method m : c.getDeclaredMethods()) {
        addClass(m.getReturnType());
        for (Class<?> p : m.getParameterTypes()) {
          addClass(p);
        }
      }
      if (c != void.class && dimensions(c) < 2) {
        Class<?> arrayClass = Array.newInstance(c, 0).getClass();
        arrayClasses.put(c, arrayClass);
        addClass(arrayClass);
      }
    }
  }
}
public abstract class BaseTest { public static final String newline = System.getProperty("line.separator"); public static final String pathSep = System.getProperty("path.separator"); /** Build up the full classpath we need, including the surefire path (if present) */ public static final String CLASSPATH = System.getProperty("java.class.path"); public String tmpdir = null; /** reset during setUp and set to true if we find a problem */ protected boolean lastTestFailed = false; /** * If error during parser execution, store stderr here; can't return stdout and stderr. This * doesn't trap errors from running antlr. */ protected String stderrDuringParse; @Before public void setUp() throws Exception { lastTestFailed = false; // hope for the best, but set to true in asserts that fail // new output dir for each test tmpdir = new File( System.getProperty("java.io.tmpdir"), getClass().getSimpleName() + "-" + System.currentTimeMillis()) .getAbsolutePath(); // tmpdir = "/tmp"; } @After public void tearDown() throws Exception { // remove tmpdir if no error. 
if (!lastTestFailed) eraseTempDir(); } protected org.antlr.v4.Tool newTool(String[] args) { Tool tool = new Tool(args); return tool; } protected Tool newTool() { org.antlr.v4.Tool tool = new Tool(new String[] {"-o", tmpdir}); return tool; } ATN createATN(Grammar g) { if (g.atn != null) return g.atn; semanticProcess(g); ParserATNFactory f = new ParserATNFactory(g); if (g.isLexer()) f = new LexerATNFactory((LexerGrammar) g); g.atn = f.createATN(); return g.atn; } protected void semanticProcess(Grammar g) { if (g.ast != null && !g.ast.hasErrors) { System.out.println(g.ast.toStringTree()); Tool antlr = new Tool(); SemanticPipeline sem = new SemanticPipeline(g); sem.process(); if (g.getImportedGrammars() != null) { // process imported grammars (if any) for (Grammar imp : g.getImportedGrammars()) { antlr.processNonCombinedGrammar(imp, false); } } } } public DFA createDFA(Grammar g, DecisionState s) { // PredictionDFAFactory conv = new PredictionDFAFactory(g, s); // DFA dfa = conv.createDFA(); // conv.issueAmbiguityWarnings(); // System.out.print("DFA="+dfa); // return dfa; return null; } // public void minimizeDFA(DFA dfa) { // DFAMinimizer dmin = new DFAMinimizer(dfa); // dfa.minimized = dmin.minimize(); // } List<Integer> getTypesFromString(Grammar g, String expecting) { List<Integer> expectingTokenTypes = new ArrayList<Integer>(); if (expecting != null && !expecting.trim().equals("")) { for (String tname : expecting.replace(" ", "").split(",")) { int ttype = g.getTokenType(tname); expectingTokenTypes.add(ttype); } } return expectingTokenTypes; } public List<Integer> getTokenTypes(String input, LexerATNSimulator lexerATN) { ANTLRStringStream in = new ANTLRStringStream(input); List<Integer> tokenTypes = new ArrayList<Integer>(); int ttype = 0; do { ttype = lexerATN.matchATN(in); tokenTypes.add(ttype); } while (ttype != Token.EOF); return tokenTypes; } public List<String> getTokenTypes(LexerGrammar lg, ATN atn, CharStream input, boolean adaptive) { LexerATNSimulator 
interp = new LexerATNSimulator(atn); List<String> tokenTypes = new ArrayList<String>(); int ttype; do { if (adaptive) ttype = interp.match(input, Lexer.DEFAULT_MODE); else ttype = interp.matchATN(input); if (ttype == Token.EOF) tokenTypes.add("EOF"); else { tokenTypes.add(lg.typeToTokenList.get(ttype)); } } while (ttype != Token.EOF); return tokenTypes; } List<ANTLRMessage> checkRuleDFA(String gtext, String ruleName, String expecting) throws Exception { ErrorQueue equeue = new ErrorQueue(); Grammar g = new Grammar(gtext, equeue); ATN atn = createATN(g); ATNState s = atn.ruleToStartState[g.getRule(ruleName).index]; if (s == null) { System.err.println("no such rule: " + ruleName); return null; } ATNState t = s.transition(0).target; if (!(t instanceof DecisionState)) { System.out.println(ruleName + " has no decision"); return null; } DecisionState blk = (DecisionState) t; checkRuleDFA(g, blk, expecting); return equeue.all; } List<ANTLRMessage> checkRuleDFA(String gtext, int decision, String expecting) throws Exception { ErrorQueue equeue = new ErrorQueue(); Grammar g = new Grammar(gtext, equeue); ATN atn = createATN(g); DecisionState blk = atn.decisionToState.get(decision); checkRuleDFA(g, blk, expecting); return equeue.all; } void checkRuleDFA(Grammar g, DecisionState blk, String expecting) throws Exception { DFA dfa = createDFA(g, blk); String result = null; if (dfa != null) result = dfa.toString(); assertEquals(expecting, result); } List<ANTLRMessage> checkLexerDFA(String gtext, String expecting) throws Exception { return checkLexerDFA(gtext, LexerGrammar.DEFAULT_MODE_NAME, expecting); } List<ANTLRMessage> checkLexerDFA(String gtext, String modeName, String expecting) throws Exception { ErrorQueue equeue = new ErrorQueue(); LexerGrammar g = new LexerGrammar(gtext, equeue); g.atn = createATN(g); // LexerATNToDFAConverter conv = new LexerATNToDFAConverter(g); // DFA dfa = conv.createDFA(modeName); // g.setLookaheadDFA(0, dfa); // only one decision to worry about // 
// String result = null; // if ( dfa!=null ) result = dfa.toString(); // assertEquals(expecting, result); // // return equeue.all; return null; } /** * Wow! much faster than compiling outside of VM. Finicky though. Had rules called r and modulo. * Wouldn't compile til I changed to 'a'. */ protected boolean compile(String fileName) { String classpathOption = "-classpath"; String[] args = new String[] { "javac", "-d", tmpdir, classpathOption, tmpdir + pathSep + CLASSPATH, tmpdir + "/" + fileName }; String cmdLine = "javac" + " -d " + tmpdir + " " + classpathOption + " " + tmpdir + pathSep + CLASSPATH + " " + fileName; // System.out.println("compile: "+cmdLine); File f = new File(tmpdir, fileName); JavaCompiler compiler = ToolProvider.getSystemJavaCompiler(); // DiagnosticCollector<JavaFileObject> diagnostics = // new DiagnosticCollector<JavaFileObject>(); StandardJavaFileManager fileManager = compiler.getStandardFileManager(null, null, null); Iterable<? extends JavaFileObject> compilationUnits = fileManager.getJavaFileObjectsFromFiles(Arrays.asList(f)); Iterable<String> compileOptions = Arrays.asList(new String[] {"-d", tmpdir, "-cp", tmpdir + pathSep + CLASSPATH}); JavaCompiler.CompilationTask task = compiler.getTask(null, fileManager, null, compileOptions, null, compilationUnits); boolean ok = task.call(); try { fileManager.close(); } catch (IOException ioe) { ioe.printStackTrace(System.err); } // List<String> errors = new ArrayList<String>(); // for (Diagnostic diagnostic : diagnostics.getDiagnostics()) { // errors.add( // String.valueOf(diagnostic.getLineNumber())+ // ": " + diagnostic.getMessage(null)); // } // if ( errors.size()>0 ) { // System.err.println("compile stderr from: "+cmdLine); // System.err.println(errors); // return false; // } return ok; /* File outputDir = new File(tmpdir); try { Process process = Runtime.getRuntime().exec(args, null, outputDir); StreamVacuum stdout = new StreamVacuum(process.getInputStream()); StreamVacuum stderr = new 
StreamVacuum(process.getErrorStream()); stdout.start(); stderr.start(); process.waitFor(); stdout.join(); stderr.join(); if ( stdout.toString().length()>0 ) { System.err.println("compile stdout from: "+cmdLine); System.err.println(stdout); } if ( stderr.toString().length()>0 ) { System.err.println("compile stderr from: "+cmdLine); System.err.println(stderr); } int ret = process.exitValue(); return ret==0; } catch (Exception e) { System.err.println("can't exec compilation"); e.printStackTrace(System.err); return false; } */ } /** Return true if all is ok, no errors */ protected boolean antlr( String fileName, String grammarFileName, String grammarStr, boolean debug) { boolean allIsWell = true; mkdir(tmpdir); writeFile(tmpdir, fileName, grammarStr); try { final List options = new ArrayList(); if (debug) { options.add("-debug"); } options.add("-o"); options.add(tmpdir); options.add("-lib"); options.add(tmpdir); options.add(new File(tmpdir, grammarFileName).toString()); final String[] optionsA = new String[options.size()]; options.toArray(optionsA); ErrorQueue equeue = new ErrorQueue(); Tool antlr = newTool(optionsA); antlr.addListener(equeue); antlr.processGrammarsOnCommandLine(); if (equeue.errors.size() > 0) { allIsWell = false; System.err.println("antlr reports errors from " + options); for (int i = 0; i < equeue.errors.size(); i++) { ANTLRMessage msg = (ANTLRMessage) equeue.errors.get(i); System.err.println(msg); } System.out.println("!!!\ngrammar:"); System.out.println(grammarStr); System.out.println("###"); } } catch (Exception e) { allIsWell = false; System.err.println("problems building grammar: " + e); e.printStackTrace(System.err); } return allIsWell; } protected String execLexer( String grammarFileName, String grammarStr, String lexerName, String input) { return execLexer(grammarFileName, grammarStr, lexerName, input, false); } protected String execLexer( String grammarFileName, String grammarStr, String lexerName, String input, boolean showDFA) { 
rawGenerateAndBuildRecognizer(grammarFileName, grammarStr, null, lexerName, false); writeFile(tmpdir, "input", input); writeLexerTestFile(lexerName, showDFA); compile("Test.java"); String output = execClass("Test"); if (stderrDuringParse != null && stderrDuringParse.length() > 0) { System.err.println(stderrDuringParse); } return output; } protected String execParser( String grammarFileName, String grammarStr, String parserName, String lexerName, String startRuleName, String input, boolean debug) { rawGenerateAndBuildRecognizer(grammarFileName, grammarStr, parserName, lexerName, debug); writeFile(tmpdir, "input", input); boolean parserBuildsTrees = grammarStr.indexOf("output=AST") >= 0 || grammarStr.indexOf("output = AST") >= 0; boolean parserBuildsTemplate = grammarStr.indexOf("output=template") >= 0 || grammarStr.indexOf("output = template") >= 0; return rawExecRecognizer( parserName, null, lexerName, startRuleName, null, parserBuildsTrees, parserBuildsTemplate, false, debug); } protected String execTreeParser( String parserGrammarFileName, String parserGrammarStr, String parserName, String treeParserGrammarFileName, String treeParserGrammarStr, String treeParserName, String lexerName, String parserStartRuleName, String treeParserStartRuleName, String input) { return execTreeParser( parserGrammarFileName, parserGrammarStr, parserName, treeParserGrammarFileName, treeParserGrammarStr, treeParserName, lexerName, parserStartRuleName, treeParserStartRuleName, input, false); } protected String execTreeParser( String parserGrammarFileName, String parserGrammarStr, String parserName, String treeParserGrammarFileName, String treeParserGrammarStr, String treeParserName, String lexerName, String parserStartRuleName, String treeParserStartRuleName, String input, boolean debug) { // build the parser rawGenerateAndBuildRecognizer( parserGrammarFileName, parserGrammarStr, parserName, lexerName, debug); // build the tree parser rawGenerateAndBuildRecognizer( 
treeParserGrammarFileName, treeParserGrammarStr, treeParserName, lexerName, debug); writeFile(tmpdir, "input", input); boolean parserBuildsTrees = parserGrammarStr.indexOf("output=AST") >= 0 || parserGrammarStr.indexOf("output = AST") >= 0; boolean treeParserBuildsTrees = treeParserGrammarStr.indexOf("output=AST") >= 0 || treeParserGrammarStr.indexOf("output = AST") >= 0; boolean parserBuildsTemplate = parserGrammarStr.indexOf("output=template") >= 0 || parserGrammarStr.indexOf("output = template") >= 0; return rawExecRecognizer( parserName, treeParserName, lexerName, parserStartRuleName, treeParserStartRuleName, parserBuildsTrees, parserBuildsTemplate, treeParserBuildsTrees, debug); } /** Return true if all is well */ protected boolean rawGenerateAndBuildRecognizer( String grammarFileName, String grammarStr, String parserName, String lexerName, boolean debug) { boolean allIsWell = antlr(grammarFileName, grammarFileName, grammarStr, debug); boolean ok; if (lexerName != null) { ok = compile(lexerName + ".java"); if (!ok) { allIsWell = false; } } if (parserName != null) { ok = compile(parserName + ".java"); if (!ok) { allIsWell = false; } } return allIsWell; } protected String rawExecRecognizer( String parserName, String treeParserName, String lexerName, String parserStartRuleName, String treeParserStartRuleName, boolean parserBuildsTrees, boolean parserBuildsTemplate, boolean treeParserBuildsTrees, boolean debug) { this.stderrDuringParse = null; if (treeParserBuildsTrees && parserBuildsTrees) { writeTreeAndTreeTestFile( parserName, treeParserName, lexerName, parserStartRuleName, treeParserStartRuleName, debug); } else if (parserBuildsTrees) { writeTreeTestFile( parserName, treeParserName, lexerName, parserStartRuleName, treeParserStartRuleName, debug); } else if (parserBuildsTemplate) { writeTemplateTestFile(parserName, lexerName, parserStartRuleName, debug); } else if (parserName == null) { writeLexerTestFile(lexerName, false); } else { writeTestFile(parserName, 
lexerName, parserStartRuleName, debug); } compile("Test.java"); return execClass("Test"); } public String execRecognizer() { try { String inputFile = new File(tmpdir, "input").getAbsolutePath(); String[] args = new String[] {"java", "-classpath", tmpdir + pathSep + CLASSPATH, "Test", inputFile}; // String cmdLine = "java -classpath "+CLASSPATH+pathSep+tmpdir+" Test " + new File(tmpdir, // "input").getAbsolutePath(); // System.out.println("execParser: "+cmdLine); Process process = Runtime.getRuntime().exec(args, null, new File(tmpdir)); StreamVacuum stdoutVacuum = new StreamVacuum(process.getInputStream()); StreamVacuum stderrVacuum = new StreamVacuum(process.getErrorStream()); stdoutVacuum.start(); stderrVacuum.start(); process.waitFor(); stdoutVacuum.join(); stderrVacuum.join(); String output = null; output = stdoutVacuum.toString(); if (stderrVacuum.toString().length() > 0) { this.stderrDuringParse = stderrVacuum.toString(); System.err.println("exec stderrVacuum: " + stderrVacuum); } return output; } catch (Exception e) { System.err.println("can't exec recognizer"); e.printStackTrace(System.err); } return null; } public String execClass(String className) { /* HOW TO GET STDOUT? 
try { ClassLoader cl_new = new DirectoryLoader(new File(tmpdir)); Class compiledClass = cl_new.loadClass(className); Method m = compiledClass.getMethod("main"); m.invoke(null); } catch (Exception ex) { ex.printStackTrace(System.err); } */ try { String[] args = new String[] { "java", "-classpath", tmpdir + pathSep + CLASSPATH, className, new File(tmpdir, "input").getAbsolutePath() }; // String cmdLine = "java -classpath "+CLASSPATH+pathSep+tmpdir+" Test " + new File(tmpdir, // "input").getAbsolutePath(); // System.out.println("execParser: "+cmdLine); Process process = Runtime.getRuntime().exec(args, null, new File(tmpdir)); StreamVacuum stdoutVacuum = new StreamVacuum(process.getInputStream()); StreamVacuum stderrVacuum = new StreamVacuum(process.getErrorStream()); stdoutVacuum.start(); stderrVacuum.start(); process.waitFor(); stdoutVacuum.join(); stderrVacuum.join(); String output = null; output = stdoutVacuum.toString(); if (stderrVacuum.toString().length() > 0) { this.stderrDuringParse = stderrVacuum.toString(); System.err.println("exec stderrVacuum: " + stderrVacuum); } return output; } catch (Exception e) { System.err.println("can't exec recognizer"); e.printStackTrace(System.err); } return null; } public void testErrors(String[] pairs, boolean printTree) { for (int i = 0; i < pairs.length; i += 2) { String input = pairs[i]; String expect = pairs[i + 1]; ErrorQueue equeue = new ErrorQueue(); Grammar g = null; try { String[] lines = input.split("\n"); String fileName = getFilenameFromFirstLineOfGrammar(lines[0]); g = new Grammar(fileName, input, equeue); } catch (org.antlr.runtime.RecognitionException re) { re.printStackTrace(System.err); } String actual = equeue.toString(g.tool); System.err.println(actual); String msg = input; msg = msg.replaceAll("\n", "\\\\n"); msg = msg.replaceAll("\r", "\\\\r"); msg = msg.replaceAll("\t", "\\\\t"); // ignore error number expect = stripErrorNum(expect); actual = stripErrorNum(actual); assertEquals("error in: " + msg, expect, 
actual); } }

// can be multi-line
// error(29): A.g:2:11: unknown attribute reference a in $a
// error(29): A.g:2:11: unknown attribute reference a in $a
/**
 * Removes the leading "error(NN)" token from each line of a multi-line error dump so tests can
 * compare messages independently of the numeric error code. Lines without "error(" are kept as-is.
 */
String stripErrorNum(String errs) {
  String[] lines = errs.split("\n");
  for (int i = 0; i < lines.length; i++) {
    String s = lines[i];
    int lp = s.indexOf("error(");
    int rp = s.indexOf(')', lp);
    if (lp >= 0 && rp >= 0) {
      // Splice out everything from "error(" through the first ')' after it, inclusive.
      lines[i] = s.substring(0, lp) + s.substring(rp + 1, s.length());
    }
  }
  return Utils.join(lines, "\n");
}

/**
 * Derives a grammar file name ("&lt;name&gt;.g") from a grammar header line such as
 * "grammar Foo;". Returns the placeholder "&lt;string&gt;" when no name can be extracted.
 */
public String getFilenameFromFirstLineOfGrammar(String line) {
  String fileName = "<string>";
  int grIndex = line.lastIndexOf("grammar");
  int semi = line.lastIndexOf(';');
  if (grIndex >= 0 && semi >= 0) {
    int space = line.indexOf(' ', grIndex);
    fileName = line.substring(space + 1, semi) + ".g";
  }
  // Empty grammar name (just ".g") means extraction failed; fall back to the placeholder.
  if (fileName.length() == ".g".length()) fileName = "<string>";
  return fileName;
}

// void ambig(List<Message> msgs, int[] expectedAmbigAlts, String expectedAmbigInput)
// throws Exception
// {
// ambig(msgs, 0, expectedAmbigAlts, expectedAmbigInput);
// }
// void ambig(List<Message> msgs, int i, int[] expectedAmbigAlts, String expectedAmbigInput)
// throws Exception
// {
// List<Message> amsgs = getMessagesOfType(msgs, AmbiguityMessage.class);
// AmbiguityMessage a = (AmbiguityMessage)amsgs.get(i);
// if ( a==null ) assertNull(expectedAmbigAlts);
// else {
// assertEquals(a.conflictingAlts.toString(), Arrays.toString(expectedAmbigAlts));
// }
// assertEquals(expectedAmbigInput, a.input);
// }
// void unreachable(List<Message> msgs, int[] expectedUnreachableAlts)
// throws Exception
// {
// unreachable(msgs, 0, expectedUnreachableAlts);
// }
// void unreachable(List<Message> msgs, int i, int[] expectedUnreachableAlts)
// throws Exception
// {
// List<Message> amsgs = getMessagesOfType(msgs, UnreachableAltsMessage.class);
// UnreachableAltsMessage u = (UnreachableAltsMessage)amsgs.get(i);
// if ( u==null ) assertNull(expectedUnreachableAlts);
// else {
// assertEquals(u.conflictingAlts.toString(), Arrays.toString(expectedUnreachableAlts));
// }
// }

/**
 * Filters a message list down to those whose runtime class is exactly {@code c}
 * (subclasses are deliberately excluded by the == comparison).
 */
List<ANTLRMessage> getMessagesOfType(List<ANTLRMessage> msgs, Class c) {
  List<ANTLRMessage> filtered = new ArrayList<ANTLRMessage>();
  for (ANTLRMessage m : msgs) {
    if (m.getClass() == c) filtered.add(m);
  }
  return filtered;
}

/**
 * Drains an InputStream (typically a child process's stdout/stderr) on a background thread,
 * accumulating lines into a buffer retrievable via {@link #toString()}.
 */
public static class StreamVacuum implements Runnable {
  StringBuffer buf = new StringBuffer();
  BufferedReader in;
  Thread sucker; // the background drain thread; created in start()

  public StreamVacuum(InputStream in) {
    this.in = new BufferedReader(new InputStreamReader(in));
  }

  public void start() {
    sucker = new Thread(this);
    sucker.start();
  }

  public void run() {
    try {
      String line = in.readLine();
      while (line != null) {
        // Normalize line endings: each line is re-terminated with '\n'.
        buf.append(line);
        buf.append('\n');
        line = in.readLine();
      }
    } catch (IOException ioe) {
      // NOTE(review): exception is reported but not rethrown and the stack trace is dropped;
      // callers see a truncated buffer with no indication of failure.
      System.err.println("can't read output from process");
    }
  }

  /** wait for the thread to finish */
  public void join() throws InterruptedException {
    sucker.join();
  }

  public String toString() {
    return buf.toString();
  }
}

/**
 * Asserts that the queue holds an error matching {@code expectedMessage}'s errorType with
 * identical args. If several errors share the type, the LAST match wins. Also warns on stderr
 * when the queue holds more than one message.
 */
protected void checkGrammarSemanticsError(
    ErrorQueue equeue, GrammarSemanticsMessage expectedMessage) throws Exception {
  ANTLRMessage foundMsg = null;
  for (int i = 0; i < equeue.errors.size(); i++) {
    ANTLRMessage m = (ANTLRMessage) equeue.errors.get(i);
    if (m.errorType == expectedMessage.errorType) {
      foundMsg = m;
    }
  }
  assertNotNull("no error; " + expectedMessage.errorType + " expected", foundMsg);
  assertTrue(
      "error is not a GrammarSemanticsMessage", foundMsg instanceof GrammarSemanticsMessage);
  assertEquals(Arrays.toString(expectedMessage.args), Arrays.toString(foundMsg.args));
  if (equeue.size() != 1) {
    System.err.println(equeue);
  }
}

/**
 * Same contract as {@link #checkGrammarSemanticsError} but scans the warnings list instead of
 * the errors list.
 */
protected void checkGrammarSemanticsWarning(
    ErrorQueue equeue, GrammarSemanticsMessage expectedMessage) throws Exception {
  ANTLRMessage foundMsg = null;
  for (int i = 0; i < equeue.warnings.size(); i++) {
    ANTLRMessage m = equeue.warnings.get(i);
    if (m.errorType == expectedMessage.errorType) {
      foundMsg = m;
    }
  }
  assertNotNull("no error; " + expectedMessage.errorType + " expected", foundMsg);
  assertTrue(
      "error is not a GrammarSemanticsMessage", foundMsg instanceof GrammarSemanticsMessage);
  assertEquals(Arrays.toString(expectedMessage.args), Arrays.toString(foundMsg.args));
  if (equeue.size() != 1) {
    System.err.println(equeue);
  }
}

/**
 * Asserts that the queue holds EXACTLY one error and that it matches {@code expectedMessage}'s
 * errorType and args.
 */
protected void checkError(ErrorQueue equeue, ANTLRMessage expectedMessage) throws Exception {
  // System.out.println("errors="+equeue);
  ANTLRMessage foundMsg = null;
  for (int i = 0; i < equeue.errors.size(); i++) {
    ANTLRMessage m = (ANTLRMessage) equeue.errors.get(i);
    if (m.errorType == expectedMessage.errorType) {
      foundMsg = m;
    }
  }
  assertTrue("no error; " + expectedMessage.errorType + " expected", equeue.errors.size() > 0);
  assertTrue("too many errors; " + equeue.errors, equeue.errors.size() <= 1);
  assertNotNull("couldn't find expected error: " + expectedMessage.errorType, foundMsg);
  /* assertTrue("error is not a GrammarSemanticsMessage", foundMsg instanceof GrammarSemanticsMessage); */
  assertTrue(Arrays.equals(expectedMessage.args, foundMsg.args));
}

/**
 * Token stream that pushes registered token types onto the hidden channel as they are sync'd in.
 */
public static class FilteringTokenStream extends CommonTokenStream {
  public FilteringTokenStream(TokenSource src) {
    super(src);
  }

  Set<Integer> hide = new HashSet<Integer>();

  protected void sync(int i) {
    super.sync(i);
    if (hide.contains(get(i).getType())) get(i).setChannel(Token.HIDDEN_CHANNEL);
  }

  /**
   * NOTE(review): the {@code channel} argument is ignored — matching tokens are always moved to
   * Token.HIDDEN_CHANNEL in sync(), regardless of the channel requested here.
   */
  public void setTokenTypeChannel(int ttype, int channel) {
    hide.add(ttype);
  }
}

/**
 * Writes {@code content} to {@code dir/fileName}, logging (not propagating) any IOException.
 * NOTE(review): writers are not closed on the exception path (no try-finally /
 * try-with-resources) and FileWriter uses the platform default charset.
 */
public static void writeFile(String dir, String fileName, String content) {
  try {
    File f = new File(dir, fileName);
    FileWriter w = new FileWriter(f);
    BufferedWriter bw = new BufferedWriter(w);
    bw.write(content);
    bw.close();
    w.close();
  } catch (IOException ioe) {
    System.err.println("can't write file");
    ioe.printStackTrace(System.err);
  }
}

/** Creates {@code dir} and any missing parents; failures are silently ignored. */
protected void mkdir(String dir) {
  File f = new File(dir);
  f.mkdirs();
}

/**
 * Emits a Test.java driver into tmpdir that lexes/parses args[0] with the generated recognizer
 * and invokes the given start rule. When {@code debug} is set, the parser is wrapped with a
 * Profiler subclass.
 */
protected void writeTestFile(
    String parserName, String lexerName, String parserStartRuleName, boolean debug) {
  ST outputFileST =
      new ST(
          "import org.antlr.v4.runtime.*;\n"
              + "import org.antlr.v4.runtime.tree.*;\n"
              + // "import org.antlr.v4.runtime.debug.*;\n" +
              "\n"
              + "public class Test {\n"
              + " public static void main(String[] args) throws Exception {\n"
              + " CharStream input = new ANTLRFileStream(args[0]);\n"
              + " <lexerName> lex = new <lexerName>(input);\n"
              + " CommonTokenStream tokens = new CommonTokenStream(lex);\n"
              + " <createParser>\n"
              + " parser.<parserStartRuleName>();\n"
              + " }\n"
              + "}");
  ST createParserST =
      new ST(
          "class Profiler2 extends Profiler {\n"
              + " public void terminate() { ; }\n"
              + "}\n"
              + " Profiler2 profiler = new Profiler2();\n"
              + " <parserName> parser = new <parserName>(tokens,profiler);\n"
              + " profiler.setParser(parser);\n");
  if (!debug) {
    createParserST = new ST(" <parserName> parser = new <parserName>(tokens);\n");
  }
  outputFileST.add("createParser", createParserST);
  outputFileST.add("parserName", parserName);
  outputFileST.add("lexerName", lexerName);
  outputFileST.add("parserStartRuleName", parserStartRuleName);
  writeFile(tmpdir, "Test.java", outputFileST.render());
}

/**
 * Emits a Test.java driver that tokenizes args[0] and prints every token; optionally also dumps
 * the lexer's DEFAULT_MODE DFA when {@code showDFA} is set.
 */
protected void writeLexerTestFile(String lexerName, boolean showDFA) {
  ST outputFileST =
      new ST(
          "import org.antlr.v4.runtime.*;\n"
              + "\n"
              + "public class Test {\n"
              + " public static void main(String[] args) throws Exception {\n"
              + " CharStream input = new ANTLRFileStream(args[0]);\n"
              + " <lexerName> lex = new <lexerName>(input);\n"
              + " CommonTokenStream tokens = new CommonTokenStream(lex);\n"
              + " tokens.fill();\n"
              + " for (Object t : tokens.getTokens()) System.out.println(t);\n"
              + (showDFA
                  ? "System.out.print(lex.getInterpreter().getDFA(Lexer.DEFAULT_MODE).toLexerString());\n"
                  : "")
              + " }\n"
              + "}");
  outputFileST.add("lexerName", lexerName);
  writeFile(tmpdir, "Test.java", outputFileST.render());
}

/**
 * Emits a Test.java driver for a parser that builds a tree; when a tree-parser start rule is
 * given the tree is walked by the tree parser, otherwise the tree is printed and sanity-checked.
 */
protected void writeTreeTestFile(
    String parserName,
    String treeParserName,
    String lexerName,
    String parserStartRuleName,
    String treeParserStartRuleName,
    boolean debug) {
  ST outputFileST =
      new ST(
          "import org.antlr.v4.runtime.*;\n"
              + "import org.antlr.v4.runtime.tree.*;\n"
              + // "import org.antlr.v4.runtime.debug.*;\n" +
              "\n"
              + "public class Test {\n"
              + " public static void main(String[] args) throws Exception {\n"
              + " CharStream input = new ANTLRFileStream(args[0]);\n"
              + " <lexerName> lex = new <lexerName>(input);\n"
              + " TokenRewriteStream tokens = new TokenRewriteStream(lex);\n"
              + " <createParser>\n"
              + " ParserRuleContext r = parser.<parserStartRuleName>();\n"
              + " <if(!treeParserStartRuleName)>\n"
              + " if ( r.tree!=null ) {\n"
              + " System.out.println(((Tree)r.tree).toStringTree());\n"
              + " ((CommonTree)r.tree).sanityCheckParentAndChildIndexes();\n"
              + " }\n"
              + " <else>\n"
              + " CommonTreeNodeStream nodes = new CommonTreeNodeStream((Tree)r.tree);\n"
              + " nodes.setTokenStream(tokens);\n"
              + " <treeParserName> walker = new <treeParserName>(nodes);\n"
              + " walker.<treeParserStartRuleName>();\n"
              + " <endif>\n"
              + " }\n"
              + "}");
  ST createParserST =
      new ST(
          "class Profiler2 extends Profiler {\n"
              + " public void terminate() { ; }\n"
              + "}\n"
              + " Profiler2 profiler = new Profiler2();\n"
              + " <parserName> parser = new <parserName>(tokens,profiler);\n"
              + " profiler.setParser(parser);\n");
  if (!debug) {
    createParserST = new ST(" <parserName> parser = new <parserName>(tokens);\n");
  }
  outputFileST.add("createParser", createParserST);
  outputFileST.add("parserName", parserName);
  outputFileST.add("treeParserName", treeParserName);
  outputFileST.add("lexerName", lexerName);
  outputFileST.add("parserStartRuleName", parserStartRuleName);
  outputFileST.add("treeParserStartRuleName", treeParserStartRuleName);
  writeFile(tmpdir, "Test.java", outputFileST.render());
}

/** Parser creates trees and so does the tree parser */
protected void writeTreeAndTreeTestFile(
    String parserName,
    String treeParserName,
    String lexerName,
    String parserStartRuleName,
    String treeParserStartRuleName,
    boolean debug) {
  ST outputFileST =
      new ST(
          "import org.antlr.v4.runtime.*;\n"
              + "import org.antlr.v4.runtime.tree.*;\n"
              + // "import org.antlr.v4.runtime.debug.*;\n" +
              "\n"
              + "public class Test {\n"
              + " public static void main(String[] args) throws Exception {\n"
              + " CharStream input = new ANTLRFileStream(args[0]);\n"
              + " <lexerName> lex = new <lexerName>(input);\n"
              + " TokenRewriteStream tokens = new TokenRewriteStream(lex);\n"
              + " <createParser>\n"
              + " ParserRuleContext r = parser.<parserStartRuleName>();\n"
              + " ((CommonTree)r.tree).sanityCheckParentAndChildIndexes();\n"
              + " CommonTreeNodeStream nodes = new CommonTreeNodeStream((Tree)r.tree);\n"
              + " nodes.setTokenStream(tokens);\n"
              + " <treeParserName> walker = new <treeParserName>(nodes);\n"
              + " ParserRuleContext r2 = walker.<treeParserStartRuleName>();\n"
              + " CommonTree rt = ((CommonTree)r2.tree);\n"
              + " if ( rt!=null ) System.out.println(((CommonTree)r2.tree).toStringTree());\n"
              + " }\n"
              + "}");
  ST createParserST =
      new ST(
          "class Profiler2 extends Profiler {\n"
              + " public void terminate() { ; }\n"
              + "}\n"
              + " Profiler2 profiler = new Profiler2();\n"
              + " <parserName> parser = new <parserName>(tokens,profiler);\n"
              + " profiler.setParser(parser);\n");
  if (!debug) {
    createParserST = new ST(" <parserName> parser = new <parserName>(tokens);\n");
  }
  outputFileST.add("createParser", createParserST);
  outputFileST.add("parserName", parserName);
  outputFileST.add("treeParserName", treeParserName);
  outputFileST.add("lexerName", lexerName);
  outputFileST.add("parserStartRuleName", parserStartRuleName);
  outputFileST.add("treeParserStartRuleName", treeParserStartRuleName);
  writeFile(tmpdir, "Test.java", outputFileST.render());
}
/**
 * Emits a Test.java driver for a parser with an attached StringTemplate group; prints the
 * template result of the start rule (or the empty string when no template was produced).
 */
protected void writeTemplateTestFile(
    String parserName, String lexerName, String parserStartRuleName, boolean debug) {
  ST outputFileST =
      new ST(
          "import org.antlr.v4.runtime.*;\n"
              + "import org.antlr.v4.stringtemplate.*;\n"
              + "import org.antlr.v4.stringtemplate.language.*;\n"
              + // "import org.antlr.v4.runtime.debug.*;\n" +
              "import java.io.*;\n"
              + "\n"
              + "public class Test {\n"
              + " static String templates =\n"
              + " \"group test;\"+"
              + " \"foo(x,y) ::= \\\"<x> <y>\\\"\";\n"
              + " static STGroup group ="
              + " new STGroup(new StringReader(templates),"
              + " AngleBracketTemplateLexer.class);"
              + " public static void main(String[] args) throws Exception {\n"
              + " CharStream input = new ANTLRFileStream(args[0]);\n"
              + " <lexerName> lex = new <lexerName>(input);\n"
              + " CommonTokenStream tokens = new CommonTokenStream(lex);\n"
              + " <createParser>\n"
              + " parser.setTemplateLib(group);\n"
              + " ParserRuleContext r = parser.<parserStartRuleName>();\n"
              + " if ( r.st!=null )\n"
              + " System.out.print(r.st.toString());\n"
              + " else\n"
              + " System.out.print(\"\");\n"
              + " }\n"
              + "}");
  ST createParserST =
      new ST(
          "class Profiler2 extends Profiler {\n"
              + " public void terminate() { ; }\n"
              + "}\n"
              + " Profiler2 profiler = new Profiler2();\n"
              + " <parserName> parser = new <parserName>(tokens,profiler);\n"
              + " profiler.setParser(parser);\n");
  if (!debug) {
    createParserST = new ST(" <parserName> parser = new <parserName>(tokens);\n");
  }
  outputFileST.add("createParser", createParserST);
  outputFileST.add("parserName", parserName);
  outputFileST.add("lexerName", lexerName);
  outputFileST.add("parserStartRuleName", parserStartRuleName);
  writeFile(tmpdir, "Test.java", outputFileST.render());
}

/**
 * Chooses the appropriate write*TestFile variant based on what the recognizer builds
 * (trees, templates, lexer-only, or plain parser), then compiles the generated Test.java.
 */
public void writeRecognizerAndCompile(
    String parserName,
    String treeParserName,
    String lexerName,
    String parserStartRuleName,
    String treeParserStartRuleName,
    boolean parserBuildsTrees,
    boolean parserBuildsTemplate,
    boolean treeParserBuildsTrees,
    boolean debug) {
  if (treeParserBuildsTrees && parserBuildsTrees) {
    writeTreeAndTreeTestFile(
        parserName,
        treeParserName,
        lexerName,
        parserStartRuleName,
        treeParserStartRuleName,
        debug);
  } else if (parserBuildsTrees) {
    writeTreeTestFile(
        parserName,
        treeParserName,
        lexerName,
        parserStartRuleName,
        treeParserStartRuleName,
        debug);
  } else if (parserBuildsTemplate) {
    writeTemplateTestFile(parserName, lexerName, parserStartRuleName, debug);
  } else if (parserName == null) {
    writeLexerTestFile(lexerName, debug);
  } else {
    writeTestFile(parserName, lexerName, parserStartRuleName, debug);
  }
  compile("Test.java");
}

/** Deletes every file in tmpdir whose name ends with the given suffix. */
protected void eraseFiles(final String filesEndingWith) {
  File tmpdirF = new File(tmpdir);
  String[] files = tmpdirF.list();
  for (int i = 0; files != null && i < files.length; i++) {
    if (files[i].endsWith(filesEndingWith)) {
      new File(tmpdir + "/" + files[i]).delete();
    }
  }
}

/** Deletes every file directly under tmpdir (non-recursive). */
protected void eraseFiles() {
  File tmpdirF = new File(tmpdir);
  String[] files = tmpdirF.list();
  for (int i = 0; files != null && i < files.length; i++) {
    new File(tmpdir + "/" + files[i]).delete();
  }
}

/** Empties and removes tmpdir if it exists. */
protected void eraseTempDir() {
  File tmpdirF = new File(tmpdir);
  if (tmpdirF.exists()) {
    eraseFiles();
    tmpdirF.delete();
  }
}

/**
 * Returns the first line of stderr captured during the parse with the
 * "Exception in thread \"main\" " prefix stripped, or null when nothing was captured.
 * NOTE(review): substring assumes the prefix is actually present; a shorter first line would
 * throw StringIndexOutOfBoundsException.
 */
public String getFirstLineOfException() {
  if (this.stderrDuringParse == null) {
    return null;
  }
  String[] lines = this.stderrDuringParse.split("\n");
  String prefix = "Exception in thread \"main\" ";
  return lines[0].substring(prefix.length(), lines[0].length());
}

/**
 * When looking at a result set that consists of a Map/HashTable we cannot rely on the output
 * order, as the hashing algorithm or other aspects of the implementation may be different on
 * different JDKs or platforms. Hence we take the Map, convert the keys to a List, sort them and
 * Stringify the Map, which is a bit of a hack, but guarantees that we get the same order on all
 * systems. We assume that the keys are strings.
 *
 * @param m The Map that contains keys we wish to return in sorted order
 * @return A string that represents all the keys in sorted order.
 */
public String sortMapToString(Map m) {
  System.out.println("Map toString looks like: " + m.toString());
  // Pass in crap, and get nothing back
  //
  if (m == null) {
    return null;
  }

  // Sort the keys in the Map
  //
  TreeMap nset = new TreeMap(m);
  System.out.println("Tree map looks like: " + nset.toString());
  return nset.toString();
}

/** Returns the sub-list of user-defined tokens, skipping the reserved leading slots. */
public List<String> realElements(Vector elements) {
  return elements.subList(Token.MIN_USER_TOKEN_TYPE, elements.size());
}

// override to track errors
// Each wrapper delegates to the JUnit Assert and records any failure in lastTestFailed
// before re-throwing, so tearDown logic can tell whether the test passed.
public void assertEquals(String msg, Object a, Object b) {
  try {
    Assert.assertEquals(msg, a, b);
  } catch (Error e) {
    lastTestFailed = true;
    throw e;
  }
}

public void assertEquals(Object a, Object b) {
  try {
    Assert.assertEquals(a, b);
  } catch (Error e) {
    lastTestFailed = true;
    throw e;
  }
}

public void assertEquals(String msg, long a, long b) {
  try {
    Assert.assertEquals(msg, a, b);
  } catch (Error e) {
    lastTestFailed = true;
    throw e;
  }
}

public void assertEquals(long a, long b) {
  try {
    Assert.assertEquals(a, b);
  } catch (Error e) {
    lastTestFailed = true;
    throw e;
  }
}

public void assertTrue(String msg, boolean b) {
  try {
    Assert.assertTrue(msg, b);
  } catch (Error e) {
    lastTestFailed = true;
    throw e;
  }
}

public void assertTrue(boolean b) {
  try {
    Assert.assertTrue(b);
  } catch (Error e) {
    lastTestFailed = true;
    throw e;
  }
}

public void assertFalse(String msg, boolean b) {
  try {
    Assert.assertFalse(msg, b);
  } catch (Error e) {
    lastTestFailed = true;
    throw e;
  }
}

public void assertFalse(boolean b) {
  try {
    Assert.assertFalse(b);
  } catch (Error e) {
    lastTestFailed = true;
    throw e;
  }
}

public void assertNotNull(String msg, Object p) {
  try {
    Assert.assertNotNull(msg, p);
  } catch (Error e) {
    lastTestFailed = true;
    throw e;
  }
}

public void assertNotNull(Object p) {
  try {
    Assert.assertNotNull(p);
  } catch (Error e) {
    lastTestFailed = true;
    throw e;
  }
}

public void assertNull(String msg, Object p) {
  try {
    Assert.assertNull(msg, p);
  } catch (Error e) {
    lastTestFailed = true;
    throw e;
  }
}

public void assertNull(Object p) {
  try {
    Assert.assertNull(p);
  } catch (Error e) {
    lastTestFailed = true;
    throw e;
  }
}

/**
 * Minimal TokenStream backed by a plain list of token TYPES; each LT/get call fabricates a fresh
 * CommonToken. EOF (type -1) is returned once the cursor runs past the list.
 */
public static class IntTokenStream implements TokenStream {
  List<Integer> types;
  int p = 0; // cursor into types

  public IntTokenStream(List<Integer> types) {
    this.types = types;
  }

  public void consume() {
    p++;
  }

  public int LA(int i) {
    return LT(i).getType();
  }

  public int mark() {
    return index();
  }

  public int index() {
    return p;
  }

  public void rewind(int marker) {
    seek(marker);
  }

  public void rewind() {}

  public void release(int marker) {
    seek(marker);
  }

  public void seek(int index) {
    p = index;
  }

  public int size() {
    return types.size();
  }

  public String getSourceName() {
    return null;
  }

  public Token LT(int i) {
    // Past the end: synthesize an EOF token (type -1).
    if ((p + i - 1) >= types.size()) return new CommonToken(-1);
    return new CommonToken(types.get(p + i - 1));
  }

  public int range() {
    return 0;
  }

  public Token get(int i) {
    return new org.antlr.v4.runtime.CommonToken(types.get(i));
  }

  public TokenSource getTokenSource() {
    return null;
  }

  public String toString(int start, int stop) {
    return null;
  }

  public String toString(Token start, Token stop) {
    return null;
  }
}
}
/**
 * Appends {@code str} followed by the platform line separator to the {@code ans} buffer.
 *
 * <p>Uses chained {@code append} calls instead of building an intermediate concatenated String,
 * and {@link System#lineSeparator()} (cached at JVM startup) instead of re-reading the
 * {@code line.separator} system property on every call.
 *
 * @param str the text to append as one line; must not be null
 */
private void addLine(String str) {
  ans.append(str).append(System.lineSeparator());
}
/**
 * Abstract base test for StorableEntityFileDescriptor implementations. Concrete subclasses supply
 * the descriptor factory and the expected exception class/value mapping via the abstract methods
 * at the bottom; every test here is written against that contract.
 */
public abstract class StorableEntityFileDescriptorTest<
    TEntityName extends StorableEntityName,
    TEntityFileExtension extends StorableEntityFileExtension,
    TEntityFileDescriptor extends
        StorableEntityFileDescriptor<?, TEntityName, TEntityFileExtension>> {
  // Test fixtures live under <user.dir>/test_files; created before and removed after the class.
  protected static final String WORKING_DIRECTORY = System.getProperty("user.dir");
  protected static final File TEST_FILE_DIRECTORY = new File(WORKING_DIRECTORY, "test_files");
  protected static final File TEST_FILE = new File(TEST_FILE_DIRECTORY, "test.bar");
  protected static final File TEST_FILE_BAD_EXT = new File(TEST_FILE_DIRECTORY, "test.foobar");

  // region BeforeClass / AfterClass

  @BeforeClass
  public static void createTestFiles() throws IOException {
    // && short-circuits: if the directory cannot be created, the files are not attempted.
    final boolean result =
        TEST_FILE_DIRECTORY.mkdir()
            && TEST_FILE.createNewFile()
            && TEST_FILE_BAD_EXT.createNewFile();

    if (!result) {
      throw new RuntimeException("unable to initialize test suite");
    }
  }

  @AfterClass
  public static void removeTestFiles() throws IOException {
    FileUtils.deleteDirectory(TEST_FILE_DIRECTORY);
  }

  // endregion

  // region construction

  @Test
  public void descriptorCannotBeCreatedAroundDirectory() {
    // Assert:
    ExceptionAssert.assertThrowsStorageException(
        v -> this.createDescriptor(TEST_FILE_DIRECTORY),
        this.getExceptionClass(),
        this.getExceptionValue(
            StorableEntityStorageException.Code.STORABLE_ENTITY_CANNOT_BE_DIRECTORY.value()));
  }

  @Test
  public void descriptorCannotBeCreatedAroundStorableEntityWithInvalidExtension() {
    // Assert:
    ExceptionAssert.assertThrowsStorageException(
        v -> this.createDescriptor(TEST_FILE_BAD_EXT),
        this.getExceptionClass(),
        this.getExceptionValue(
            StorableEntityStorageException.Code.STORABLE_ENTITY_HAS_INVALID_EXTENSION.value()));
  }

  @Test
  public void descriptorCanBeCreatedAroundStorableEntityWithValidExtension() {
    // Arrange: the file does not need to exist for descriptor construction.
    final File file =
        new File(Paths.get(TEST_FILE_DIRECTORY.toString(), "blah").toString(), "foo.bar");

    // Act:
    final TEntityFileDescriptor descriptor = this.createDescriptor(file);

    // Assert:
    Assert.assertThat(descriptor.getName(), IsEqual.equalTo(this.createEntityName("foo")));
    Assert.assertThat(
        descriptor.getFileExtension(), IsEqual.equalTo(this.createEntityFileExtension(".bar")));
    Assert.assertThat(
        descriptor.getStorableEntityLocation(),
        IsEqual.equalTo(Paths.get(TEST_FILE_DIRECTORY.toString(), "blah", "foo.bar").toString()));
  }

  @Test
  public void descriptorCanBeCreatedAroundStorableEntityWithMixedCaseExtension() {
    // Arrange: name and extension casing must be preserved, not normalized.
    final File file =
        new File(Paths.get(TEST_FILE_DIRECTORY.toString(), "BlAh").toString(), "FoO.BaR");

    // Act:
    final TEntityFileDescriptor descriptor = this.createDescriptor(file);

    // Assert:
    Assert.assertThat(descriptor.getName(), IsEqual.equalTo(this.createEntityName("FoO")));
    Assert.assertThat(
        descriptor.getFileExtension(), IsEqual.equalTo(this.createEntityFileExtension(".BaR")));
    Assert.assertThat(
        descriptor.getStorableEntityLocation(),
        IsEqual.equalTo(Paths.get(TEST_FILE_DIRECTORY.toString(), "BlAh", "FoO.BaR").toString()));
  }

  @Test
  public void descriptorCanBeCreatedAroundUrlEncodedStorableEntityWithValidExtension() {
    // Arrange: file name is the URL-encoding of "öäü@"; getName() is expected to decode it,
    // while the location keeps the encoded form.
    final File file =
        new File(
            Paths.get(TEST_FILE_DIRECTORY.toString(), "blah").toString(),
            "%C3%B6%C3%A4%C3%BC%40.bar");

    // Act:
    final TEntityFileDescriptor descriptor = this.createDescriptor(file);

    // Assert:
    Assert.assertThat(descriptor.getName(), IsEqual.equalTo(this.createEntityName("öäü@")));
    Assert.assertThat(
        descriptor.getFileExtension(), IsEqual.equalTo(this.createEntityFileExtension(".bar")));
    Assert.assertThat(
        descriptor.getStorableEntityLocation(),
        IsEqual.equalTo(
            Paths.get(TEST_FILE_DIRECTORY.toString(), "blah", "%C3%B6%C3%A4%C3%BC%40.bar")
                .toString()));
  }

  // endregion

  // region openRead

  @Test
  public void openReadCanOpenFileThatExists() throws IOException {
    // Arrange:
    final TEntityFileDescriptor descriptor = this.createDescriptor(TEST_FILE);

    // Act:
    try (final InputStream is = descriptor.openRead()) {
      // Assert:
      Assert.assertThat(is, IsNull.notNullValue());
    }
  }

  @Test
  public void openReadCannotOpenFileThatDoesNotExist() {
    // Arrange:
    final File file = new File(TEST_FILE_DIRECTORY, "imaginary-read.bar");
    final TEntityFileDescriptor descriptor = this.createDescriptor(file);

    // Act:
    ExceptionAssert.assertThrowsStorageException(
        v -> descriptor.openRead(),
        this.getExceptionClass(),
        this.getExceptionValue(
            StorableEntityStorageException.Code.STORABLE_ENTITY_DOES_NOT_EXIST.value()));
  }

  @Test
  public void openReadCannotOpenFileThatIsInvalid() {
    // Arrange: an embedded NUL makes the path invalid at open time, while exists() is mocked
    // to true so the does-not-exist branch is bypassed.
    final File file = Mockito.mock(File.class);
    Mockito.when(file.getName()).thenReturn("fo\0o.bar");
    Mockito.when(file.getAbsolutePath()).thenReturn("fo\0o.bar");
    Mockito.when(file.exists()).thenReturn(true);
    final TEntityFileDescriptor descriptor = this.createDescriptor(file);

    // Act:
    ExceptionAssert.assertThrowsStorageException(
        v -> descriptor.openRead(),
        this.getExceptionClass(),
        this.getExceptionValue(
            StorableEntityStorageException.Code.STORABLE_ENTITY_COULD_NOT_BE_READ.value()));
  }

  @Test
  public void openReadThrowsIfModeIsNotRaw() {
    // Arrange:
    final TEntityFileDescriptor descriptor = this.createDescriptor(TEST_FILE);

    // Act + Assert:
    ExceptionAssert.assertThrows(
        v -> descriptor.openRead(StorableEntityReadMode.Decode), IllegalArgumentException.class);
  }

  @Test
  public void openReadWithReadModeDelegatesToOpenRead() throws IOException {
    // Arrange:
    final TEntityFileDescriptor descriptor = Mockito.spy(this.createDescriptor(TEST_FILE));

    // Act + Assert:
    try (final InputStream is = descriptor.openRead(StorableEntityReadMode.Raw)) {
      Mockito.verify(descriptor, Mockito.times(1)).openRead();
    }
  }

  // endregion

  // region openWrite

  @Test
  public void openWriteCanOpenFileThatExists() throws IOException {
    // Arrange:
    final TEntityFileDescriptor descriptor = this.createDescriptor(TEST_FILE);

    // Act:
    try (final OutputStream os = descriptor.openWrite()) {
      // Assert:
      Assert.assertThat(os, IsNull.notNullValue());
    }
  }

  @Test
  public void openWriteCanOpenFileThatDoesNotExist() throws IOException {
    // Arrange: unlike openRead, openWrite may create the file.
    final File file = new File(TEST_FILE_DIRECTORY, "imaginary-write.bar");
    final TEntityFileDescriptor descriptor = this.createDescriptor(file);

    // Act:
    try (final OutputStream os = descriptor.openWrite()) {
      // Assert:
      Assert.assertThat(os, IsNull.notNullValue());
    }
  }

  @Test
  public void openWriteCannotOpenFileThatIsInvalid() {
    // Arrange:
    final File file = Mockito.mock(File.class);
    Mockito.when(file.getName()).thenReturn("fo\0o.bar");
    Mockito.when(file.getAbsolutePath()).thenReturn("fo\0o.bar");
    final TEntityFileDescriptor descriptor = this.createDescriptor(file);

    // Act:
    ExceptionAssert.assertThrowsStorageException(
        v -> descriptor.openWrite(),
        this.getExceptionClass(),
        this.getExceptionValue(
            StorableEntityStorageException.Code.STORABLE_ENTITY_COULD_NOT_BE_SAVED.value()));
  }

  // endregion

  // region delete

  @Test
  public void deleteDeletesUnderlyingStorableEntityFile() throws IOException {
    // Arrange:
    final File file = new File(TEST_FILE_DIRECTORY, "to-be-deleted.bar");
    final TEntityFileDescriptor descriptor = this.createDescriptor(file);
    Assert.assertThat(file.createNewFile(), IsEqual.equalTo(true));

    // Act:
    descriptor.delete();

    // Assert:
    Assert.assertThat(file.exists(), IsEqual.equalTo(false));
  }

  @Test
  public void deleteRaisesExceptionIfFileCannotBeDeleted() {
    // Arrange: File.delete() is mocked to fail.
    final File file = Mockito.mock(File.class);
    Mockito.when(file.getName()).thenReturn("foo.bar");
    Mockito.when(file.getAbsolutePath()).thenReturn("foo.bar");
    final TEntityFileDescriptor descriptor = this.createDescriptor(file);
    Mockito.when(file.delete()).thenReturn(false);

    // Act:
    ExceptionAssert.assertThrowsStorageException(
        v -> descriptor.delete(),
        this.getExceptionClass(),
        this.getExceptionValue(
            StorableEntityStorageException.Code.STORABLE_ENTITY_COULD_NOT_BE_DELETED.value()));
  }

  // endregion

  // region serialization

  @Test
  public void descriptorCanBeSerialized() {
    // Arrange:
    final File file =
        new File(Paths.get(TEST_FILE_DIRECTORY.toString(), "blah").toString(), "foo.bar");
    final TEntityFileDescriptor descriptor = this.createDescriptor(file);

    // Act:
    final JSONObject jsonObject = JsonSerializer.serializeToJson(descriptor);

    // Assert: exactly two fields — the entity name (under its label) and the location.
    Assert.assertThat(jsonObject.size(), IsEqual.equalTo(2));
    Assert.assertThat(jsonObject.get(descriptor.getName().getLabel()), IsEqual.equalTo("foo"));
    Assert.assertThat(
        jsonObject.get("location"),
        IsEqual.equalTo(Paths.get(TEST_FILE_DIRECTORY.toString(), "blah", "foo.bar").toString()));
  }

  // endregion

  @Test
  public void getStorableEntityLocationReturnsAbsolutePathToStorableEntity() {
    // Arrange:
    final File file =
        new File(Paths.get(TEST_FILE_DIRECTORY.toString(), "blah").toString(), "foo.bar");
    final TEntityFileDescriptor descriptor = this.createDescriptor(file);

    // Act:
    final String path = descriptor.getStorableEntityLocation();

    // Assert:
    Assert.assertThat(path, IsEqual.equalTo(file.getAbsolutePath()));
  }

  /** Factory for the descriptor under test. */
  protected abstract TEntityFileDescriptor createDescriptor(final File file);

  /** Expected exception class thrown by the implementation under test. */
  protected abstract Class<? extends StorableEntityStorageException> getExceptionClass();

  /** Maps a base exception code to the implementation-specific expected value. */
  protected abstract Integer getExceptionValue(final Integer originalValue);

  // Builds an expected name by running a synthetic file through the same descriptor factory.
  private TEntityName createEntityName(final String name) {
    return this.createDescriptor(new File(name + ".xyz")).getName();
  }

  // Builds an expected extension the same way; note the argument already carries the leading
  // dot (e.g. ".bar"), so the synthetic file name is "xyz..bar" — intentional, the descriptor
  // presumably parses from the final dot. TODO(review): confirm against the implementation.
  private TEntityFileExtension createEntityFileExtension(final String extension) {
    return this.createDescriptor(new File("xyz." + extension)).getFileExtension();
  }
}
@Ignore("Long running test") public class RemoteSubscriptionModelPerformanceTest { // TODO DS test having the server side on another machine private static final int _noOfPuts = 50; private static final int _noOfRunsToAverage = Boolean.parseBoolean(System.getProperty("quick", "true")) ? 2 : 100; private static final long _secondInNanos = 1_000_000_000L; private static final AtomicInteger counter = new AtomicInteger(); private static String _twoMbTestString; private static int _twoMbTestStringLength; private static Map<String, String> _testMap; private static VanillaAssetTree serverAssetTree, clientAssetTree; private static ServerEndpoint serverEndpoint; private final String _mapName = "PerfTestMap" + counter.incrementAndGet(); @BeforeClass public static void setUpBeforeClass() throws IOException, URISyntaxException { // YamlLogging.showServerReads = true; // YamlLogging.clientReads = true; char[] chars = new char[2 << 20]; Arrays.fill(chars, '~'); _twoMbTestString = new String(chars); _twoMbTestStringLength = _twoMbTestString.length(); serverAssetTree = new VanillaAssetTree(1).forTesting(); // The following line doesn't add anything and breaks subscriptions serverAssetTree .root() .addWrappingRule( MapView.class, "map directly to KeyValueStore", VanillaMapView::new, KeyValueStore.class); serverAssetTree .root() .addLeafRule( KeyValueStore.class, "use Chronicle Map", (context, asset) -> new ChronicleMapKeyValueStore( context .basePath(OS.TARGET) .entries(_noOfPuts) .averageValueSize(_twoMbTestStringLength), asset)); TCPRegistry.createServerSocketChannelFor("RemoteSubscriptionModelPerformanceTest.port"); serverEndpoint = new ServerEndpoint( "RemoteSubscriptionModelPerformanceTest.port", serverAssetTree, WireType.BINARY); clientAssetTree = new VanillaAssetTree(13) .forRemoteAccess("RemoteSubscriptionModelPerformanceTest.port", WireType.BINARY); } @AfterClass public static void tearDownAfterClass() throws IOException { clientAssetTree.close(); serverEndpoint.close(); 
serverAssetTree.close(); TcpChannelHub.closeAllHubs(); TCPRegistry.reset(); } @Before public void setUp() throws IOException { Files.deleteIfExists(Paths.get(OS.TARGET, _mapName)); _testMap = clientAssetTree.acquireMap(_mapName + "?putReturnsNull=true", String.class, String.class); _testMap.clear(); } @After public void tearDown() throws IOException { // System.out.println("Native memory used "+OS.memory().nativeMemoryUsed()); // System.gc(); } /** * Test that listening to events for a given key can handle 50 updates per second of 2 MB string * values. */ @Test public void testGetPerformance() { _testMap.clear(); IntStream.range(0, _noOfPuts) .forEach(i -> _testMap.put(TestUtils.getKey(_mapName, i), _twoMbTestString)); // Perform test a number of times to allow the JVM to warm up, but verify runtime against // average TestUtils.runMultipleTimesAndVerifyAvgRuntime( i -> _testMap.size(), () -> { IntStream.range(0, _noOfPuts).forEach(i -> _testMap.get(TestUtils.getKey(_mapName, i))); }, _noOfRunsToAverage, _secondInNanos * 3 / 2); } /** Test that 50 updates per second of 2 MB string values completes in 1 second. */ @Test public void testPutPerformance() { _testMap.clear(); // Perform test a number of times to allow the JVM to warm up, but verify runtime against // average TestUtils.runMultipleTimesAndVerifyAvgRuntime( i -> _testMap.size(), () -> { IntStream.range(0, _noOfPuts) .forEach(i -> _testMap.put(TestUtils.getKey(_mapName, i), _twoMbTestString)); }, _noOfRunsToAverage, _secondInNanos); } /** * Test that listening to events for a given key can handle 50 updates per second of 2 MB string * values. 
*/ @Test public void testSubscriptionMapEventOnKeyPerformance() { _testMap.clear(); String key = TestUtils.getKey(_mapName, 0); // Create subscriber and register // Add 4 for the number of puts that is added to the string TestChronicleKeyEventSubscriber keyEventSubscriber = new TestChronicleKeyEventSubscriber(_twoMbTestStringLength); clientAssetTree.registerSubscriber( _mapName + "/" + key + "?bootstrap=false", String.class, keyEventSubscriber); Jvm.pause(100); Asset child = serverAssetTree.getAsset(_mapName).getChild(key); Assert.assertNotNull(child); Subscription subscription = child.subscription(false); Assert.assertEquals(1, subscription.subscriberCount()); long start = System.nanoTime(); // Perform test a number of times to allow the JVM to warm up, but verify runtime against // average TestUtils.runMultipleTimesAndVerifyAvgRuntime( () -> { IntStream.range(0, _noOfPuts) .forEach( i -> { _testMap.put(key, _twoMbTestString); }); }, _noOfRunsToAverage, 3 * _secondInNanos); waitFor(() -> keyEventSubscriber.getNoOfEvents().get() >= _noOfPuts * _noOfRunsToAverage); long time = System.nanoTime() - start; System.out.printf("Took %.3f seconds to receive all events%n", time / 1e9); // Test that the correct number of events was triggered on event listener Assert.assertEquals(_noOfPuts * _noOfRunsToAverage, keyEventSubscriber.getNoOfEvents().get()); clientAssetTree.unregisterSubscriber(_mapName + "/" + key, keyEventSubscriber); Jvm.pause(100); Assert.assertEquals(0, subscription.subscriberCount()); } /** * Test that listening to events for a given map can handle 50 updates per second of 2 MB string * values and are triggering events which contain both the key and value (topic). 
*/ @Test public void testSubscriptionMapEventOnTopicPerformance() { _testMap.clear(); String key = TestUtils.getKey(_mapName, 0); // Create subscriber and register TestChronicleTopicSubscriber topicSubscriber = new TestChronicleTopicSubscriber(key, _twoMbTestStringLength); clientAssetTree.registerTopicSubscriber(_mapName, String.class, String.class, topicSubscriber); Jvm.pause(100); KVSSubscription subscription = (KVSSubscription) serverAssetTree.getAsset(_mapName).subscription(false); Assert.assertEquals(1, subscription.topicSubscriberCount()); // Perform test a number of times to allow the JVM to warm up, but verify runtime against // average TestUtils.runMultipleTimesAndVerifyAvgRuntime( i -> { System.out.println("test"); int events = _noOfPuts * i; waitFor(() -> events == topicSubscriber.getNoOfEvents().get()); Assert.assertEquals(events, topicSubscriber.getNoOfEvents().get()); }, () -> { IntStream.range(0, _noOfPuts) .forEach( i -> { _testMap.put(key, _twoMbTestString); }); }, _noOfRunsToAverage, 3 * _secondInNanos); // Test that the correct number of events was triggered on event listener int events = _noOfPuts * _noOfRunsToAverage; waitFor(() -> events == topicSubscriber.getNoOfEvents().get()); Assert.assertEquals(events, topicSubscriber.getNoOfEvents().get()); clientAssetTree.unregisterTopicSubscriber(_mapName, topicSubscriber); waitFor(() -> 0 == subscription.topicSubscriberCount()); Assert.assertEquals(0, subscription.topicSubscriberCount()); } /** * Tests the performance of an event listener on the map for Insert events of 2 MB strings. Expect * it to handle at least 50 2 MB updates per second. 
*/ @Test public void testSubscriptionMapEventListenerInsertPerformance() { _testMap.clear(); YamlLogging.setAll(true); // Create subscriber and register TestChronicleMapEventListener mapEventListener = new TestChronicleMapEventListener(_mapName, _twoMbTestStringLength); Subscriber<MapEvent> mapEventSubscriber = e -> e.apply(mapEventListener); clientAssetTree.registerSubscriber(_mapName, MapEvent.class, mapEventSubscriber); Jvm.pause(100); KVSSubscription subscription = (KVSSubscription) serverAssetTree.getAsset(_mapName).subscription(false); Assert.assertEquals(1, subscription.entrySubscriberCount()); // Perform test a number of times to allow the JVM to warm up, but verify runtime against // average TestUtils.runMultipleTimesAndVerifyAvgRuntime( i -> { if (i > 0) { waitFor(() -> mapEventListener.getNoOfInsertEvents().get() >= _noOfPuts); Assert.assertEquals(_noOfPuts, mapEventListener.getNoOfInsertEvents().get()); } // Test that the correct number of events were triggered on event listener Assert.assertEquals(0, mapEventListener.getNoOfRemoveEvents().get()); Assert.assertEquals(0, mapEventListener.getNoOfUpdateEvents().get()); _testMap.clear(); mapEventListener.resetCounters(); }, () -> { IntStream.range(0, _noOfPuts) .forEach( i -> { _testMap.put(TestUtils.getKey(_mapName, i), _twoMbTestString); }); }, _noOfRunsToAverage, 2 * _secondInNanos); clientAssetTree.unregisterSubscriber(_mapName, mapEventSubscriber); Jvm.pause(1000); Assert.assertEquals(0, subscription.entrySubscriberCount()); } /** * Tests the performance of an event listener on the map for Update events of 2 MB strings. Expect * it to handle at least 50 2 MB updates per second. 
*/
@Test
public void testSubscriptionMapEventListenerUpdatePerformance() {
  _testMap.clear();

  // Put values before testing as we want to ignore the insert events
  Function<Integer, Object> putFunction =
      a -> _testMap.put(TestUtils.getKey(_mapName, a), _twoMbTestString);
  IntStream.range(0, _noOfPuts)
      .forEach(
          i -> {
            putFunction.apply(i);
          });
  Jvm.pause(100);

  // Create subscriber and register
  TestChronicleMapEventListener mapEventListener =
      new TestChronicleMapEventListener(_mapName, _twoMbTestStringLength);
  Subscriber<MapEvent> mapEventSubscriber = e -> e.apply(mapEventListener);
  // "?bootstrap=false" — presumably suppresses replay of the pre-existing entries to this
  // subscriber (consistent with ignoring the insert events above); confirm against the
  // asset-tree documentation.
  clientAssetTree.registerSubscriber(
      _mapName + "?bootstrap=false", MapEvent.class, mapEventSubscriber);
  KVSSubscription subscription =
      (KVSSubscription) serverAssetTree.getAsset(_mapName).subscription(false);
  waitFor(() -> subscription.entrySubscriberCount() == 1);
  Assert.assertEquals(1, subscription.entrySubscriberCount());

  // Perform test a number of times to allow the JVM to warm up, but verify runtime against
  // average. Re-putting the same keys now produces update events only.
  TestUtils.runMultipleTimesAndVerifyAvgRuntime(
      i -> {
        // Nothing to verify before the first workload run (i == 0).
        if (i > 0) {
          waitFor(() -> mapEventListener.getNoOfUpdateEvents().get() >= _noOfPuts);
          // Test that the correct number of events were triggered on event listener
          Assert.assertEquals(_noOfPuts, mapEventListener.getNoOfUpdateEvents().get());
        }
        Assert.assertEquals(0, mapEventListener.getNoOfInsertEvents().get());
        Assert.assertEquals(0, mapEventListener.getNoOfRemoveEvents().get());
        mapEventListener.resetCounters();
      },
      () -> {
        IntStream.range(0, _noOfPuts)
            .forEach(
                i -> {
                  putFunction.apply(i);
                });
      },
      _noOfRunsToAverage,
      3 * _secondInNanos);

  clientAssetTree.unregisterSubscriber(_mapName, mapEventSubscriber);
  waitFor(() -> subscription.entrySubscriberCount() == 0);
  Assert.assertEquals(0, subscription.entrySubscriberCount());
}

// Polls the condition for up to 40 attempts, pausing (via Jvm.pause) for a quadratically
// growing interval (1, 4, 9, ... units) after each failed check; returns early once the
// condition holds.
private void waitFor(BooleanSupplier b) {
  for (int i = 1; i <= 40; i++) if (!b.getAsBoolean()) Jvm.pause(i * i);
}

/**
 * Tests the performance of an event listener on the map for Remove events of 2 MB
strings. Expect
 * it to handle at least 50 2 MB updates per second.
 */
@Test
public void testSubscriptionMapEventListenerRemovePerformance() {
  _testMap.clear();

  // Put values before testing as we want to ignore the insert and update events
  // Create subscriber and register
  TestChronicleMapEventListener mapEventListener =
      new TestChronicleMapEventListener(_mapName, _twoMbTestStringLength);
  Subscriber<MapEvent> mapEventSubscriber = e -> e.apply(mapEventListener);
  clientAssetTree.registerSubscriber(_mapName, MapEvent.class, mapEventSubscriber);

  // Perform test a number of times to allow the JVM to warm up, but verify runtime against
  // average
  long runtimeInNanos = 0;
  for (int i = 0; i < _noOfRunsToAverage; i++) {
    // Put values before testing as we want to ignore the insert and update events
    IntStream.range(0, _noOfPuts)
        .forEach(
            c -> {
              _testMap.put(TestUtils.getKey(_mapName, c), _twoMbTestString);
            });
    waitFor(() -> mapEventListener.getNoOfInsertEvents().get() >= _noOfPuts);
    mapEventListener.resetCounters();

    // Only the remove calls are timed.
    long startTime = System.nanoTime();
    IntStream.range(0, _noOfPuts)
        .forEach(
            c -> {
              _testMap.remove(TestUtils.getKey(_mapName, c));
            });
    runtimeInNanos += System.nanoTime() - startTime;
    waitFor(() -> mapEventListener.getNoOfRemoveEvents().get() >= _noOfPuts);

    // Test that the correct number of events were triggered on event listener
    Assert.assertEquals(0, mapEventListener.getNoOfInsertEvents().get());
    Assert.assertEquals(_noOfPuts, mapEventListener.getNoOfRemoveEvents().get());
    Assert.assertEquals(0, mapEventListener.getNoOfUpdateEvents().get());
  }
  // Average time per remove must stay within the allowed budget.
  Assert.assertTrue((runtimeInNanos / (_noOfPuts * _noOfRunsToAverage)) <= 2 * _secondInNanos);

  clientAssetTree.unregisterSubscriber(_mapName, mapEventSubscriber);
}

/**
 * Checks that all updates triggered are for the key specified in the constructor and increments
 * the number of updates.
*/
class TestChronicleKeyEventSubscriber implements Subscriber<String> {
  private final int expectedStringLength;
  private final AtomicInteger eventCount = new AtomicInteger(0);

  public TestChronicleKeyEventSubscriber(int stringLength) {
    expectedStringLength = stringLength;
  }

  public AtomicInteger getNoOfEvents() {
    return eventCount;
  }

  /** Verifies the value length and counts the event; null values are only reported. */
  @Override
  public void onMessage(String newValue) {
    if (newValue == null) {
      System.out.println("No value");
      return;
    }
    Assert.assertEquals(expectedStringLength, newValue.length());
    eventCount.incrementAndGet();
  }
}

/**
 * Topic subscriber checking for each message that it is for the right key (in constructor) and
 * the expected size value. Increments event counter which can be checked at the end of the test.
 */
class TestChronicleTopicSubscriber implements TopicSubscriber<String, String> {
  private final String expectedTopic;
  private final int expectedStringLength;
  private final AtomicInteger eventCount = new AtomicInteger(0);

  public TestChronicleTopicSubscriber(String keyName, int stringLength) {
    expectedTopic = keyName;
    expectedStringLength = stringLength;
  }

  /**
   * Asserts that the topic/key is the one given in the constructor and that the message has the
   * expected size, then counts the event.
   *
   * @throws InvalidSubscriberException declared by the interface; never thrown here
   */
  @Override
  public void onMessage(String topic, String message) throws InvalidSubscriberException {
    Assert.assertEquals(expectedTopic, topic);
    Assert.assertEquals(expectedStringLength, message.length());
    eventCount.incrementAndGet();
  }

  public AtomicInteger getNoOfEvents() {
    return eventCount;
  }
}

/**
 * Map event listener for performance testing. Checks that the key is the one expected and the
 * size of the value is as expected. Increments event specific counters that can be used to check
 * against the expected number of events.
*/
class TestChronicleMapEventListener implements MapEventListener<String, String> {
  // Counters are final so callers may safely cache the returned AtomicInteger references
  // across a resetCounters() call. (Previously resetCounters() replaced the instances,
  // silently detaching any cached reference.)
  private final AtomicInteger _noOfInsertEvents = new AtomicInteger(0);
  private final AtomicInteger _noOfUpdateEvents = new AtomicInteger(0);
  private final AtomicInteger _noOfRemoveEvents = new AtomicInteger(0);

  private final String _mapName;
  private final int _stringLength;

  public TestChronicleMapEventListener(String mapName, int stringLength) {
    _mapName = mapName;
    _stringLength = stringLength;
  }

  @Override
  public void update(String assetName, String key, String oldValue, String newValue) {
    testKeyAndValue(key, newValue, _noOfUpdateEvents);
  }

  @Override
  public void insert(String assetName, String key, String value) {
    testKeyAndValue(key, value, _noOfInsertEvents);
  }

  @Override
  public void remove(String assetName, String key, String value) {
    testKeyAndValue(key, value, _noOfRemoveEvents);
  }

  public AtomicInteger getNoOfInsertEvents() {
    return _noOfInsertEvents;
  }

  public AtomicInteger getNoOfUpdateEvents() {
    return _noOfUpdateEvents;
  }

  public AtomicInteger getNoOfRemoveEvents() {
    return _noOfRemoveEvents;
  }

  /** Resets all event counters to zero in place, keeping the AtomicInteger instances alive. */
  public void resetCounters() {
    _noOfInsertEvents.set(0);
    _noOfUpdateEvents.set(0);
    _noOfRemoveEvents.set(0);
  }

  // Checks that the event's key matches the key generated for the current counter position and
  // that the value has the configured length, then advances the counter. Assumes events arrive
  // in key order (counter doubles as the expected key index).
  private void testKeyAndValue(String key, String value, AtomicInteger counterToIncrement) {
    int counter = counterToIncrement.getAndIncrement();
    Assert.assertEquals(TestUtils.getKey(_mapName, counter), key);
    Assert.assertEquals(_stringLength, value.length());
  }
}
}
@Test
public void testStamboom() {
  // Builds a three-generation family tree and verifies the tree rendered from Jaron's
  // perspective (size, number of output lines, and one spot-checked line).
  Administratie adm = new Administratie();

  // Paternal grandparents (unmarried couple), parents of Gijs.
  Persoon piet =
      adm.addPersoon(
          Geslacht.MAN,
          new String[] {"Piet"},
          "Swinkels",
          "",
          new GregorianCalendar(1924, Calendar.APRIL, 23),
          "Den Haag",
          null);
  Persoon teuntje =
      adm.addPersoon(
          Geslacht.VROUW,
          new String[] {"Teuntje"},
          "Vries",
          "de",
          new GregorianCalendar(1927, Calendar.MAY, 5),
          "Doesburg",
          null);
  Gezin teuntjeEnPiet = adm.addOngehuwdGezin(teuntje, piet);
  Persoon gijs =
      adm.addPersoon(
          Geslacht.MAN,
          new String[] {"Gijs", "Jozef"},
          "Swinkels",
          "",
          new GregorianCalendar(1944, Calendar.APRIL, 21),
          "Geldrop",
          teuntjeEnPiet);

  // Maternal great-grandparents (married), parents of Louise.
  Persoon ferdinand =
      adm.addPersoon(
          Geslacht.MAN,
          new String[] {"Ferdinand", "Karel", "Helene"},
          "Vuiter",
          "de",
          new GregorianCalendar(1901, Calendar.JULY, 14),
          "Amsterdam",
          null);
  Persoon annalouise =
      adm.addPersoon(
          Geslacht.VROUW,
          new String[] {"Annalouise", "Isabel", "Teuntje"},
          "Vuiter",
          "de",
          new GregorianCalendar(1902, Calendar.OCTOBER, 1),
          "Amsterdam",
          null);
  Gezin ferdinandEnAnnalouise =
      adm.addHuwelijk(ferdinand, annalouise, new GregorianCalendar(1921, Calendar.MAY, 5));

  // Maternal grandmother Louise (single-parent family), mother of mary.
  Persoon louise =
      adm.addPersoon(
          Geslacht.VROUW,
          new String[] {"Louise", "Isabel", "Helene"},
          "Vuiter",
          "de",
          new GregorianCalendar(1927, Calendar.JANUARY, 15),
          "Amsterdam",
          ferdinandEnAnnalouise);
  Gezin louiseAlleen = adm.addOngehuwdGezin(louise, null);
  Persoon mary =
      adm.addPersoon(
          Geslacht.VROUW,
          new String[] {"mary"},
          "Vuiter",
          "de",
          new GregorianCalendar(1943, Calendar.MAY, 25),
          "Rotterdam",
          louiseAlleen);

  // Jaron's parents and Jaron himself.
  Gezin gijsEnMary = adm.addOngehuwdGezin(gijs, mary);
  Persoon jaron =
      adm.addPersoon(
          Geslacht.MAN,
          new String[] {"Jaron"},
          "Swinkels",
          "",
          new GregorianCalendar(1962, Calendar.JULY, 22),
          "Velp",
          gijsEnMary);

  // 8 ancestors in Jaron's tree: Jaron, Gijs, mary, Piet, Teuntje, Louise, Ferdinand,
  // Annalouise.
  assertEquals("afmeting boom onjuist", 8, jaron.afmetingStamboom());
  String stamboomstring = jaron.stamboomAlsString();
  String[] regels = stamboomstring.split(System.getProperty("line.separator"));
  assertEquals("aantal regels", 8, regels.length);
  // Spot-check the third rendered line (grandmother Teuntje).
  assertEquals("regel 3 onjuist", " T. de Vries (VROUW) 5-5-1927", regels[2]);
  System.out.println(stamboomstring);
}
public class JPASingleSessionCommandServiceFactoryEnvTest { private static String TMPDIR = System.getProperty("java.io.tmpdir"); private static final Logger log = LoggerFactory.getLogger(JPASingleSessionCommandServiceFactoryEnvTest.class); private static Server h2Server; private ApplicationContext ctx; @BeforeClass public static void startH2Database() throws Exception { DeleteDbFiles.execute("", "DroolsFlow", true); h2Server = Server.createTcpServer(new String[0]); h2Server.start(); try { TMPDIR = JPASingleSessionCommandServiceFactoryEnvTest.class .getResource("/kb_persistence") .getFile(); log.info("creating: {}", TMPDIR + "/processWorkItems.pkg"); writePackage(getProcessWorkItems(), new File(TMPDIR + "/processWorkItems.pkg")); log.info("creating: {}", TMPDIR + "/processSubProcess.pkg"); writePackage(getProcessSubProcess(), new File(TMPDIR + "/processSubProcess.pkg")); log.info("creating: {}", TMPDIR + "/processTimer.pkg"); writePackage(getProcessTimer(), new File(TMPDIR + "/processTimer.pkg")); log.info("creating: {}", TMPDIR + "/processTimer2.pkg"); writePackage(getProcessTimer2(), new File(TMPDIR + "/processTimer2.pkg")); } catch (Exception e) { log.error("can't create packages!", e); throw new RuntimeException(e); } } @AfterClass public static void stopH2Database() throws Exception { log.info("stopping database"); h2Server.stop(); DeleteDbFiles.execute("", "DroolsFlow", true); } @Before public void createSpringContext() { try { log.info("creating spring context"); ctx = new ClassPathXmlApplicationContext( "org/kie/spring/persistence/persistence_beans_env.xml"); } catch (Exception e) { log.error("can't create spring context", e); throw new RuntimeException(e); } } @After public void destroySpringContext() { log.info("destroy spring context"); } @Test public void testPersistenceWorkItems() throws Exception { log.info("---> get bean jpaSingleSessionCommandService"); KieSession service = (KieSession) ctx.getBean("jpaSingleSessionCommandService2"); log.info("---> 
create new SingleSessionCommandService"); int sessionId = service.getId(); log.info("---> created SingleSessionCommandService id: " + sessionId); ProcessInstance processInstance = service.startProcess("org.drools.test.TestProcess"); log.info("Started process instance {}", processInstance.getId()); TestWorkItemHandler handler = TestWorkItemHandler.getInstance(); WorkItem workItem = handler.getWorkItem(); assertNotNull(workItem); service.dispose(); final Environment env = (Environment) ctx.getBean("env"); /*Environment env = KnowledgeBaseFactory.newEnvironment(); env.set( EnvironmentName.ENTITY_MANAGER_FACTORY, ctx.getBean( "myEmf" ) ); env.set( EnvironmentName.TRANSACTION_MANAGER, ctx.getBean( "txManager" ) ); */ KieStoreServices kstore = (KieStoreServices) ctx.getBean("kstore1"); KieBase kbase1 = (KieBase) ctx.getBean("kb_persistence"); service = kstore.loadKieSession(sessionId, kbase1, null, env); processInstance = service.getProcessInstance(processInstance.getId()); assertNotNull(processInstance); service.dispose(); service = kstore.loadKieSession(sessionId, kbase1, null, env); service.getWorkItemManager().completeWorkItem(workItem.getId(), null); workItem = handler.getWorkItem(); assertNotNull(workItem); service.dispose(); service = kstore.loadKieSession(sessionId, kbase1, null, env); processInstance = service.getProcessInstance(processInstance.getId()); assertNotNull(processInstance); service.dispose(); service = kstore.loadKieSession(sessionId, kbase1, null, env); service.getWorkItemManager().completeWorkItem(workItem.getId(), null); workItem = handler.getWorkItem(); assertNotNull(workItem); service.dispose(); service = kstore.loadKieSession(sessionId, kbase1, null, env); processInstance = service.getProcessInstance(processInstance.getId()); service.dispose(); service = kstore.loadKieSession(sessionId, kbase1, null, env); service.getWorkItemManager().completeWorkItem(workItem.getId(), null); workItem = handler.getWorkItem(); assertNull(workItem); 
service.dispose(); service = kstore.loadKieSession(sessionId, kbase1, null, env); processInstance = service.getProcessInstance(processInstance.getId()); service.dispose(); } @Test public void testPersistenceWorkItemsUserTransaction() throws Exception { KieSession service = (KieSession) ctx.getBean("jpaSingleSessionCommandService2"); int sessionId = service.getId(); ProcessInstance processInstance = service.startProcess("org.drools.test.TestProcess"); log.info("Started process instance {}", processInstance.getId()); TestWorkItemHandler handler = TestWorkItemHandler.getInstance(); WorkItem workItem = handler.getWorkItem(); assertNotNull(workItem); service.dispose(); final Environment env = (Environment) ctx.getBean("env"); /*Environment env = KnowledgeBaseFactory.newEnvironment(); env.set( EnvironmentName.ENTITY_MANAGER_FACTORY, ctx.getBean( "myEmf" ) ); env.set( EnvironmentName.TRANSACTION_MANAGER, ctx.getBean( "txManager" ) ); */ KieStoreServices kstore = (KieStoreServices) ctx.getBean("kstore1"); KieBase kbase1 = (KieBase) ctx.getBean("kb_persistence"); service = kstore.loadKieSession(sessionId, kbase1, null, env); processInstance = service.getProcessInstance(processInstance.getId()); assertNotNull(processInstance); service.dispose(); service = kstore.loadKieSession(sessionId, kbase1, null, env); service.getWorkItemManager().completeWorkItem(workItem.getId(), null); workItem = handler.getWorkItem(); assertNotNull(workItem); service.dispose(); service = kstore.loadKieSession(sessionId, kbase1, null, env); processInstance = service.getProcessInstance(processInstance.getId()); assertNotNull(processInstance); service.dispose(); service = kstore.loadKieSession(sessionId, kbase1, null, env); service.getWorkItemManager().abortWorkItem(workItem.getId()); workItem = handler.getWorkItem(); assertNotNull(workItem); service.dispose(); service = kstore.loadKieSession(sessionId, kbase1, null, env); processInstance = service.getProcessInstance(processInstance.getId()); 
assertNotNull(processInstance); service.dispose(); service = kstore.loadKieSession(sessionId, kbase1, null, env); service.getWorkItemManager().completeWorkItem(workItem.getId(), null); workItem = handler.getWorkItem(); assertNull(workItem); service.dispose(); service = kstore.loadKieSession(sessionId, kbase1, null, env); processInstance = service.getProcessInstance(processInstance.getId()); assertNull(processInstance); service.dispose(); } @SuppressWarnings("unused") private static KnowledgePackage getProcessWorkItems() { RuleFlowProcess process = new RuleFlowProcess(); process.setId("org.drools.test.TestProcess"); process.setName("TestProcess"); process.setPackageName("org.drools.test"); StartNode start = new StartNode(); start.setId(1); start.setName("Start"); process.addNode(start); ActionNode actionNode = new ActionNode(); actionNode.setId(2); actionNode.setName("Action"); DroolsConsequenceAction action = new DroolsConsequenceAction(); action.setDialect("java"); action.setConsequence("System.out.println(\"Executed action\");"); actionNode.setAction(action); process.addNode(actionNode); new ConnectionImpl( start, Node.CONNECTION_DEFAULT_TYPE, actionNode, Node.CONNECTION_DEFAULT_TYPE); WorkItemNode workItemNode = new WorkItemNode(); workItemNode.setId(3); workItemNode.setName("WorkItem1"); Work work = new WorkImpl(); work.setName("MyWork"); workItemNode.setWork(work); process.addNode(workItemNode); new ConnectionImpl( actionNode, Node.CONNECTION_DEFAULT_TYPE, workItemNode, Node.CONNECTION_DEFAULT_TYPE); WorkItemNode workItemNode2 = new WorkItemNode(); workItemNode2.setId(4); workItemNode2.setName("WorkItem2"); work = new WorkImpl(); work.setName("MyWork"); workItemNode2.setWork(work); process.addNode(workItemNode2); new ConnectionImpl( workItemNode, Node.CONNECTION_DEFAULT_TYPE, workItemNode2, Node.CONNECTION_DEFAULT_TYPE); WorkItemNode workItemNode3 = new WorkItemNode(); workItemNode3.setId(5); workItemNode3.setName("WorkItem3"); work = new WorkImpl(); 
work.setName("MyWork"); workItemNode3.setWork(work); process.addNode(workItemNode3); new ConnectionImpl( workItemNode2, Node.CONNECTION_DEFAULT_TYPE, workItemNode3, Node.CONNECTION_DEFAULT_TYPE); EndNode end = new EndNode(); end.setId(6); end.setName("End"); process.addNode(end); new ConnectionImpl( workItemNode3, Node.CONNECTION_DEFAULT_TYPE, end, Node.CONNECTION_DEFAULT_TYPE); KnowledgeBuilderImpl packageBuilder = new KnowledgeBuilderImpl(); ProcessBuilderImpl processBuilder = new ProcessBuilderImpl(packageBuilder); processBuilder.buildProcess(process, null); return packageBuilder.getPackage(); } public static void writePackage(KnowledgePackage pkg, File dest) { dest.deleteOnExit(); OutputStream out = null; try { out = new BufferedOutputStream(new FileOutputStream(dest)); DroolsStreamUtils.streamOut(out, pkg); } catch (Exception e) { throw new RuntimeException(e); } finally { if (out != null) { try { out.close(); } catch (IOException e) { } } } } @Test public void testPersistenceSubProcess() { KieSession service = (KieSession) ctx.getBean("jpaSingleSessionCommandService2"); int sessionId = service.getId(); RuleFlowProcessInstance processInstance = (RuleFlowProcessInstance) service.startProcess("org.drools.test.ProcessSubProcess"); log.info("Started process instance {}", processInstance.getId()); long processInstanceId = processInstance.getId(); TestWorkItemHandler handler = TestWorkItemHandler.getInstance(); WorkItem workItem = handler.getWorkItem(); assertNotNull(workItem); service.dispose(); final Environment env = (Environment) ctx.getBean("env"); /*Environment env = KnowledgeBaseFactory.newEnvironment(); env.set( EnvironmentName.ENTITY_MANAGER_FACTORY, ctx.getBean( "myEmf" ) ); env.set( EnvironmentName.TRANSACTION_MANAGER, ctx.getBean( "txManager" ) ); */ KieStoreServices kstore = (KieStoreServices) ctx.getBean("kstore1"); KieBase kbase1 = (KieBase) ctx.getBean("kb_persistence"); service = kstore.loadKieSession(sessionId, kbase1, null, env); processInstance = 
(RuleFlowProcessInstance) service.getProcessInstance(processInstanceId); assertNotNull(processInstance); Collection<NodeInstance> nodeInstances = processInstance.getNodeInstances(); assertEquals(1, nodeInstances.size()); SubProcessNodeInstance subProcessNodeInstance = (SubProcessNodeInstance) nodeInstances.iterator().next(); long subProcessInstanceId = subProcessNodeInstance.getProcessInstanceId(); RuleFlowProcessInstance subProcessInstance = (RuleFlowProcessInstance) service.getProcessInstance(subProcessInstanceId); assertNotNull(subProcessInstance); service.dispose(); service = kstore.loadKieSession(sessionId, kbase1, null, env); service.getWorkItemManager().completeWorkItem(workItem.getId(), null); service.dispose(); service = kstore.loadKieSession(sessionId, kbase1, null, env); subProcessInstance = (RuleFlowProcessInstance) service.getProcessInstance(subProcessInstanceId); assertNull(subProcessInstance); processInstance = (RuleFlowProcessInstance) service.getProcessInstance(processInstanceId); assertNull(processInstance); service.dispose(); } @SuppressWarnings("unused") private static KnowledgePackage getProcessSubProcess() { RuleFlowProcess process = new RuleFlowProcess(); process.setId("org.drools.test.ProcessSubProcess"); process.setName("ProcessSubProcess"); process.setPackageName("org.drools.test"); StartNode start = new StartNode(); start.setId(1); start.setName("Start"); process.addNode(start); ActionNode actionNode = new ActionNode(); actionNode.setId(2); actionNode.setName("Action"); DroolsConsequenceAction action = new DroolsConsequenceAction(); action.setDialect("java"); action.setConsequence("System.out.println(\"Executed action\");"); actionNode.setAction(action); process.addNode(actionNode); new ConnectionImpl( start, Node.CONNECTION_DEFAULT_TYPE, actionNode, Node.CONNECTION_DEFAULT_TYPE); SubProcessNode subProcessNode = new SubProcessNode(); subProcessNode.setId(3); subProcessNode.setName("SubProcess"); 
subProcessNode.setProcessId("org.drools.test.SubProcess"); process.addNode(subProcessNode); new ConnectionImpl( actionNode, Node.CONNECTION_DEFAULT_TYPE, subProcessNode, Node.CONNECTION_DEFAULT_TYPE); EndNode end = new EndNode(); end.setId(4); end.setName("End"); process.addNode(end); new ConnectionImpl( subProcessNode, Node.CONNECTION_DEFAULT_TYPE, end, Node.CONNECTION_DEFAULT_TYPE); KnowledgeBuilderImpl packageBuilder = new KnowledgeBuilderImpl(); ProcessBuilderImpl processBuilder = new ProcessBuilderImpl(packageBuilder); processBuilder.buildProcess(process, null); process = new RuleFlowProcess(); process.setId("org.drools.test.SubProcess"); process.setName("SubProcess"); process.setPackageName("org.drools.test"); start = new StartNode(); start.setId(1); start.setName("Start"); process.addNode(start); actionNode = new ActionNode(); actionNode.setId(2); actionNode.setName("Action"); action = new DroolsConsequenceAction(); action.setDialect("java"); action.setConsequence("System.out.println(\"Executed action\");"); actionNode.setAction(action); process.addNode(actionNode); new ConnectionImpl( start, Node.CONNECTION_DEFAULT_TYPE, actionNode, Node.CONNECTION_DEFAULT_TYPE); WorkItemNode workItemNode = new WorkItemNode(); workItemNode.setId(3); workItemNode.setName("WorkItem1"); Work work = new WorkImpl(); work.setName("MyWork"); workItemNode.setWork(work); process.addNode(workItemNode); new ConnectionImpl( actionNode, Node.CONNECTION_DEFAULT_TYPE, workItemNode, Node.CONNECTION_DEFAULT_TYPE); end = new EndNode(); end.setId(6); end.setName("End"); process.addNode(end); new ConnectionImpl( workItemNode, Node.CONNECTION_DEFAULT_TYPE, end, Node.CONNECTION_DEFAULT_TYPE); processBuilder.buildProcess(process, null); return packageBuilder.getPackage(); } @Test public void testPersistenceTimer() throws Exception { System.out.println("TMPDIR == " + TMPDIR); log.info("---> get bean jpaSingleSessionCommandService2"); KieSession service = (KieSession) 
ctx.getBean("jpaSingleSessionCommandService2"); int sessionId = service.getId(); log.info("---> created SingleSessionCommandService id: " + sessionId); ProcessInstance processInstance = service.startProcess("org.drools.test.ProcessTimer"); long procId = processInstance.getId(); log.info("---> Started ProcessTimer id: {}", procId); service.dispose(); log.info("---> session disposed"); final Environment env = (Environment) ctx.getBean("env"); /* Environment env = KnowledgeBaseFactory.newEnvironment(); env.set( EnvironmentName.ENTITY_MANAGER_FACTORY, ctx.getBean( "myEmf" ) ); env.set( EnvironmentName.TRANSACTION_MANAGER, ctx.getBean( "txManager" ) ); */ KieStoreServices kstore = (KieStoreServices) ctx.getBean("kstore1"); KieBase kbase1 = (KieBase) ctx.getBean("kb_persistence"); service = kstore.loadKieSession(sessionId, kbase1, null, env); log.info("---> load session: " + sessionId); processInstance = service.getProcessInstance(procId); log.info("---> GetProcessInstanceCommand id: " + procId); assertNotNull(processInstance); Thread.sleep(1000); log.info("---> session disposed"); service.dispose(); service = kstore.loadKieSession(sessionId, kbase1, null, env); log.info("---> load session: " + sessionId); Thread.sleep(3000); log.info("---> GetProcessInstanceCommand id: " + procId); processInstance = service.getProcessInstance(procId); log.info("---> session disposed"); assertNull(processInstance); } @SuppressWarnings("unused") private static KnowledgePackage getProcessTimer() { RuleFlowProcess process = new RuleFlowProcess(); process.setId("org.drools.test.ProcessTimer"); process.setName("ProcessTimer"); process.setPackageName("org.drools.test"); StartNode start = new StartNode(); start.setId(1); start.setName("Start"); process.addNode(start); TimerNode timerNode = new TimerNode(); timerNode.setId(2); timerNode.setName("Timer"); Timer timer = new Timer(); timer.setDelay("2000"); timerNode.setTimer(timer); process.addNode(timerNode); new ConnectionImpl( start, 
Node.CONNECTION_DEFAULT_TYPE, timerNode, Node.CONNECTION_DEFAULT_TYPE); ActionNode actionNode = new ActionNode(); actionNode.setId(3); actionNode.setName("Action"); DroolsConsequenceAction action = new DroolsConsequenceAction(); action.setDialect("java"); action.setConsequence("System.out.println(\"Executed action\");"); actionNode.setAction(action); process.addNode(actionNode); new ConnectionImpl( timerNode, Node.CONNECTION_DEFAULT_TYPE, actionNode, Node.CONNECTION_DEFAULT_TYPE); EndNode end = new EndNode(); end.setId(6); end.setName("End"); process.addNode(end); new ConnectionImpl(actionNode, Node.CONNECTION_DEFAULT_TYPE, end, Node.CONNECTION_DEFAULT_TYPE); KnowledgeBuilderImpl packageBuilder = new KnowledgeBuilderImpl(); ProcessBuilderImpl processBuilder = new ProcessBuilderImpl(packageBuilder); processBuilder.buildProcess(process, null); return packageBuilder.getPackage(); } @Test public void testPersistenceTimer2() throws Exception { KieSession service = (KieSession) ctx.getBean("jpaSingleSessionCommandService2"); int sessionId = service.getId(); ProcessInstance processInstance = service.startProcess("org.drools.test.ProcessTimer2"); log.info("Started process instance {}", processInstance.getId()); Thread.sleep(2000); final Environment env = (Environment) ctx.getBean("env"); /* Environment env = KnowledgeBaseFactory.newEnvironment(); env.set( EnvironmentName.ENTITY_MANAGER_FACTORY, ctx.getBean( "myEmf" ) ); env.set( EnvironmentName.TRANSACTION_MANAGER, ctx.getBean( "txManager" ) ); */ KieStoreServices kstore = (KieStoreServices) ctx.getBean("kstore1"); KieBase kbase1 = (KieBase) ctx.getBean("kb_persistence"); service = kstore.loadKieSession(sessionId, kbase1, null, env); processInstance = service.getProcessInstance(processInstance.getId()); assertNull(processInstance); } @SuppressWarnings("unused") private static KnowledgePackage getProcessTimer2() { RuleFlowProcess process = new RuleFlowProcess(); process.setId("org.drools.test.ProcessTimer2"); 
process.setName("ProcessTimer2"); process.setPackageName("org.drools.test"); StartNode start = new StartNode(); start.setId(1); start.setName("Start"); process.addNode(start); TimerNode timerNode = new TimerNode(); timerNode.setId(2); timerNode.setName("Timer"); Timer timer = new Timer(); timer.setDelay("0"); timerNode.setTimer(timer); process.addNode(timerNode); new ConnectionImpl( start, Node.CONNECTION_DEFAULT_TYPE, timerNode, Node.CONNECTION_DEFAULT_TYPE); ActionNode actionNode = new ActionNode(); actionNode.setId(3); actionNode.setName("Action"); DroolsConsequenceAction action = new DroolsConsequenceAction(); action.setDialect("java"); action.setConsequence( "try { Thread.sleep(1000); } catch (Throwable t) {} System.out.println(\"Executed action\");"); actionNode.setAction(action); process.addNode(actionNode); new ConnectionImpl( timerNode, Node.CONNECTION_DEFAULT_TYPE, actionNode, Node.CONNECTION_DEFAULT_TYPE); EndNode end = new EndNode(); end.setId(6); end.setName("End"); process.addNode(end); new ConnectionImpl(actionNode, Node.CONNECTION_DEFAULT_TYPE, end, Node.CONNECTION_DEFAULT_TYPE); KnowledgeBuilderImpl packageBuilder = new KnowledgeBuilderImpl(); ProcessBuilderImpl processBuilder = new ProcessBuilderImpl(packageBuilder); processBuilder.buildProcess(process, null); return packageBuilder.getPackage(); } }
/** * Note that actual use of MergeSummaryHoldings is a call to main() from a shell script, so these * tests must use the CommandLineUtils * * @author Naomi Dushay */ public class MergeSummaryHoldingsTests { static String testDir = "test"; static String testDataParentPath = System.getProperty("test.data.path"); static String testConfigFname = System.getProperty("test.config.file"); { if (testDataParentPath == null) fail("property test.data.path must be defined for the tests to run"); // static String testDataParentPath = testDir + File.separator + "data"; if (testConfigFname == null) fail("property test.config.file must be defined for the tests to run"); // static String testConfigFile = smokeTestDir + File.separator + "test_config.properties"; } static String smokeTestDir = testDataParentPath + File.separator + "smoketest"; static String MERGE_MHLD_CLASS_NAME = "org.solrmarc.tools.MergeSummaryHoldings"; static String MARC_PRINTER_CLASS_NAME = "org.solrmarc.marc.MarcPrinter"; static String MAIN_METHOD_NAME = "main"; // for vetting results - no point in loading these constants for each test static Map<String, Record> ALL_MERGED_BIB_RESULTS = new HashMap<String, Record>(); static Map<String, Record> ALL_UNMERGED_BIBS = new HashMap<String, Record>(); static { String bibFilePath = testDataParentPath + File.separator + "mhldMergeBibs1346.mrc"; try { RawRecordReader rawRecRdr = new RawRecordReader(new FileInputStream(new File(bibFilePath))); while (rawRecRdr.hasNext()) { RawRecord rawRec = rawRecRdr.next(); Record rec = rawRec.getAsRecord(true, false, "999", "MARC8"); String id = rec.getControlNumber(); // String id = RecordTestingUtils.getRecordIdFrom001(rec); ALL_UNMERGED_BIBS.put(id, rec); } } catch (FileNotFoundException e) { e.printStackTrace(); } bibFilePath = testDataParentPath + File.separator + "mhldMerged1346.mrc"; try { RawRecordReader rawRecRdr = new RawRecordReader(new FileInputStream(new File(bibFilePath))); while (rawRecRdr.hasNext()) { RawRecord rawRec = 
rawRecRdr.next(); Record rec = rawRec.getAsRecord(true, false, "999", "MARC8"); String id = rec.getControlNumber(); // String id = RecordTestingUtils.getRecordIdFrom001(rec); ALL_MERGED_BIB_RESULTS.put(id, rec); } } catch (FileNotFoundException e) { e.printStackTrace(); } } @Before public void setUp() { if (!Boolean.parseBoolean(System.getProperty("test.solr.verbose"))) { java.util.logging.Logger.getLogger("org.apache.solr") .setLevel(java.util.logging.Level.SEVERE); Utils.setLog4jLogLevel(org.apache.log4j.Level.WARN); } testDataParentPath = System.getProperty("test.data.path"); testConfigFname = System.getProperty("test.config.file"); // System.out.println("-----testDataParentPath = "+testDataParentPath); } /** code should output the unchanged bib records if no mhlds match */ @Test public void testNoMatches() throws IOException { // bib46, mhld235 String bibFilePath = testDataParentPath + File.separator + "mhldMergeBibs46.mrc"; String mhldFilePath = testDataParentPath + File.separator + "mhldMergeMhlds235.mrc"; Map<String, Record> mergedRecs = MergeSummaryHoldings.mergeMhldsIntoBibRecordsAsMap(bibFilePath, mhldFilePath); Set<String> mergedRecIds = mergedRecs.keySet(); assertEquals(2, mergedRecIds.size()); // result bibs should match the bib input because there was no merge String id = "a4"; RecordTestingUtils.assertEquals(ALL_UNMERGED_BIBS.get(id), mergedRecs.get(id)); id = "a6"; RecordTestingUtils.assertEquals(ALL_UNMERGED_BIBS.get(id), mergedRecs.get(id)); System.out.println("Test testNoMatches() successful"); } /** code should end smoothly if it encounters no matches between bib and mhld */ @Test public void testNoOutputMessagesWhenNoMatches() throws IOException { // bib46, mhld235 String bibFilePath = testDataParentPath + File.separator + "mhldMergeBibs46.mrc"; String mhldFilePath = testDataParentPath + File.separator + "mhldMergeMhlds235.mrc"; // ensure no error message was printed ByteArrayOutputStream sysBAOS = new ByteArrayOutputStream(); 
mergeBibAndMhldFiles(bibFilePath, mhldFilePath, sysBAOS); // ensure no error message was printed assertTrue( "Output messages written when none were expected: " + sysBAOS.toString(), sysBAOS.size() == 0); System.out.println("Test testNoOutputMessagesWhenNoMatches() successful"); } // first record in file tests ---------- /** code should find a match when first bib matches first mhld */ @Test public void testBothFirstRecsMatch() throws IOException { // bib346, mhld34 String bibFilePath = testDataParentPath + File.separator + "mhldMergeBibs346.mrc"; String mhldFilePath = testDataParentPath + File.separator + "mhldMergeMhlds34.mrc"; Map<String, Record> mergedRecs = MergeSummaryHoldings.mergeMhldsIntoBibRecordsAsMap(bibFilePath, mhldFilePath); // there should be 3 results Set<String> mergedRecIds = mergedRecs.keySet(); assertEquals(3, mergedRecIds.size()); // result bibs 3, 4 should have the mhld fields String id = "a3"; RecordTestingUtils.assertEqualsIgnoreLeader(ALL_MERGED_BIB_RESULTS.get(id), mergedRecs.get(id)); id = "a4"; RecordTestingUtils.assertEqualsIgnoreLeader(ALL_MERGED_BIB_RESULTS.get(id), mergedRecs.get(id)); // result bib 6 should not be changed id = "a6"; RecordTestingUtils.assertEquals(ALL_UNMERGED_BIBS.get(id), mergedRecs.get(id)); System.out.println("Test testBothFirstRecsMatch() successful"); } /** code should find a match when first bib matches non-first mhld */ @Test public void testFirstBibMatchesNonFirstMhld() throws IOException { // bib346, mhld235 String bibFilePath = testDataParentPath + File.separator + "mhldMergeBibs346.mrc"; String mhldFilePath = testDataParentPath + File.separator + "mhldMergeMhlds235.mrc"; Map<String, Record> mergedRecs = MergeSummaryHoldings.mergeMhldsIntoBibRecordsAsMap(bibFilePath, mhldFilePath); // there should be 3 results Set<String> mergedRecIds = mergedRecs.keySet(); assertEquals(3, mergedRecIds.size()); // result bib 3 only should have the mhld fields String id = "a3"; 
RecordTestingUtils.assertEqualsIgnoreLeader(ALL_MERGED_BIB_RESULTS.get(id), mergedRecs.get(id)); // result bibs 4 and 6 should not be changed id = "a4"; RecordTestingUtils.assertEqualsIgnoreLeader(ALL_UNMERGED_BIBS.get(id), mergedRecs.get(id)); id = "a6"; RecordTestingUtils.assertEquals(ALL_UNMERGED_BIBS.get(id), mergedRecs.get(id)); System.out.println("Test testFirstBibMatchesNonFirstMhld() successful"); } /** code should find a match when non-first bib matches first mhld */ @Test public void testNonFirstBibMatchesFirstMhld() throws IOException { // bib134, mhld345 String bibFilePath = testDataParentPath + File.separator + "mhldMergeBibs134.mrc"; String mhldFilePath = testDataParentPath + File.separator + "mhldMergeMhlds345.mrc"; Map<String, Record> mergedRecs = MergeSummaryHoldings.mergeMhldsIntoBibRecordsAsMap(bibFilePath, mhldFilePath); // there should be 3 results Set<String> mergedRecIds = mergedRecs.keySet(); assertEquals(3, mergedRecIds.size()); // result bibs 3 and 4 only should have the mhld fields String id = "a1"; RecordTestingUtils.assertEqualsIgnoreLeader(ALL_UNMERGED_BIBS.get(id), mergedRecs.get(id)); id = "a3"; RecordTestingUtils.assertEqualsIgnoreLeader(ALL_MERGED_BIB_RESULTS.get(id), mergedRecs.get(id)); id = "a4"; RecordTestingUtils.assertEqualsIgnoreLeader(ALL_MERGED_BIB_RESULTS.get(id), mergedRecs.get(id)); System.out.println("Test testNonFirstBibMatchesFirstMhld() successful"); } // last record in file tests ------------ /** code should find a match when last bib matches last mhld */ @Test public void testBothLastRecsMatch() throws IOException { // bib46, mhld236 String bibFilePath = testDataParentPath + File.separator + "mhldMergeBibs46.mrc"; String mhldFilePath = testDataParentPath + File.separator + "mhldMergeMhlds236.mrc"; Map<String, Record> mergedRecs = MergeSummaryHoldings.mergeMhldsIntoBibRecordsAsMap(bibFilePath, mhldFilePath); // there should be 2 results Set<String> mergedRecIds = mergedRecs.keySet(); assertEquals(2, 
mergedRecIds.size()); // result bib 6 only should have the mhld fields String id = "a4"; RecordTestingUtils.assertEqualsIgnoreLeader(ALL_UNMERGED_BIBS.get(id), mergedRecs.get(id)); id = "a6"; RecordTestingUtils.assertEqualsIgnoreLeader(ALL_MERGED_BIB_RESULTS.get(id), mergedRecs.get(id)); System.out.println("Test testBothLastRecsMatch() successful"); } /** code should find a match when last bib matches non-last mhld */ @Test public void testLastBibMatchesNonLastMhld() throws IOException { // bib134, mhld345 String bibFilePath = testDataParentPath + File.separator + "mhldMergeBibs134.mrc"; String mhldFilePath = testDataParentPath + File.separator + "mhldMergeMhlds345.mrc"; Map<String, Record> mergedRecs = MergeSummaryHoldings.mergeMhldsIntoBibRecordsAsMap(bibFilePath, mhldFilePath); // there should be 3 results Set<String> mergedRecIds = mergedRecs.keySet(); assertEquals(3, mergedRecIds.size()); // result bibs 3 and 4 only should have the mhld fields String id = "a1"; RecordTestingUtils.assertEqualsIgnoreLeader(ALL_UNMERGED_BIBS.get(id), mergedRecs.get(id)); id = "a3"; RecordTestingUtils.assertEqualsIgnoreLeader(ALL_MERGED_BIB_RESULTS.get(id), mergedRecs.get(id)); id = "a4"; RecordTestingUtils.assertEqualsIgnoreLeader(ALL_MERGED_BIB_RESULTS.get(id), mergedRecs.get(id)); System.out.println("Test testLastBibMatchesNonLastMhld() successful"); } /** code should find a match when non-last bib matches last mhld */ @Test public void testNonLastBibMatchesLastMhld() throws IOException { // bib46, mhld34 String bibFilePath = testDataParentPath + File.separator + "mhldMergeBibs46.mrc"; String mhldFilePath = testDataParentPath + File.separator + "mhldMergeMhlds34.mrc"; Map<String, Record> mergedRecs = MergeSummaryHoldings.mergeMhldsIntoBibRecordsAsMap(bibFilePath, mhldFilePath); // there should be 2 results Set<String> mergedRecIds = mergedRecs.keySet(); assertEquals(2, mergedRecIds.size()); // result bib 6 only should have the mhld fields String id = "a4"; 
RecordTestingUtils.assertEqualsIgnoreLeader(ALL_MERGED_BIB_RESULTS.get(id), mergedRecs.get(id)); id = "a6"; RecordTestingUtils.assertEqualsIgnoreLeader(ALL_UNMERGED_BIBS.get(id), mergedRecs.get(id)); System.out.println("Test testLastBibMatchesNonLastMhld() successful"); } /** need to ensure all the MHLD data is included, not just the first record */ // FIXME: fails! needs MarcCombiningReader for mhld or at least a diff version of RawRecordReader @Test public void testMultMHLDsWithSameID() throws IOException { // bib134, multMhlds1 String bibFilePath = testDataParentPath + File.separator + "mhldMergeBibs134.mrc"; String mhldFilePath = testDataParentPath + File.separator + "mhldMergeMhlds1Mult.mrc"; Map<String, Record> mergedRecs = MergeSummaryHoldings.mergeMhldsIntoBibRecordsAsMap(bibFilePath, mhldFilePath); Record mergedRec = mergedRecs.get("a1"); assertEquals("Expected three 852", 3, mergedRec.getVariableFields("852").size()); Set<String> expectedVals = new HashSet<String>(); expectedVals.add("Location1"); expectedVals.add("Location2"); RecordTestingUtils.assertSubfieldHasExpectedValues(mergedRec, "852", 'b', expectedVals); expectedVals.clear(); expectedVals.add("(month)"); expectedVals.add("(season)"); RecordTestingUtils.assertSubfieldHasExpectedValues(mergedRec, "853", 'b', expectedVals); assertEquals("Expected one 863", 2, mergedRec.getVariableFields("863").size()); assertEquals("Expected one 866", 1, mergedRec.getVariableFields("866").size()); // fail("Implement me"); System.out.println("Test testMultMHLDsWithSameID() successful"); } /** * the MHLD fields should only be merged into ONE of the bibs, if the bibs will be combined? Or * it's probably ok if they are in each bib, as they should be removed from the bib after * processing? 
*/ // @Test public void testMultBibsWithSameID() { // multBibs4, mhld 34 fail("Implement me"); } /** need to ensure all the MHLD data is included, not just the first record */ public void testMultBothWithSameID() {} /** the bib record should only get the fields specified */ public void testFieldsToMerge() {} /** the bib record should not get any MHLD fields that aren't indicated for the merge */ public void testFieldsNotToMerge() {} /** * if the MHLD has more than one instance of a field, all instances should be put in the bib * record */ public void testMultOccurFieldsToMerge() {} /** * if the bib rec has existing MHLD fields (not from another MHLD record?) then it should remove * them before adding the MHLD fields */ // @Test public void testCrashingBibFieldsRemoved() { // bibWmhldFlds, completeMhld fail("implement me"); } // Tests for very basic functionality of code, including Bob's original test (with some // modifications to run as a more typical junit test) String mergedSummaryHoldingsOutput[] = { "LEADER 02429nas a2200481 a 4500", "001 u335", "003 SIRSI", "008 840508c19799999gw fu p 0uuub0ger d", "035 $a(Sirsi) o10701458", "035 $a(OCoLC)10701458", "040 $aVA@$cVA@", "049 $aVAS@", "090 $aAP30$b.T75$mVAS@$qALDERMAN", "245 00$aTumult.", "246 13$aZeitschrift für Verkehrswissenschaft", "260 $aBerlin :$bMerve Verlag,$c1979-", "300 $av. :$bill. ;$c24 cm.", "310 $aSemiannual", "362 0 $a1-", "500 $aTitle from cover; imprint varies.", "599 $a2$b(YR.) 2008 NO. 34;$b(YR.) 2008 NO. 33;$bNR. 32 2007;", "596 $a2", "515 $aNone published 1980-1981.", "852 $bALDERMAN$cALD-STKS$xpat#169090$x2x$xbind 4N=2 or 3yrs$xex.: Nr. 
15-18 1988-93$xindex ?$xuse copyright year for dating$zCURRENT ISSUES HELD IN THE PERIODICALS ROOM $x5071", "853 2 $82$anr.", "853 2 $83$anr.$i(year)$j(unit)", "853 2 $84$a(yr.)$bno.$u2$vc", "866 0$81$aNr.1-28 (1979-2004)$zIn stacks", "863 1$83.6$a29$i2005$j.", "863 1$83.7$a30$i2005$j.", "863 1$83.8$a31$i2006$j.", "863 1$83.9$a32$i2007", "863 1$84.1$a2008$b33", "863 1$84.2$a2008$b34", "999 $aAP30 .T75 Nr.7-10 1983-87$wLCPER$c1$iX001614137$d5/9/2008$lALD-STKS$mALDERMAN$n2$q3$rY$sY$tBOUND-JRNL$u6/28/1996$xH-NOTIS", "999 $aAP30 .T75 Nr.1-3 1979-82$wLCPER$c1$iX000769605$d4/8/2009$lALD-STKS$mALDERMAN$q2$rY$sY$tBOUND-JRNL$u6/28/1996$xH-NOTIS", "999 $aAP30 .T75 Nr.4-6 1982-83$wLCPER$c1$iX000764174$d5/21/2002$lALD-STKS$mALDERMAN$q5$rY$sY$tBOUND-JRNL$u6/28/1996$xH-NOTIS", "999 $aAP30 .T75 Nr.11-14 1988-90$wLCPER$c1$iX002128357$d1/27/2010$lALD-STKS$mALDERMAN$n1$q1$rY$sY$tBOUND-JRNL$u6/28/1996$xH-NOTIS", "999 $aAP30 .T75 Nr.15-18 1991-93$wLCPER$c1$iX002509913$d11/11/1994$lALD-STKS$mALDERMAN$n1$rY$sY$tBOUND-JRNL$u6/28/1996$xH-NOTIS", "999 $aAP30 .T75 Periodical order-001$wLCPER$c1$i335-6001$d1/11/1999$lALD-STKS$mALDERMAN$rY$sY$tBOUND-JRNL$u12/18/1996", "999 $aAP30 .T75 Nr.19-22 1994-96$wLCPER$c1$iX006060933$d7/23/1998$e5/26/1998$lALD-STKS$mALDERMAN$n1$rY$sY$tBOUND-JRNL$u5/26/1998$xADD", "999 $aAP30 .T75 Nr.25-28 2001-2004$wLCPER$c1$iX030047292$d2/12/2007$e1/23/2007$lALD-STKS$mALDERMAN$q1$rY$sY$tBOUND-JRNL$u1/22/2007$xADD", "999 $aAP30 .T75 Nr.23-24 1998-1999$wLCPER$c1$iX006166304$d4/5/2007$e3/13/2007$lALD-STKS$mALDERMAN$rY$sY$tBOUND-JRNL$u3/12/2007$xADD", }; String mergedSummaryHoldingsOutputNoUmlaut[] = { "LEADER 02429nas a2200481 a 4500", "001 u335", "003 SIRSI", "008 840508c19799999gw fu p 0uuub0ger d", "035 $a(Sirsi) o10701458", "035 $a(OCoLC)10701458", "040 $aVA@$cVA@", "049 $aVAS@", "090 $aAP30$b.T75$mVAS@$qALDERMAN", "245 00$aTumult.", "246 13$aZeitschrift fèur Verkehrswissenschaft", "260 $aBerlin :$bMerve Verlag,$c1979-", "300 $av. :$bill. 
;$c24 cm.", "310 $aSemiannual", "362 0 $a1-", "500 $aTitle from cover; imprint varies.", "599 $a2$b(YR.) 2008 NO. 34;$b(YR.) 2008 NO. 33;$bNR. 32 2007;", "596 $a2", "515 $aNone published 1980-1981.", "852 $bALDERMAN$cALD-STKS$xpat#169090$x2x$xbind 4N=2 or 3yrs$xex.: Nr. 15-18 1988-93$xindex ?$xuse copyright year for dating$zCURRENT ISSUES HELD IN THE PERIODICALS ROOM $x5071", "853 2 $82$anr.", "853 2 $83$anr.$i(year)$j(unit)", "853 2 $84$a(yr.)$bno.$u2$vc", "866 0$81$aNr.1-28 (1979-2004)$zIn stacks", "863 1$83.6$a29$i2005$j.", "863 1$83.7$a30$i2005$j.", "863 1$83.8$a31$i2006$j.", "863 1$83.9$a32$i2007", "863 1$84.1$a2008$b33", "863 1$84.2$a2008$b34", "999 $aAP30 .T75 Nr.7-10 1983-87$wLCPER$c1$iX001614137$d5/9/2008$lALD-STKS$mALDERMAN$n2$q3$rY$sY$tBOUND-JRNL$u6/28/1996$xH-NOTIS", "999 $aAP30 .T75 Nr.1-3 1979-82$wLCPER$c1$iX000769605$d4/8/2009$lALD-STKS$mALDERMAN$q2$rY$sY$tBOUND-JRNL$u6/28/1996$xH-NOTIS", "999 $aAP30 .T75 Nr.4-6 1982-83$wLCPER$c1$iX000764174$d5/21/2002$lALD-STKS$mALDERMAN$q5$rY$sY$tBOUND-JRNL$u6/28/1996$xH-NOTIS", "999 $aAP30 .T75 Nr.11-14 1988-90$wLCPER$c1$iX002128357$d1/27/2010$lALD-STKS$mALDERMAN$n1$q1$rY$sY$tBOUND-JRNL$u6/28/1996$xH-NOTIS", "999 $aAP30 .T75 Nr.15-18 1991-93$wLCPER$c1$iX002509913$d11/11/1994$lALD-STKS$mALDERMAN$n1$rY$sY$tBOUND-JRNL$u6/28/1996$xH-NOTIS", "999 $aAP30 .T75 Periodical order-001$wLCPER$c1$i335-6001$d1/11/1999$lALD-STKS$mALDERMAN$rY$sY$tBOUND-JRNL$u12/18/1996", "999 $aAP30 .T75 Nr.19-22 1994-96$wLCPER$c1$iX006060933$d7/23/1998$e5/26/1998$lALD-STKS$mALDERMAN$n1$rY$sY$tBOUND-JRNL$u5/26/1998$xADD", "999 $aAP30 .T75 Nr.25-28 2001-2004$wLCPER$c1$iX030047292$d2/12/2007$e1/23/2007$lALD-STKS$mALDERMAN$q1$rY$sY$tBOUND-JRNL$u1/22/2007$xADD", "999 $aAP30 .T75 Nr.23-24 1998-1999$wLCPER$c1$iX006166304$d4/5/2007$e3/13/2007$lALD-STKS$mALDERMAN$rY$sY$tBOUND-JRNL$u3/12/2007$xADD", }; /** * This is Bob's original test, re-written only to allow it to execute as a normal junit test * within Eclipse. 
*/ @Test public void origTestOfRewritingMHLDtoSameBib() throws IOException { String mhldRecFileName = testDataParentPath + File.separator + "summaryHld_1-1000.mrc"; String bibRecFileName = testDataParentPath + File.separator + "u335.mrc"; InputStream inStr = null; ByteArrayOutputStream resultMrcOutStream = new ByteArrayOutputStream(); String[] mergeMhldArgs = new String[] {"-s", mhldRecFileName, bibRecFileName}; // call the code for mhldfile summaryHld_1-1000.mrc and bibfile u335.mrc CommandLineUtils.runCommandLineUtil( MERGE_MHLD_CLASS_NAME, MAIN_METHOD_NAME, inStr, resultMrcOutStream, mergeMhldArgs); RecordTestingUtils.assertMarcRecsEqual(mergedSummaryHoldingsOutput, resultMrcOutStream); // Now merge record again to test the deleting of existing summary holdings info ByteArrayInputStream mergedMarcBibRecAsInStream = new ByteArrayInputStream(resultMrcOutStream.toByteArray()); resultMrcOutStream.close(); resultMrcOutStream = new ByteArrayOutputStream(); // do the merge by piping the bib record in to the merge class CommandLineUtils.runCommandLineUtil( MERGE_MHLD_CLASS_NAME, MAIN_METHOD_NAME, mergedMarcBibRecAsInStream, resultMrcOutStream, new String[] {"-s", mhldRecFileName}); RecordTestingUtils.assertMarcRecsEqual(mergedSummaryHoldingsOutput, resultMrcOutStream); System.out.println("Test origTestOfRewritingMHLDtoSameBib() successful"); } /** test methods that return Map of ids to Records and no sysout stuff */ @Test public void testGettingOutputAsMapOfRecords() throws IOException { String mhldRecFileName = testDataParentPath + File.separator + "summaryHld_1-1000.mrc"; String bibRecFileName = testDataParentPath + File.separator + "u335.mrc"; Map<String, Record> mergedRecs = MergeSummaryHoldings.mergeMhldsIntoBibRecordsAsMap(bibRecFileName, mhldRecFileName); junit.framework.Assert.assertEquals("results should have 1 record", 1, mergedRecs.size()); String expId = "u335"; assertTrue("Record with id " + expId + " should be in results", mergedRecs.containsKey(expId)); 
Record resultRec = mergedRecs.get(expId); RecordTestingUtils.assertEqualsIgnoreLeader(mergedSummaryHoldingsOutputNoUmlaut, resultRec); System.out.println("Test testGettingOutputAsMapOfRecords() successful"); } /** Test if using Naomi's approach with next() works as well as weird way of duplicating code */ @Test public void testMergeToStdOut2() throws IOException { String mhldRecFileName = testDataParentPath + File.separator + "summaryHld_1-1000.mrc"; String bibRecFileName = testDataParentPath + File.separator + "u335.mrc"; ByteArrayOutputStream sysBAOS = new ByteArrayOutputStream(); PrintStream sysMsgs = new PrintStream(sysBAOS); MergeSummaryHoldings.mergeMhldRecsIntoBibRecsAsStdOut2( bibRecFileName, mhldRecFileName, sysMsgs); RecordTestingUtils.assertMarcRecsEqual(mergedSummaryHoldingsOutput, sysBAOS); System.out.println("Test testMergeToStdOut2() successful"); } // supporting methods for testing ---------------------------------------------- /** * @param bibRecsFileName name of the file containing Bib records, relative to the * testDataParentPath * @param mhldRecsFileName name of the file containing MHLD records, relative to the * testDataParentPath * @param outStream name of the a ByteArrayOutputStream to be used for output from running the * command * @return the resulting merged bib file as a ByteArrayOutputStream */ private ByteArrayOutputStream mergeBibAndMhldFiles( String bibRecsFileName, String mhldRecsFileName, ByteArrayOutputStream outStream) { // String fullBibRecsFileName = testDataParentPath + File.separator + bibRecsFileName; // String fullMhldRecsFileName = testDataParentPath + File.separator + mhldRecsFileName; InputStream inStr = null; // ByteArrayOutputStream resultMrcOutStream = new ByteArrayOutputStream(); String[] mergeMhldArgs = new String[] {"-s", bibRecsFileName, mhldRecsFileName}; // call the MergeSummaryHoldings code from the command line CommandLineUtils.runCommandLineUtil( MERGE_MHLD_CLASS_NAME, MAIN_METHOD_NAME, inStr, outStream, 
mergeMhldArgs); return outStream; } }
/**
 * Test of process method, of class GenbankReader.
 *
 * <p>Exercises three ways of reading GenBank data: a proxy reader keyed by accession id, the
 * GenbankReaderHelper convenience methods on classpath resources, and a GenbankReader built
 * directly from an InputStream plus header parser and sequence creator.
 */
@Test
public void testProcess() throws Throwable {
  /*
   * Method 1: With the GenbankProxySequenceReader
   */
  // Try with the GenbankProxySequenceReader.
  // NOTE(review): the proxy reader is given a cache directory (java.io.tmpdir) and an
  // accession id — presumably it fetches the record remotely when not cached, making this
  // part of the test network-dependent. TODO confirm.
  GenbankProxySequenceReader<AminoAcidCompound> genbankProteinReader =
      new GenbankProxySequenceReader<AminoAcidCompound>(
          System.getProperty("java.io.tmpdir"),
          "NP_000257",
          AminoAcidCompoundSet.getAminoAcidCompoundSet());
  ProteinSequence proteinSequence = new ProteinSequence(genbankProteinReader);
  // populate the sequence's metadata (accession etc.) from the parsed GenBank header
  genbankProteinReader
      .getHeaderParser()
      .parseHeader(genbankProteinReader.getHeader(), proteinSequence);
  // log accession, length, and the first 10 characters of the sequence
  logger.info(
      "Sequence({},{}) = {}...",
      proteinSequence.getAccession(),
      proteinSequence.getLength(),
      proteinSequence.getSequenceAsString().substring(0, 10));
  GenbankProxySequenceReader<NucleotideCompound> genbankDNAReader =
      new GenbankProxySequenceReader<NucleotideCompound>(
          System.getProperty("java.io.tmpdir"), "NM_001126", DNACompoundSet.getDNACompoundSet());
  DNASequence dnaSequence = new DNASequence(genbankDNAReader);
  genbankDNAReader.getHeaderParser().parseHeader(genbankDNAReader.getHeader(), dnaSequence);
  logger.info(
      "Sequence({},{}) = {}...",
      dnaSequence.getAccession(),
      dnaSequence.getLength(),
      dnaSequence.getSequenceAsString().substring(0, 10));

  /*
   * Method 2: With the GenbankReaderHelper
   */
  // Try with the GenbankReaderHelper, reading fixture files from the test classpath
  ClasspathResource dnaResource = new ClasspathResource("NM_000266.gb", true);
  // File dnaFile = new File("src/test/resources/NM_000266.gb");
  // File protFile = new File("src/test/resources/BondFeature.gb");
  ClasspathResource protResource = new ClasspathResource("BondFeature.gb");
  LinkedHashMap<String, DNASequence> dnaSequences =
      GenbankReaderHelper.readGenbankDNASequence(dnaResource.getInputStream());
  for (DNASequence sequence : dnaSequences.values()) {
    logger.info("DNA Sequence: {}", sequence.getSequenceAsString());
  }
  LinkedHashMap<String, ProteinSequence> protSequences =
      GenbankReaderHelper.readGenbankProteinSequence(protResource.getInputStream());
  for (ProteinSequence sequence : protSequences.values()) {
    logger.info("Protein Sequence: {}", sequence.getSequenceAsString());
  }

  /*
   * Method 3: With the GenbankReader Object
   */
  // Try reading with the GenbankReader directly, supplying the header parser and
  // sequence creator explicitly; process() returns the parsed sequences as a map
  GenbankReader<DNASequence, NucleotideCompound> dnaReader =
      new GenbankReader<DNASequence, NucleotideCompound>(
          dnaResource.getInputStream(),
          new GenericGenbankHeaderParser<DNASequence, NucleotideCompound>(),
          new DNASequenceCreator(DNACompoundSet.getDNACompoundSet()));
  dnaSequences = dnaReader.process();
  logger.info("DNA Sequence: {}", dnaSequences);

  GenbankReader<ProteinSequence, AminoAcidCompound> protReader =
      new GenbankReader<ProteinSequence, AminoAcidCompound>(
          protResource.getInputStream(),
          new GenericGenbankHeaderParser<ProteinSequence, AminoAcidCompound>(),
          new ProteinSequenceCreator(AminoAcidCompoundSet.getAminoAcidCompoundSet()));
  protSequences = protReader.process();
  logger.info("Protein Sequence: {}", protSequences);
}