/**
 * Configures the Logger (log4j) class used by the entire library to log what happens at run time.
 *
 * <p>Resolution order: (1) if the project's {@code log4j} properties are non-empty they are
 * handed to {@link PropertyConfigurator} verbatim; (2) otherwise levels, pattern, and log file
 * location are read from individual constants; (3) if anything at all goes wrong, a hard-coded
 * fallback under the user's home directory is used.
 *
 * @param logToConsole determines if log4j engine should log to the standard output or not.
 * @return a Logger class used to log events and functional meanings of the application.
 */
public Logger configure(boolean logToConsole) {
  Logger log = Logger.getLogger(clazz);
  boolean loadDefaults = false;
  String logPath = null;
  PatternLayout layout = null;
  Strings str = Strings.getOne();
  // Level names stay "" in the Throwable fallback path below; log4j's
  // Level.toLevel("") then resolves to DEBUG — presumably intended as a
  // permissive default, but TODO confirm.
  String logLevel = "", logConsoleLevel = "", logFileLevel = "";
  try {
    Constants c = Constants.getOne(clazz);
    Properties log4j = c.getLog4j();
    loadDefaults = log4j.isEmpty();
    if (!loadDefaults) {
      // A full log4j.properties set was supplied: let log4j configure itself.
      PropertyConfigurator.configure(log4j);
    } else {
      // No properties file: build the configuration from individual constants.
      logLevel = c.getConstant("log.level");
      logConsoleLevel = c.getConstant("log.console.level");
      logFileLevel = c.getConstant("log.file.level");
      logPath = str.safeConcatPaths(c.getConstant("log.dir"), c.getConstant("log.file"));
      if (!str.checkPathExistence(logPath)) {
        // Create both the directory chain and the (empty) log file.
        str.createPath(logPath);
        new File(logPath).createNewFile();
      }
      layout = new PatternLayout(c.getConstant("log.pattern"));
    }
  } catch (Throwable e) {
    // NOTE(review): the failure is swallowed here with no record of what went
    // wrong — consider at least printing/attaching `e` when revisiting this.
    // Fallback: log to <user.home>/.<FPM_DIR>/<MAIN_LOG_FILE> with defaults.
    logPath = str.safeConcatPaths(Constants.USER_HOME, "." + Constants.FPM_DIR);
    if (!str.checkPathExistence(logPath)) {
      str.createPath(logPath);
    }
    logPath = str.safeConcatPaths(logPath, Constants.MAIN_LOG_FILE);
    layout = new PatternLayout(Constants.DEFAULT_LOG_PATTERN);
    loadDefaults = true;
  }
  if (loadDefaults) {
    // Manual configuration path (constants or fallback): attach appenders here.
    log.setLevel(Level.toLevel(logLevel));
    if (logToConsole) {
      ConsoleAppender consapp = new ConsoleAppender(layout);
      consapp.setTarget("System.out");
      consapp.setThreshold(Level.toLevel(logConsoleLevel));
      log.addAppender(consapp);
    }
    FileAppender fileapp;
    try {
      fileapp = new FileAppender(layout, logPath);
      fileapp.setThreshold(Level.toLevel(logFileLevel));
      log.addAppender(fileapp);
    } catch (IOException e) {
      // Best-effort: console logging (if enabled) still works without the file.
      log.warn("FileAppender not initialized! ".concat(e.getMessage()));
    }
  }
  log.trace("Logger initialized!");
  return log;
}
/**
 * Closes this writer, committing the staged temp file to its final path when at
 * least one entity was written, or deleting it otherwise.
 *
 * <p>State machine: OPEN -> CLOSED on success, OPEN -> ERROR on any failure
 * (the thrown exception carries the detail), ERROR -> CLOSED on a subsequent
 * call. Calls in NEW or CLOSED states are silent no-ops.
 */
@Override
public final void close() {
  if (state.equals(ReaderWriterState.OPEN)) {
    try {
      appender.close();
    } catch (IOException e) {
      // NOTE(review): on a failed close the temp file is left behind and
      // appender.cleanup() is skipped; a later close() only flips ERROR ->
      // CLOSED without retrying — confirm leftover temp files are acceptable.
      this.state = ReaderWriterState.ERROR;
      throw new DatasetIOException("Failed to close appender " + appender, e);
    }

    if (count > 0) {
      // commit the temp file: rename makes the data visible atomically
      try {
        if (!fs.rename(tempPath, finalPath)) {
          this.state = ReaderWriterState.ERROR;
          throw new DatasetWriterException("Failed to move " + tempPath + " to " + finalPath);
        }
      } catch (IOException e) {
        this.state = ReaderWriterState.ERROR;
        throw new DatasetIOException("Failed to commit " + finalPath, e);
      }

      LOG.debug(
          "Committed {} for appender {} ({} entities)",
          new Object[] {finalPath, appender, count});
    } else {
      // discard the temp file: nothing was written, so leave no empty file
      try {
        if (!fs.delete(tempPath, true)) {
          this.state = ReaderWriterState.ERROR;
          throw new DatasetWriterException("Failed to delete " + tempPath);
        }
      } catch (IOException e) {
        this.state = ReaderWriterState.ERROR;
        throw new DatasetIOException("Failed to remove temporary file " + tempPath, e);
      }

      LOG.debug("Discarded {} ({} entities)", tempPath, count);
    }

    // Give the appender a chance to remove any auxiliary files it created.
    try {
      appender.cleanup();
    } catch (IOException e) {
      throw new DatasetIOException("Failed to clean up " + appender, e);
    }

    this.state = ReaderWriterState.CLOSED;
  } else if (state.equals(ReaderWriterState.ERROR)) {
    // A previous failure already reported the error; just finish closing.
    this.state = ReaderWriterState.CLOSED;
  }
}
/**
 * Opens the writer: ensures the target directory exists, allocates a unique
 * final path plus a temp staging path, and opens an appender on the temp path.
 *
 * <p>Transitions NEW -> OPEN, or NEW -> ERROR on any failure. Data is staged
 * in {@code tempPath} and only renamed to {@code finalPath} by close().
 */
@Override
public final void initialize() {
  Preconditions.checkState(
      state.equals(ReaderWriterState.NEW), "Unable to open a writer from state:%s", state);

  // ensure the directory exists
  try {
    fs.mkdirs(directory);
  } catch (IOException ex) {
    this.state = ReaderWriterState.ERROR;
    throw new DatasetIOException("Failed to create path " + directory, ex);
  }

  // initialize paths: unique name avoids clashes with concurrent writers
  this.finalPath = new Path(directory, uniqueFilename(descriptor.getFormat()));
  this.tempPath = tempFilename(finalPath);

  this.appender = newAppender(tempPath);

  try {
    appender.open();
  } catch (IOException e) {
    this.state = ReaderWriterState.ERROR;
    throw new DatasetIOException("Failed to open appender " + appender, e);
  }

  // Entity counter drives the commit-vs-discard decision in close().
  this.count = 0;

  LOG.debug("Opened appender {} for {}", appender, finalPath);

  this.state = ReaderWriterState.OPEN;
}
/**
 * Sets the output file and, when appending, seeds the byte counter used for
 * size-based rollover with the existing file's length.
 *
 * <p>NOTE(review): the {@code bufferedIO} and {@code bufferSize} parameters are
 * shadowed — the super call deliberately forwards the instance fields
 * {@code this.bufferedIO}/{@code this.bufferSize} instead of the arguments.
 * This matches the upstream log4j RollingFileAppender source; confirm it is
 * intended before "fixing" it.
 *
 * @param fileName path of the log file to open
 * @param append whether to append to an existing file
 * @param bufferedIO ignored (instance field used instead)
 * @param bufferSize ignored (instance field used instead)
 * @throws IOException if the file cannot be opened
 */
public synchronized void setFile(
    String fileName, boolean append, boolean bufferedIO, int bufferSize) throws IOException {
  super.setFile(fileName, append, this.bufferedIO, this.bufferSize);
  if (append) {
    // When appending, start the rollover counter at the current file size so
    // the max-size threshold accounts for bytes already on disk.
    File f = new File(fileName);
    ((CountingQuietWriter) qw).setCount(f.length());
  }
}
/**
 * Flushes any buffered entities in the underlying appender to its destination.
 *
 * <p>On failure the writer transitions to the ERROR state and must not be
 * written to again.
 *
 * @throws IllegalStateException if the writer is not in the OPEN state
 * @throws DatasetWriterException if the underlying appender fails to flush
 */
@Override
public void flush() {
  Preconditions.checkState(
      state.equals(ReaderWriterState.OPEN), "Attempt to write to a writer in state:%s", state);

  try {
    appender.flush();
  } catch (IOException e) {
    this.state = ReaderWriterState.ERROR;
    // Fix: attach the IOException as the cause instead of dropping it, so the
    // root failure is diagnosable — consistent with the other writer methods.
    throw new DatasetWriterException("Failed to flush appender " + appender, e);
  }
}
/**
 * Creates (or reconfigures) a log4j {@link Logger} that writes to a daily
 * rolling file at {@code /data/applogs/<category>/<name>.log}.
 *
 * <p>Any appenders previously attached to the named logger are removed and
 * additivity is disabled, so events are not duplicated by parent loggers.
 *
 * @param category application/category directory the log file lives under
 *     (parameter renamed from the misspelled {@code categroy}; call sites are
 *     positional, so this is source- and binary-compatible)
 * @param name logger name, also used as the log file base name
 * @return a logger at INFO level backed by a UTF-8 daily rolling file appender
 * @throws RuntimeException if the log file cannot be opened
 */
public static Logger getLogger(String category, String name) {
  Logger logger = Logger.getLogger(name);
  logger.removeAllAppenders();
  logger.setAdditivity(false);
  FileAppender appender = null;
  PatternLayout layout = new PatternLayout();
  layout.setConversionPattern("[%p][%d{yyyy-MM-dd HH:mm:ss}] %m%n");
  try {
    appender =
        new DailyRollingFileAppender(
            layout, String.format("/data/applogs/%s/%s.log", category, name), "yyyy-MM-dd");
    appender.setEncoding("UTF-8");
    // activateOptions() opens the file; must run after encoding is set.
    appender.activateOptions();
  } catch (IOException e) {
    throw new RuntimeException("LogUtil fail.", e);
  }
  logger.addAppender(appender);
  logger.setLevel(Level.INFO);
  return logger;
}
/**
 * Appends a single entity to the underlying appender and bumps the entity
 * count. Only legal while the writer is OPEN; an I/O failure poisons the
 * writer by moving it to the ERROR state.
 */
@Override
public final void write(E entity) {
  // Reject writes unless the writer has been opened and not yet closed/failed.
  Preconditions.checkState(
      state.equals(ReaderWriterState.OPEN), "Attempt to write to a writer in state:%s", state);

  try {
    appender.append(entity);
    count++;
  } catch (IOException e) {
    // The file contents are now unknown; no further writes are allowed.
    this.state = ReaderWriterState.ERROR;
    String message = "Failed to append " + entity + " to " + appender;
    throw new DatasetIOException(message, e);
  }
}
/**
 * Appends a single entity and increments the entity count.
 *
 * <p>Error handling is deliberately two-tiered: a {@code DatasetRecordException}
 * (a RuntimeException indicating one bad record) is re-thrown as-is so callers
 * can skip the record without poisoning the writer; any other RuntimeException
 * or IOException moves the writer to ERROR.
 */
@Override
public final void write(E entity) {
  Preconditions.checkState(
      state.equals(ReaderWriterState.OPEN), "Attempt to write to a writer in state:%s", state);

  try {
    appender.append(entity);
    count += 1;
  } catch (RuntimeException e) {
    // Re-throw per-record failures untouched; the writer stays usable.
    Throwables.propagateIfInstanceOf(e, DatasetRecordException.class);
    this.state = ReaderWriterState.ERROR;
    throw new DatasetOperationException(e, "Failed to append %s to %s", entity, appender);
  } catch (IOException e) {
    this.state = ReaderWriterState.ERROR;
    throw new DatasetIOException("Failed to append " + entity + " to " + appender, e);
  }
}
/**
 * Opens the writer: validates the descriptor's format, ensures the target
 * directory exists, allocates final/temp paths, and opens an appender on the
 * temp path. Transitions NEW -> OPEN, or NEW -> ERROR on any failure.
 *
 * <p>Each stage catches RuntimeException separately so unexpected runtime
 * failures also poison the writer instead of leaking it half-initialized.
 */
@Override
public final void initialize() {
  Preconditions.checkState(
      state.equals(ReaderWriterState.NEW), "Unable to open a writer from state:%s", state);

  // Fail fast on formats this writer cannot produce.
  ValidationException.check(
      isSupportedFormat(descriptor), "Not a supported format: %s", descriptor.getFormat());

  // ensure the directory exists
  try {
    fs.mkdirs(directory);
  } catch (RuntimeException e) {
    this.state = ReaderWriterState.ERROR;
    throw new DatasetOperationException(e, "Failed to create path %s", directory);
  } catch (IOException ex) {
    this.state = ReaderWriterState.ERROR;
    throw new DatasetIOException("Failed to create path " + directory, ex);
  }

  // initialize paths: data is staged at tempPath and renamed on commit
  try {
    this.finalPath = new Path(directory, uniqueFilename(descriptor.getFormat()));
    this.tempPath = tempFilename(finalPath);
  } catch (RuntimeException e) {
    this.state = ReaderWriterState.ERROR;
    throw new DatasetOperationException(e, "Failed to initialize file paths under %s", directory);
  }

  try {
    this.appender = newAppender(tempPath);
    appender.open();
  } catch (RuntimeException e) {
    this.state = ReaderWriterState.ERROR;
    throw new DatasetOperationException(e, "Failed to open appender %s", appender);
  } catch (IOException e) {
    this.state = ReaderWriterState.ERROR;
    throw new DatasetIOException("Failed to open appender " + appender, e);
  }

  // Entity counter drives the commit-vs-discard decision in close().
  this.count = 0;

  LOG.info("Opened output appender {} for {}", appender, finalPath);

  this.state = ReaderWriterState.OPEN;
}
/**
 * This method differentiates RollingFileAppender from its super class: after
 * delegating the actual append, it triggers a rollover once the counting
 * writer reports that the configured maximum file size has been reached.
 *
 * @since 0.9.0
 */
protected void subAppend(LoggingEvent event) {
  super.subAppend(event);

  // No file configured (e.g. writing to a raw stream): nothing to roll over.
  if (fileName == null) {
    return;
  }

  if (((CountingQuietWriter) qw).getCount() >= maxFileSize) {
    this.rollOver();
  }
}
/**
 * Builds the test fixture: starts a mini catalog cluster, creates an
 * "employee" CSV table populated with 10,000 rows of random data, and writes a
 * two-level BST index keyed on "managerid" alongside it. Also records, in
 * {@code randomValues}, how many rows were generated per key so tests can
 * verify index lookups against expected match counts.
 *
 * @throws Exception if cluster startup, table creation, or index writing fails
 */
@Before
public void setup() throws Exception {
  this.randomValues = new HashMap<Integer, Integer>();
  this.conf = new TajoConf();
  util = new TajoTestingCluster();
  util.startCatalogCluster();
  catalog = util.getMiniCatalogCluster().getCatalog();

  // Register a tablespace and database rooted at a fresh per-test directory.
  Path workDir = CommonTestingUtil.getTestDir();
  catalog.createTablespace(DEFAULT_TABLESPACE_NAME, workDir.toUri().toString());
  catalog.createDatabase(TajoConstants.DEFAULT_DATABASE_NAME, DEFAULT_TABLESPACE_NAME);
  sm = StorageManagerFactory.getStorageManager(conf, workDir);

  idxPath = new Path(workDir, "test.idx");

  // Table schema: three columns; only "managerid" participates in the index.
  Schema schema = new Schema();
  schema.addColumn("managerid", Type.INT4);
  schema.addColumn("empid", Type.INT4);
  schema.addColumn("deptname", Type.TEXT);

  this.idxSchema = new Schema();
  idxSchema.addColumn("managerid", Type.INT4);
  SortSpec[] sortKeys = new SortSpec[1];
  // ascending = true, nullFirst = false — TODO confirm against SortSpec ctor.
  sortKeys[0] = new SortSpec(idxSchema.getColumn("managerid"), true, false);
  this.comp = new TupleComparator(idxSchema, sortKeys);

  this.writer =
      new BSTIndex(conf)
          .getIndexWriter(idxPath, BSTIndex.TWO_LEVEL_INDEX, this.idxSchema, this.comp);
  writer.setLoadNum(100);
  writer.open();
  long offset;

  meta = CatalogUtil.newTableMeta(StoreType.CSV);

  tablePath = StorageUtil.concatPath(workDir, "employee", "table.csv");
  fs = tablePath.getFileSystem(conf);
  fs.mkdirs(tablePath.getParent());

  FileAppender appender =
      (FileAppender) StorageManagerFactory.getStorageManager(conf).getAppender(meta, schema, tablePath);
  appender.init();

  Tuple tuple = new VTuple(schema.size());
  // Write 10,000 rows; keys are drawn from [0, 250) so duplicates are common,
  // and randomValues tracks the occurrence count of each key.
  for (int i = 0; i < 10000; i++) {
    Tuple key = new VTuple(this.idxSchema.size());

    int rndKey = rnd.nextInt(250);
    if (this.randomValues.containsKey(rndKey)) {
      int t = this.randomValues.remove(rndKey) + 1;
      this.randomValues.put(rndKey, t);
    } else {
      this.randomValues.put(rndKey, 1);
    }

    key.put(new Datum[] {DatumFactory.createInt4(rndKey)});

    tuple.put(
        new Datum[] {
          DatumFactory.createInt4(rndKey),
          DatumFactory.createInt4(rnd.nextInt(10)),
          DatumFactory.createText("dept_" + rnd.nextInt(10))
        });
    // Capture the row's file offset BEFORE appending so the index entry
    // points at the start of the row.
    offset = appender.getOffset();
    appender.addTuple(tuple);
    writer.write(key, offset);
  }
  appender.flush();
  appender.close();
  writer.close();

  // Register the populated table with the catalog for the planner to resolve.
  TableDesc desc =
      new TableDesc(
          CatalogUtil.buildFQName(TajoConstants.DEFAULT_DATABASE_NAME, "employee"),
          schema,
          meta,
          sm.getTablePath("employee"));
  catalog.createTable(desc);

  analyzer = new SQLAnalyzer();
  planner = new LogicalPlanner(catalog);
  optimizer = new LogicalOptimizer(conf);
}
/**
 * Closes the writer, committing or discarding the staged temp file. The
 * writer ALWAYS ends in the CLOSED state (enforced by the finally block),
 * even when commit/discard fails.
 *
 * <p>Commit rule: the temp file is renamed to its final path only if data was
 * written AND either some of it was flushed (incremental writers set
 * {@code flushed}) or the writer is cleanly OPEN; otherwise it is deleted.
 */
@Override
public final void close() {
  try {
    if (ReaderWriterState.NEW.equals(state) || ReaderWriterState.CLOSED.equals(state)) {
      // Nothing was ever opened, or close already ran: no-op.
      return;
    }

    // Only try to close the appender if not in an error state. Any calls to
    // flush and sync must produce recoverable data without a call to close.
    if (!ReaderWriterState.ERROR.equals(state)) {
      try {
        appender.close();
      } catch (RuntimeException e) {
        throw new DatasetOperationException(e, "Failed to close appender %s", appender);
      } catch (IOException e) {
        throw new DatasetIOException("Failed to close appender " + appender, e);
      }
    }

    // Make the file visible if any data was written and either some data has
    // been flushed or the writer is not in an error state. Only instances of
    // IncrementalWriter set flushed to true.
    if (count > 0 && (flushed || ReaderWriterState.OPEN.equals(state))) {
      // commit the temp file
      try {
        if (!fs.rename(tempPath, finalPath)) {
          throw new DatasetOperationException("Failed to move %s to %s", tempPath, finalPath);
        }
      } catch (RuntimeException e) {
        throw new DatasetOperationException(e, "Failed to commit %s", finalPath);
      } catch (IOException e) {
        throw new DatasetIOException("Failed to commit " + finalPath, e);
      }

      LOG.debug(
          "Committed {} for appender {} ({} entities)",
          new Object[] {finalPath, appender, count});
    } else {
      // discard the temp file
      try {
        if (!fs.delete(tempPath, true)) {
          throw new DatasetOperationException("Failed to delete %s", tempPath);
        }
      } catch (RuntimeException e) {
        throw new DatasetOperationException(e, "Failed to remove temporary file %s", tempPath);
      } catch (IOException e) {
        throw new DatasetIOException("Failed to remove temporary file " + tempPath, e);
      }

      LOG.debug("Discarded {} ({} entities)", tempPath, count);
    }

    // Let the appender remove any auxiliary files it created.
    try {
      appender.cleanup();
    } catch (IOException e) {
      throw new DatasetIOException("Failed to clean up " + appender, e);
    }
  } finally {
    // Unconditional: the writer is unusable after close(), success or not.
    this.state = ReaderWriterState.CLOSED;
  }
}