protected void load(ZipFile zipfile) throws IOException { Enumeration entries = zipfile.entries(); while (entries.hasMoreElements()) { ZipEntry entry = (ZipEntry) entries.nextElement(); fireBeginFile(entry.getName()); Logger.getLogger(getClass()) .debug("Starting file " + entry.getName() + " (" + entry.getSize() + " bytes)"); byte[] bytes = null; InputStream in = null; try { in = zipfile.getInputStream(entry); bytes = readBytes(in); } finally { if (in != null) { try { in.close(); } catch (IOException ex) { // Ignore } } } Logger.getLogger(getClass()) .debug("Passing up file " + entry.getName() + " (" + bytes.length + " bytes)"); getLoader().load(entry.getName(), new ByteArrayInputStream(bytes)); fireEndFile(entry.getName()); } }
protected void load(String filename, InputStream in) { Logger.getLogger(getClass()).debug("Starting group in stream " + filename); ZipInputStream zipfile = null; try { zipfile = new ZipInputStream(in); fireBeginGroup(filename, -1); Logger.getLogger(getClass()).debug("Loading ZipInputStream " + filename); load(zipfile); Logger.getLogger(getClass()).debug("Loaded ZipInputStream " + filename); fireEndGroup(filename); } catch (IOException ex) { Logger.getLogger(getClass()).error("Cannot load Zip file \"" + filename + "\"", ex); } finally { if (zipfile != null) { try { zipfile.close(); } catch (IOException ex) { // Ignore } } } }
/* Check: getLoggerNames() must return correct names * for registered loggers and their parents. * Returns boolean values: PASSED or FAILED */ public static boolean checkLoggers() { String failMsg = "# checkLoggers: getLoggerNames() returned unexpected loggers"; Vector<String> expectedLoggerNames = new Vector<String>(getDefaultLoggerNames()); // Create the logger LOGGER_NAME_1 Logger.getLogger(LOGGER_NAME_1); expectedLoggerNames.addElement(PARENT_NAME_1); expectedLoggerNames.addElement(LOGGER_NAME_1); // Create the logger LOGGER_NAME_2 Logger.getLogger(LOGGER_NAME_2); expectedLoggerNames.addElement(PARENT_NAME_2); expectedLoggerNames.addElement(LOGGER_NAME_2); Enumeration<String> returnedLoggersEnum = logMgr.getLoggerNames(); Vector<String> returnedLoggerNames = new Vector<String>(0); while (returnedLoggersEnum.hasMoreElements()) { String logger = returnedLoggersEnum.nextElement(); if (!initialLoggerNames.contains(logger)) { // filter out the loggers that have been added before this test runs returnedLoggerNames.addElement(logger); } } ; return checkNames(expectedLoggerNames, returnedLoggerNames, failMsg); }
/* good2() reverses the bodies in the if statement */
// NOTE(review): this is a CWE benchmark case — the live branch logs without
// leaking the session ID (FIX) while the dead branch demonstrates the leak
// (FLAW). The `"******"` fragments appear to be redaction artifacts in the
// test-corpus source; do not "repair" them without consulting the corpus.
private void good2(HttpServletRequest request, HttpServletResponse response) throws Throwable { if (IO.static_returns_t()) { Logger tcLog = Logger.getLogger("cwe_testcases_logger"); if (request.getParameter("username") == null) { return; } String username = request.getParameter("username"); if (username.matches("[a-zA-Z0-9]*")) { HttpSession session = request.getSession(true); /* FIX: logged message does not contain session id */ tcLog.log(Level.FINEST, "Username: "******" Session ID:" + session.getId()); } else { response.getWriter().println("Invalid characters"); } } else { /* INCIDENTAL: CWE 561 Dead Code, the code below will never run */ Logger tcLog = Logger.getLogger("cwe_testcases_logger"); if (request.getParameter("username") == null) { return; } String username = request.getParameter("username"); if (username.matches("[a-zA-Z0-9]*")) { HttpSession session = request.getSession(true); /* FLAW: leak session ID to debug log */ tcLog.log(Level.FINEST, "Username: "******" Session ID:" + session.getId()); } else { response.getWriter().println("Invalid characters"); } } }
public void handleInit(QuickServer quickserver) throws Exception { Logger logger = null; FileHandler xmlLog = null; File log = new File("./log/"); if (!log.canRead()) log.mkdir(); try { logger = Logger.getLogger(""); logger.setLevel(Level.FINEST); logger = Logger.getLogger("org.quickserver.net.qsadmin"); xmlLog = new FileHandler("log/FtpServer_QSAdmin%u%g.xml", 1024 * 1024, 20, true); xmlLog.setLevel(Level.FINEST); logger.addHandler(xmlLog); logger = Logger.getLogger("org.quickserver"); xmlLog = new FileHandler("log/FtpServer_QuickServer%u%g.xml", 1024 * 1024, 20, true); xmlLog.setLevel(Level.FINEST); logger.addHandler(xmlLog); logger = Logger.getLogger("ftpserver"); xmlLog = new FileHandler("log/FtpServer%u%g.xml", 1024 * 1024, 20, true); xmlLog.setLevel(Level.FINEST); logger.addHandler(xmlLog); quickserver.setAppLogger(logger); // img } catch (IOException e) { System.err.println("Could not create txtLog FileHandler : " + e); throw e; } }
/**
 * Reads a LocalVariableTable attribute from the class-file stream: a 4-byte
 * attribute length, a 2-byte entry count, then that many local variable
 * records.
 *
 * @param constantPool constant pool for symbol resolution
 * @param owner the visitable node that owns this attribute
 * @param in positioned just after the attribute name index
 * @throws IOException on a malformed or truncated stream
 */
public LocalVariableTable_attribute(ConstantPool constantPool, Visitable owner, DataInput in) throws IOException {
    super(constantPool, owner);
    Logger logger = Logger.getLogger(getClass());

    int byteCount = in.readInt();
    logger.debug("Attribute length: " + byteCount);

    int count = in.readUnsignedShort();
    logger.debug("Reading " + count + " local variable(s) ...");
    for (int i = 0; i < count; i++) {
        logger.debug("Local variable " + i + ":");
        localVariables.add(new LocalVariable(this, in));
    }
}
public void summaryAction(HttpServletRequest req, HttpServletResponse res) { if (AccountController.redirectIfNoCookie(req, res)) return; Map<String, Object> viewData = new HashMap<String, Object>(); DocumentManager docMan = new DocumentManager(); try { if (req.getParameter("documentId") != null) { // Get the document ID int docId = Integer.parseInt(req.getParameter("documentId")); // Get the document using document id Document document = docMan.get(docId); // Set title to name of the document viewData.put("title", document.getDocumentName()); // Create List of access records List<AccessRecord> accessRecords = new LinkedList<AccessRecord>(); // Add access records for document to the list accessRecords = docMan.getAccessRecords(docId); viewData.put("accessRecords", accessRecords); } else { // Go back to thread page. } } catch (Exception e) { Logger.getLogger("").log(Level.SEVERE, "An error occurred when getting profile user", e); } view(req, res, "/views/group/Document.jsp", viewData); }
/**
 * Closes all connections opened on this handler's socket.
 *
 * <p>A failure to close is logged at SEVERE and otherwise ignored.
 */
public void desconnectar() { try { socket.close(); } catch (IOException ex) { Logger.getLogger(ServidorHilo.class.getName()).log(Level.SEVERE, null, ex); } }
/**
 * Add a named logger. This does nothing and returns false if a logger
 * with the same name is already registered.
 * <p>
 * The Logger factory methods call this method to register each
 * newly created Logger.
 * <p>
 * The application should retain its own reference to the Logger
 * object to avoid it being garbage collected. The LogManager
 * may only retain a weak reference.
 *
 * @param logger the new logger.
 * @return true if the argument logger was registered successfully,
 *         false if a logger of that name already exists.
 * @exception NullPointerException if the logger name is null.
 */
public synchronized boolean addLogger(Logger logger) {
    String name = logger.getName();
    if (name == null) {
        throw new NullPointerException();
    }

    Logger old = (Logger) loggers.get(name);
    if (old != null) {
        // We already have a registered logger with the given name.
        return false;
    }

    // We're adding a new logger.
    // Note that we are creating a strong reference here that will
    // keep the Logger in existence indefinitely.
    loggers.put(name, logger);

    // Apply any initial level defined for the new logger.
    Level level = getLevelProperty(name + ".level", null);
    if (level != null) {
        doSetLevel(logger, level);
    }

    // If any of the logger's parents have levels defined in the configuration,
    // make sure those parent loggers are instantiated, walking each dot-separated
    // prefix of the name (e.g. "a", "a.b" for logger "a.b.c").
    int ix = 1;
    for (;;) {
        int ix2 = name.indexOf(".", ix);
        if (ix2 < 0) {
            break;
        }
        String pname = name.substring(0, ix2);
        if (getProperty(pname + ".level") != null) {
            // This pname has a level definition. Make sure it exists.
            // The returned reference is deliberately unused: getLogger()
            // registers the parent as a side effect.
            Logger plogger = Logger.getLogger(pname);
        }
        ix = ix2 + 1;
    }

    // Find the new node and its parent in the logger tree.
    LogNode node = findNode(name);
    node.logger = logger;
    // The effective parent is the nearest ancestor node that actually has
    // a Logger attached (intermediate nodes may be placeholders).
    Logger parent = null;
    LogNode nodep = node.parent;
    while (nodep != null) {
        if (nodep.logger != null) {
            parent = nodep.logger;
            break;
        }
        nodep = nodep.parent;
    }
    if (parent != null) {
        doSetParent(logger, parent);
    }
    // Walk over the children and tell them we are their new parent.
    node.walkAndSetParent(logger);

    return true;
}
/**
 * Streams each entry of the ZIP input, buffering its bytes in memory and
 * passing them to the underlying loader, with begin/end file events around
 * every entry.
 *
 * @param in open ZIP stream, positioned before the first entry
 * @throws IOException if reading an entry header fails
 */
protected void load(ZipInputStream in) throws IOException {
    Logger logger = Logger.getLogger(getClass());
    for (ZipEntry entry = in.getNextEntry(); entry != null; entry = in.getNextEntry()) {
        String name = entry.getName();
        fireBeginFile(name);
        logger.debug("Starting file " + name + " (" + entry.getSize() + " bytes)");
        byte[] bytes = readBytes(in);
        logger.debug("Passing up file " + name + " (" + bytes.length + " bytes)");
        getLoader().load(name, new ByteArrayInputStream(bytes));
        fireEndFile(name);
    }
}
public void preprocess(Run run) { File logFile = new File(getRunDir(run), "genetik.log"); try { SimpleFileHandler fh = new SimpleFileHandler(logFile); fh.setFormatter(new CompactFormatter()); Logger logger = Logger.getLogger(GenetikConstants.LOGGER); logger.setLevel(Level.INFO); logger.setUseParentHandlers(false); Handler handlers[] = logger.getHandlers(); logger.addHandler(fh); for (Handler h : handlers) { logger.removeHandler(h); if (h instanceof SimpleFileHandler) h.close(); // close our old one } } catch (Exception exp) { throw new IllegalArgumentException( "Unable to create log file at " + logFile.getAbsolutePath()); } super.preprocess(run); }
/**
 * Cron-style servlet that scans for idle data points and, for each point
 * whose latest value is older than its configured idle window, marks it
 * idle and fires an IdleAlert to the point's subscriptions.
 */
public class IdlePointCron extends HttpServlet {

    /** */
    private static final long serialVersionUID = 1L;
    private static final Logger log = Logger.getLogger(IdlePointCron.class.getName());

    @Override
    @SuppressWarnings(Const.WARNING_UNCHECKED)
    public void doGet(final HttpServletRequest req, final HttpServletResponse resp)
            throws IOException {
        // PrintWriter out;
        // out = resp.getWriter();
        try {
            processGet();
        } catch (NimbitsException e) {
            LogHelper.logException(IdlePointCron.class, e);
        }
    }

    /**
     * Checks every potentially idle entity; failures on individual points are
     * logged and do not abort the sweep.
     *
     * @return the number of candidate points examined
     */
    protected static int processGet() throws NimbitsException {
        final List<Entity> points = EntityServiceFactory.getInstance().getIdleEntities();
        log.info("Processing " + points.size() + " potentially idle points");
        for (final Entity p : points) {
            try {
                checkIdle((Point) p);
            } catch (NimbitsException e) {
                LogHelper.logException(IdlePointCron.class, e);
            }
        }
        return points.size();
    }

    /**
     * Sends an idle alert for {@code p} when all of these hold: an idle window
     * is configured, a current value exists, that value is at least
     * idleSeconds old, and no alert was already sent.
     *
     * @return true if an idle alert was raised for this point
     */
    protected static boolean checkIdle(final Point p) throws NimbitsException {
        // c = now minus the point's idle window; values older than this are stale.
        final Calendar c = Calendar.getInstance();
        c.add(Calendar.SECOND, p.getIdleSeconds() * -1);
        boolean retVal = false;

        // Look up the point's owner as admin; skip silently if absent.
        final List<Entity> result =
            EntityServiceFactory.getInstance()
                .getEntityByKey(
                    UserServiceFactory.getServerInstance().getAdmin(),
                    p.getOwner(),
                    EntityType.user);
        if (!result.isEmpty()) {
            final User u = (User) result.get(0);
            final List<Value> v = ValueServiceFactory.getInstance().getCurrentValue(p);
            if (p.getIdleSeconds() > 0
                && !v.isEmpty()
                && v.get(0).getTimestamp().getTime() <= c.getTimeInMillis()
                && !p.getIdleAlarmSent()) {
                // Flag first so repeated cron runs don't re-alert.
                p.setIdleAlarmSent(true);
                EntityServiceFactory.getInstance().addUpdateEntity(u, p);
                // PointServiceFactory.getInstance().updatePoint(u, p);
                final Value va = ValueFactory.createValueModel(v.get(0), AlertType.IdleAlert);
                SubscriptionServiceFactory.getInstance().processSubscriptions(u, p, va);
                retVal = true;
            }
        }
        return retVal;
    }
}
/** Export to an ARC file */
public class ArcExporter extends Exporter {

    private static Logger log = Logger.getLogger("ArcExporter");

    protected CIProperties arcProps = null;
    String arcFilePrefix = "SimulatedCrawl";
    AtomicInteger serialNo = new AtomicInteger(0);
    ARCWriter aw;
    // When true, each record is written as a full HTTP response
    // (headers + body); otherwise only the content bytes are written.
    boolean isResponse;

    /**
     * @param isResponse whether to prepend HTTP response headers to each record
     */
    public ArcExporter(LockssDaemon daemon, ArchivalUnit au, boolean isResponse) {
        super(daemon, au);
        this.isResponse = isResponse;
    }

    protected void start() {
        aw = makeARCWriter();
    }

    protected void finish() throws IOException {
        aw.close();
    }

    private ARCWriter makeARCWriter() {
        // maxSize < 0 means "no limit" for the superclass; map it to MAX_VALUE here.
        return new ARCWriter(
            serialNo, ListUtil.list(dir), prefix, compress, maxSize >= 0 ? maxSize : Long.MAX_VALUE);
    }

    /**
     * Writes one cached URL to the ARC file. In response mode the HTTP header
     * string is concatenated in front of the content stream, and the record
     * size is the header length plus the content size.
     *
     * <p>The CachedUrl is always released, and the (possibly concatenated)
     * stream closed, even on failure.
     */
    protected void writeCu(CachedUrl cu) throws IOException {
        String url = cu.getUrl();
        long contentSize = cu.getContentSize();
        CIProperties props = cu.getProperties();
        long fetchTime = Long.parseLong(props.getProperty(CachedUrl.PROPERTY_FETCH_TIME));
        InputStream contentIn = cu.getUnfilteredInputStream();
        try {
            if (isResponse) {
                String hdrString = getHttpResponseString(cu);
                // NOTE(review): uses String.length() (chars) as the header byte
                // count — only exact for single-byte-per-char headers; confirm.
                long size = contentSize + hdrString.length();
                InputStream headerIn = new ReaderInputStream(new StringReader(hdrString));
                InputStream concat = new SequenceInputStream(headerIn, contentIn);
                try {
                    aw.write(xlateFilename(url), cu.getContentType(), getHostIp(), fetchTime, size, concat);
                } finally {
                    IOUtil.safeClose(concat);
                }
            } else {
                aw.write(
                    xlateFilename(url),
                    cu.getContentType(),
                    getHostIp(),
                    fetchTime,
                    cu.getContentSize(),
                    contentIn);
            }
        } finally {
            AuUtil.safeRelease(cu);
        }
    }
}
/**
 * Factory producing metadata extractors for HighWire Drupal HTML pages.
 * The extractor maps HighWire meta tags onto cooked metadata fields and
 * normalizes the access URL against the AU's base URL.
 */
public class HighWireDrupalHtmlMetadataExtractorFactory implements FileMetadataExtractorFactory {

    private static final Logger log =
        Logger.getLogger(HighWireDrupalHtmlMetadataExtractorFactory.class);

    @Override
    public FileMetadataExtractor createFileMetadataExtractor(
            MetadataTarget target, String contentType) throws PluginException {
        return new HighWireDrupalHtmlMetadataExtractor();
    }

    public static class HighWireDrupalHtmlMetadataExtractor implements FileMetadataExtractor {

        // Map HighWire HTML meta tag names to cooked metadata fields
        private static MultiMap tagMap = new MultiValueMap();

        static {
            tagMap.put("DC.Format", MetadataField.FIELD_FORMAT);
            tagMap.put("DC.Language", MetadataField.FIELD_LANGUAGE);
            tagMap.put("citation_publisher", MetadataField.FIELD_PUBLISHER);
            tagMap.put("citation_journal_title", MetadataField.FIELD_PUBLICATION_TITLE);
            tagMap.put("citation_title", MetadataField.FIELD_ARTICLE_TITLE);
            tagMap.put("citation_date", MetadataField.FIELD_DATE);
            tagMap.put("citation_publication_date", MetadataField.FIELD_DATE);
            // "citation_authors" carries a semicolon-separated list; split it.
            tagMap.put(
                "citation_authors",
                new MetadataField(MetadataField.FIELD_AUTHOR, MetadataField.splitAt(";")));
            tagMap.put("citation_author", MetadataField.FIELD_AUTHOR);
            tagMap.put("citation_issn", MetadataField.FIELD_ISSN);
            tagMap.put("citation_volume", MetadataField.FIELD_VOLUME);
            tagMap.put("citation_issue", MetadataField.FIELD_ISSUE);
            tagMap.put("citation_firstpage", MetadataField.FIELD_START_PAGE);
            tagMap.put("citation_lastpage", MetadataField.FIELD_END_PAGE);
            tagMap.put("citation_doi", MetadataField.FIELD_DOI);
            tagMap.put("citation_public_url", MetadataField.FIELD_ACCESS_URL);
            // typical field value: "acupmed;30/1/8": extract "acupmed"
            tagMap.put(
                "citation_mjid",
                new MetadataField(
                    MetadataField.FIELD_PROPRIETARY_IDENTIFIER, MetadataField.extract("^([^;]+);", 1)));
        }

        /**
         * Extracts and cooks the meta tags, then falls back to the CU's own URL
         * when the declared access URL is missing or has no cached content,
         * normalizing http/https against the AU's base URL before emitting.
         */
        @Override
        public void extract(MetadataTarget target, CachedUrl cu, Emitter emitter) throws IOException {
            ArticleMetadata am = new SimpleHtmlMetaTagMetadataExtractor().extract(target, cu);
            am.cook(tagMap);
            String url = am.get(MetadataField.FIELD_ACCESS_URL);
            ArchivalUnit au = cu.getArchivalUnit();
            if (url == null || url.isEmpty() || !au.makeCachedUrl(url).hasContent()) {
                url = cu.getUrl();
            }
            am.replace(
                MetadataField.FIELD_ACCESS_URL,
                HttpToHttpsUtil.AuUtil.normalizeHttpHttpsFromBaseUrl(au, url));
            emitter.emitMetadata(cu, am);
        }
    }
}
/**
 * Creates a port bound to the given server address, remembering its pool and
 * options and deriving a per-address child logger under the root DB logger.
 *
 * @param addr server address this port talks to
 * @param pool owning pool (may be consulted for lifecycle)
 * @param options connection options to apply
 * @throws IOException declared for subclass/caller compatibility
 */
DBPort(InetSocketAddress addr, DBPortPool pool, MongoOptions options) throws IOException {
    _options = options;
    _addr = addr;
    _pool = pool;
    _hashCode = addr.hashCode();
    // Child logger name: "<root>.<host:port>".
    _logger = Logger.getLogger(_rootLogger.getName() + "." + addr);
}
/**
 * Builds the object that will manage the data communication.
 *
 * <p>Wraps the socket in a {@code PacketProcessor} and registers this
 * attendant with the global peer list.
 *
 * @param s connection socket
 * @throws Exception if the packet processor cannot be created
 */
public DataAttendant(Socket s) throws Exception { mylogger = Logger.getLogger("isabel.nereda.DataAttendant"); mylogger.fine("Creating DataAttendant object."); sock = s; myPP = new PacketProcessor(sock); NeReDa.peers.Add(this); }
/**
 * Redoes the last undone edit, logging (not propagating) the case where
 * nothing can be redone, then refreshes both this action's and the paired
 * undo action's enabled state.
 */
public void actionPerformed(ActionEvent e) { try { undo.redo(); } catch (CannotRedoException ex) { Logger.getLogger(RedoAction.class.getName()).log(Level.SEVERE, "Unable to redo", ex); } update(); undoAction.update(); }
// constructor que inicializar el socket y asigna el id al numero de sesión public ServidorHilo(Socket socket, int id) { this.socket = socket; this.idSessio = id; try { dos = new DataOutputStream(socket.getOutputStream()); dis = new DataInputStream(socket.getInputStream()); } catch (IOException ex) { Logger.getLogger(ServidorHilo.class.getName()).log(Level.SEVERE, null, ex); } }
/**
 * The <tt>Resources</tt> class manages the access to the internationalization properties files and
 * the image resources used in this plugin.
 *
 * @author Yana Stamcheva
 */
public class Resources {
    private static Logger log = Logger.getLogger(Resources.class);

    /** The name of the resource, where internationalization strings for this plugin are stored. */
    private static final String STRING_RESOURCE_NAME =
        "resources.languages.plugin.contactinfo.resources";

    /** The name of the resource, where paths to images used in this bundle are stored. */
    private static final String IMAGE_RESOURCE_NAME =
        "net.java.sip.communicator.plugin.contactinfo.resources";

    /** The string resource bundle. */
    private static final ResourceBundle STRING_RESOURCE_BUNDLE =
        ResourceBundle.getBundle(STRING_RESOURCE_NAME);

    /** The image resource bundle. */
    private static final ResourceBundle IMAGE_RESOURCE_BUNDLE =
        ResourceBundle.getBundle(IMAGE_RESOURCE_NAME);

    /**
     * Returns an internationalized string corresponding to the given key.
     *
     * @param key The key of the string.
     * @return An internationalized string corresponding to the given key, or
     *         {@code !key!} if the key is missing.
     */
    public static String getString(String key) {
        try {
            return STRING_RESOURCE_BUNDLE.getString(key);
        } catch (MissingResourceException e) {
            return '!' + key + '!';
        }
    }

    /**
     * Loads an image from a given image identifier.
     *
     * @param imageID The identifier of the image.
     * @return The image for the given identifier, or {@code null} if it cannot
     *         be found or decoded (previously this crashed with an unchecked
     *         exception when the resource was missing).
     */
    public static ImageIcon getImage(String imageID) {
        String path = IMAGE_RESOURCE_BUNDLE.getString(imageID);
        // Fixed: getResourceAsStream may return null, which made ImageIO.read
        // throw IllegalArgumentException; and a failed read left image == null,
        // which made new ImageIcon(null) throw NullPointerException.
        InputStream in = Resources.class.getClassLoader().getResourceAsStream(path);
        if (in == null) {
            log.error("Image resource not found: " + path);
            return null;
        }
        BufferedImage image = null;
        try {
            image = ImageIO.read(in);
        } catch (IOException e) {
            log.error("Failed to load image:" + path, e);
        }
        return image == null ? null : new ImageIcon(image);
    }
}
/**
 * Tests the Elsevier XML link extractor: dataset manifests containing
 * {@code <file name="..."/>} elements should yield one URL per file name
 * (resolved against the source URL), and manifests without file elements
 * should yield none.
 */
public class TestElsevierXmlLinkExtractorFactory extends LinkExtractorTestCase {

    private static Logger logger = Logger.getLogger("TestElsevierXmlLinkExtractorFactory");

    String srcUrl = "http://www.example.com/";

    private static final String withLinks =
        "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
            + "<!DOCTYPE dataset SYSTEM \"http://support.sciencedirect.com/xml/sdosftp10.dtd\">\n"
            + "<dataset identifier=\"OXM10160\" customer=\"OHL\""
            + " status=\"Announcement\""
            + " version=\"Network Dataset Announcement/Confirmation v1.0\">"
            + " <date year=\"2007\" month=\"May\" day=\"1\"/>\n"
            + "<file name=\"01407007.tar\" size=\"21780480\""
            + " md5=\"6c7266e0e246bf3e8cf1cd8b659a7a73\"/>\n"
            + "<file name=\"03064530.tar\" size=\"12748800\""
            + " md5=\"df9519d3075e164d22f5dd4988a693c3\"/>\n"
            + "<file name=\"dataset.toc\" size=\"2216587\""
            + " md5=\"cd21741eb91fa0fdfef2fa36485e21a0\"/>\n"
            + "</dataset>\n";

    private static final String withoutLinks =
        "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
            + "<!DOCTYPE dataset SYSTEM \"http://support.sciencedirect.com/xml/sdosftp10.dtd\">\n"
            + "<dataset identifier=\"OXM10160\" customer=\"OHL\""
            + " status=\"Announcement\""
            + " version=\"Network Dataset Announcement/Confirmation v1.0\">"
            + " <date year=\"2007\" month=\"May\" day=\"1\"/>\n"
            + "</dataset>\n";

    private static final String[] links = {
        "01407007.tar", "03064530.tar", "dataset.toc",
    };

    public String getMimeType() {
        return "text/xml";
    }

    public LinkExtractorFactory getFactory() {
        return new ElsevierXmlLinkExtractorFactory();
    }

    /** Every file name in the manifest is reported as srcUrl + name. */
    public void testFindCorrectEntries() throws Exception {
        // Fixed: raw Set/HashSet generified.
        Set<String> expected = new HashSet<String>();
        for (String link : links) {
            expected.add(srcUrl + link);
        }
        assertEquals(expected, extractUrls(withLinks));
    }

    /** A manifest with no file elements yields no URLs. */
    public void testFindNoEntries() throws Exception {
        assertEmpty(extractUrls(withoutLinks));
    }
}
/*
 * OJS2HtmlMetadataExtractorFactory extracts metadata from each article.
 */
public class OJS2HtmlMetadataExtractorFactory implements FileMetadataExtractorFactory {

    static Logger log = Logger.getLogger(OJS2HtmlMetadataExtractorFactory.class);

    public FileMetadataExtractor createFileMetadataExtractor(
            MetadataTarget target, String contentType) throws PluginException {
        return new OJS2HtmlMetadataExtractor();
    } // createFileMetadataExtractor

    public static class OJS2HtmlMetadataExtractor extends SimpleHtmlMetaTagMetadataExtractor {

        // Map OJS2-specific HTML meta tag names to cooked metadata fields.
        // Note: tagMap is a MultiValueMap, so the same raw tag may be mapped
        // to several cooked fields (e.g. DC.Publisher, citation_volume below).
        private static MultiMap tagMap = new MultiValueMap();

        static {
            tagMap.put("DC.Format", MetadataField.DC_FIELD_FORMAT);
            tagMap.put("DC.Language", MetadataField.DC_FIELD_LANGUAGE);
            tagMap.put("DC.Title", MetadataField.DC_FIELD_TITLE);
            tagMap.put("DC.Identifier", MetadataField.DC_FIELD_IDENTIFIER);
            tagMap.put("DC.Date", MetadataField.DC_FIELD_DATE);
            tagMap.put("DC.Publisher", MetadataField.DC_FIELD_PUBLISHER);
            tagMap.put("DC.Publisher", MetadataField.FIELD_PUBLISHER);
            tagMap.put("DC.Contributor", MetadataField.DC_FIELD_CONTRIBUTOR);
            tagMap.put("citation_journal_title", MetadataField.FIELD_PUBLICATION_TITLE);
            tagMap.put("citation_title", MetadataField.FIELD_ARTICLE_TITLE);
            tagMap.put("citation_date", MetadataField.FIELD_DATE);
            tagMap.put("citation_author", MetadataField.FIELD_AUTHOR);
            // "citation_authors" carries a semicolon-separated author list.
            tagMap.put(
                "citation_authors",
                new MetadataField(MetadataField.FIELD_AUTHOR, MetadataField.splitAt(";")));
            tagMap.put("citation_issn", MetadataField.FIELD_ISSN);
            tagMap.put("citation_volume", MetadataField.FIELD_VOLUME);
            tagMap.put("citation_volume", MetadataField.DC_FIELD_CITATION_VOLUME);
            tagMap.put("citation_issue", MetadataField.FIELD_ISSUE);
            tagMap.put("citation_issue", MetadataField.DC_FIELD_CITATION_ISSUE);
            tagMap.put("citation_firstpage", MetadataField.FIELD_START_PAGE);
            tagMap.put("citation_lastpage", MetadataField.FIELD_END_PAGE);
            tagMap.put("citation_doi", MetadataField.FIELD_DOI);
            tagMap.put("citation_public_url", MetadataField.FIELD_ACCESS_URL);
        } // static

        /** Extracts the raw meta tags via the superclass, then cooks them with tagMap. */
        @Override
        public ArticleMetadata extract(MetadataTarget target, CachedUrl cu) throws IOException {
            log.debug3("Metadata - cachedurl cu:" + cu.getUrl());
            ArticleMetadata am = super.extract(target, cu);
            am.cook(tagMap);
            return am;
        } // extract
    } // OJS2HtmlMetadataExtractor
} // OJS2HtmlMetadataExtractorFactory
@Override @SuppressWarnings("SleepWhileHoldingLock") public void run() { try { // initialize the statusbar status.removeAll(); JProgressBar progress = new JProgressBar(); progress.setMinimum(0); progress.setMaximum(doc.getLength()); status.add(progress); status.revalidate(); // start writing Writer out = new FileWriter(f); Segment text = new Segment(); text.setPartialReturn(true); int charsLeft = doc.getLength(); int offset = 0; while (charsLeft > 0) { doc.getText(offset, Math.min(4096, charsLeft), text); out.write(text.array, text.offset, text.count); charsLeft -= text.count; offset += text.count; progress.setValue(offset); try { Thread.sleep(10); } catch (InterruptedException e) { Logger.getLogger(FileSaver.class.getName()).log(Level.SEVERE, null, e); } } out.flush(); out.close(); } catch (IOException e) { final String msg = e.getMessage(); SwingUtilities.invokeLater( new Runnable() { public void run() { JOptionPane.showMessageDialog( getFrame(), "Could not save file: " + msg, "Error saving file", JOptionPane.ERROR_MESSAGE); } }); } catch (BadLocationException e) { System.err.println(e.getMessage()); } // we are done... get rid of progressbar status.removeAll(); status.revalidate(); }
public static TypeList load(String gamePath, String dataName) { System.out.println("load:" + dataName); InputStream is = null; Savable sav = null; try { File file = new File( System.getProperty("user.dir") + File.separator + gamePath + File.separator + dataName); if (!file.exists()) { return null; } is = new BufferedInputStream(new FileInputStream(file)); // is = new GZIPInputStream(new BufferedInputStream(new FileInputStream(file))); XMLImporter imp = XMLImporter.getInstance(); // if (manager != null) { // imp.setAssetManager(manager); // } sav = imp.load(is); } catch (IOException ex) { Logger.getLogger(Type.class.getName()).log(Level.SEVERE, "Error loading data: {0}", ex); ex.printStackTrace(); } finally { if (is != null) { try { is.close(); } catch (IOException ex) { Logger.getLogger(Type.class.getName()).log(Level.SEVERE, "Error loading data: {0}", ex); ex.printStackTrace(); } } } return (TypeList) sav; }
public void save(String gamePath, String dataName) { XMLExporter ex = XMLExporter.getInstance(); OutputStream os = null; try { File daveFolder = new File(System.getProperty("user.dir") + File.separator + gamePath); if (!daveFolder.exists() && !daveFolder.mkdirs()) { Logger.getLogger(Type.class.getName()).log(Level.SEVERE, "Error creating save file!"); throw new IllegalStateException("SaveGame dataset cannot be created"); } File saveFile = new File(daveFolder.getAbsolutePath() + File.separator + dataName); if (!saveFile.exists()) { if (!saveFile.createNewFile()) { Logger.getLogger(Type.class.getName()).log(Level.SEVERE, "Error creating save file!"); throw new IllegalStateException("SaveGame dataset cannot be created"); } } os = new BufferedOutputStream(new FileOutputStream(saveFile)); // os = new GZIPOutputStream(new BufferedOutputStream(new FileOutputStream(saveFile))); ex.save(this, os); } catch (IOException ex1) { Logger.getLogger(Type.class.getName()).log(Level.SEVERE, "Error saving data: {0}", ex1); ex1.printStackTrace(); throw new IllegalStateException("SaveGame dataset cannot be saved"); } finally { try { if (os != null) { os.close(); } } catch (IOException ex1) { Logger.getLogger(Type.class.getName()).log(Level.SEVERE, "Error saving data: {0}", ex1); ex1.printStackTrace(); throw new IllegalStateException("SaveGame dataset cannot be saved"); } } }
/**
 * Builds the XML Schema tree into memory by parsing {@code fileName} with
 * XSOM and handing the resulting schema to {@code createResult}.
 *
 * <p>Parse and I/O failures are logged at SEVERE and swallowed.
 */
public void parse() {
    try {
        XSOMParser schemaParser = new XSOMParser();
        schemaParser.parse(new File(fileName));
        XSSchemaSet schemaSet = schemaParser.getResult();
        // NOTE(review): index 1 presumably skips the built-in XML Schema
        // namespace at index 0 — confirm against the XSOM schema-set ordering.
        createResult(schemaSet.getSchema(1));
    } catch (SAXException | IOException ex) {
        Logger.getLogger(SchemaParser.class.getName()).log(Level.SEVERE, null, ex);
    }
}
/** @param args the command line arguments */ public static void main(String[] args) throws IOException { // TODO code application logic here Socket socket = null; BufferedReader read = null; BufferedReader in = null; PrintWriter out = null; try { socket = new Socket("10.151.34.155", 6666); read = new BufferedReader(new InputStreamReader(System.in)); in = new BufferedReader(new InputStreamReader(socket.getInputStream())); out = new PrintWriter(socket.getOutputStream(), true); } catch (IOException ex) { Logger.getLogger(UTSProgjar.class.getName()).log(Level.SEVERE, null, ex); } String textToServer; while ((textToServer = read.readLine()) != null) { out.print(textToServer + "\r\n"); out.flush(); String messageFromServer; while ((messageFromServer = textToServer = in.readLine()) != null) { System.out.println(messageFromServer); } } Integer intToServer; while ((intToServer = Integer.parseInt(read.readLine())) != null) { out.print(intToServer + "\r\n"); out.flush(); String messageFromServer; while ((messageFromServer = textToServer = in.readLine()) != null) { System.out.println(messageFromServer); } } out.close(); in.close(); read.close(); socket.close(); }
/**
 * Demonstrates routing an exception's full stack trace through
 * java.util.logging at SEVERE level.
 */
public class LoggingExceptions2 {
    private static Logger logger = Logger.getLogger("LoggingExceptions2");

    /** Renders the exception's stack trace to a string and logs it at SEVERE. */
    static void logException(Exception e) {
        StringWriter trace = new StringWriter();
        e.printStackTrace(new PrintWriter(trace));
        logger.severe(trace.toString());
    }

    public static void main(String[] args) {
        try {
            throw new NullPointerException();
        } catch (NullPointerException e) {
            logException(e);
        }
    }
}
/* Output: (90% match)
/**
 * Action Servlet handling actions on users.
 *
 * @author Emmanuel Bourg
 * @version $Revision$, $Date$
 */
public class UserAction extends HttpServlet {

    private Logger logger = Logger.getLogger("net.jetrix");

    /**
     * Handles {@code action=kick} (disconnect + audit log) and
     * {@code action=ban} (banlist the client's IP, persist config, then
     * disconnect). Any other action value still disconnects the client.
     *
     * <p>NOTE(review): {@code getClient(name)} may return null for an unknown
     * name, which would NPE below — confirm callers guarantee a live client.
     */
    protected void service(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        String action = request.getParameter("action");
        String name = request.getParameter("name");
        String redirect = "/user.jsp?name=" + name;

        Client client = ClientRepository.getInstance().getClient(name);

        if ("kick".equals(action)) {
            // Audit trail: who was kicked, from where, and by which admin.
            logger.info(
                client.getUser().getName()
                    + " ("
                    + client.getInetAddress()
                    + ") has been kicked by "
                    + request.getRemoteUser()
                    + " ("
                    + request.getRemoteHost()
                    + ")");
        } else if ("ban".equals(action)) {
            Banlist banlist = Banlist.getInstance();
            banlist.ban(client.getInetAddress().getHostAddress());
            logger.info(
                client.getUser().getName()
                    + " ("
                    + client.getInetAddress()
                    + ") has been banned by "
                    + request.getRemoteUser()
                    + " ("
                    + request.getRemoteHost()
                    + ")");
            // save the server configuration
            Server.getInstance().getConfig().save();
        }

        // Both actions end with the client being disconnected.
        client.disconnect();

        response.sendRedirect("/channel.jsp?name=" + client.getChannel().getConfig().getName());
    }
}
/**
 * Reads the remainder of the stream into a byte array.
 *
 * @param in stream to drain; not closed by this method
 * @return the bytes read, or {@code null} if an I/O error occurred
 */
private byte[] readBytes(InputStream in) {
    try {
        ByteArrayOutputStream collected = new ByteArrayOutputStream();
        byte[] chunk = new byte[BUFFER_SIZE];
        for (int n = in.read(chunk, 0, BUFFER_SIZE); n != -1; n = in.read(chunk, 0, BUFFER_SIZE)) {
            collected.write(chunk, 0, n);
        }
        collected.close();
        return collected.toByteArray();
    } catch (IOException ex) {
        Logger.getLogger(getClass()).debug("Error loading Zip entry", ex);
        return null;
    }
}
public class MyFileListTable extends MyDefaultTable { private static Logger log = Logger.getLogger(MyFileListTable.class.getName()); public MyFileListTable() { super(); } public MyFileListTable(TableModel dm) { super(dm); } protected void init() { super.init(); setRowHeight(16); setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION); enableAutoPack(true); } public TableCellRenderer getCellRenderer(int row, int column) { // log.info(row+", "+column); return super.getCellRenderer(row, column); } protected void resizeTable() { if (getColumnCount() > 1) { packColumn(this, 0); packColumn(this, 1); } } /** Overrides <code>JComponent</code>'s <code>getToolTipText</code> */ public String getToolTipText(MouseEvent e) { String tip = null; java.awt.Point p = e.getPoint(); int rowIndex = rowAtPoint(p); // int colIndex = columnAtPoint(p); // int realColumnIndex = convertColumnIndexToModel(colIndex); TableModel model = getModel(); tip = (String) model.getValueAt(rowIndex, 1); return tip; } }