Code example #1
File: PlanRenderer.java  Project: escalope/IDK
  public static void setEntity(Plan ent, Map attributes) {
    if (ent == null) {
      return; // nothing to bind without an entity
    }
    Map currentMap =
        (Map) RenderComponentManager.retrieveIDs("Plan", ent.getPrefs(attributes).getView());
    current = ent.getPrefs(attributes).getView();
    // Bind the attributes panel of the rendered view, if present.
    if (currentMap.get("_attributes_") instanceof ingenias.editor.rendererxml.AttributesPanel) {
      ((ingenias.editor.rendererxml.AttributesPanel) currentMap.get("_attributes_")).setEntity(ent);
    }

    // Populate the Tasks collection panel, if the view rendered one.
    if (currentMap.get("Tasks") instanceof ingenias.editor.rendererxml.CollectionPanel) {
      try {
        ((ingenias.editor.rendererxml.CollectionPanel) currentMap.get("Tasks"))
            .setCollection("Tasks", ent.Tasks, ent.Tasks.getType());
      } catch (IllegalArgumentException | IllegalAccessException ex) {
        ex.printStackTrace();
      }
    }

    // Mirror the Tasks value into the label or text component that displays it.
    if (currentMap.get("Tasks") != null) {
      String tasksText = (ent.getTasks() != null) ? ent.getTasks().toString() : "";
      if (currentMap.get("Tasks") instanceof javax.swing.JLabel) {
        ((javax.swing.JLabel) currentMap.get("Tasks")).setText(tasksText);
      } else if (currentMap.get("Tasks") instanceof javax.swing.text.JTextComponent) {
        ((javax.swing.text.JTextComponent) currentMap.get("Tasks")).setText(tasksText);
      }
    }

    if (currentMap.get("Id") != null) {
      if (ent != null && ent.getId() != null) {
        if (currentMap.get("Id") instanceof javax.swing.JLabel) {
          ((javax.swing.JLabel) (currentMap).get("Id")).setText(ent.getId().toString());
        } else {
          if (currentMap.get("Id") instanceof javax.swing.text.JTextComponent)
            ((javax.swing.text.JTextComponent) (currentMap).get("Id"))
                .setText(ent.getId().toString());
        }
      } else {
        if (currentMap.get("Id") instanceof javax.swing.JLabel)
          ((javax.swing.JLabel) (currentMap).get("Id")).setText("");
        else {
          if (!(currentMap.get("Id") instanceof ingenias.editor.rendererxml.CollectionPanel))
            ((javax.swing.text.JTextComponent) (currentMap).get("Id")).setText("");
        }
      }
    }
  }
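
The Tasks and Id blocks above repeat the same dispatch: look up a component by field name and push the field's string value into it. Below is a minimal sketch of that pattern as a standalone helper, assuming the components are keyed by field name in a Map; the FieldBinder class is hypothetical and not part of the IDK code base.

import java.util.Map;
import javax.swing.JLabel;
import javax.swing.text.JTextComponent;

final class FieldBinder {
  private FieldBinder() {}

  /** Pushes value (or "" for null) into the component registered under fieldName, if any. */
  static void bind(Map<String, Object> components, String fieldName, Object value) {
    Object target = components.get(fieldName);
    String text = (value != null) ? value.toString() : "";
    if (target instanceof JLabel) {
      ((JLabel) target).setText(text);
    } else if (target instanceof JTextComponent) {
      ((JTextComponent) target).setText(text);
    }
    // Other component types (e.g. collection panels) need their own handling.
  }
}

With such a helper, each field binding in the renderer would reduce to a call like FieldBinder.bind(currentMap, "Id", ent.getId()).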
Code example #2
File: Main.java  Project: achyut/incubator-mrql
 public static void main(String[] args) throws Exception {
   Config.hadoop_mode = false;
   // Needed for the mrql.flink script, which packs all arguments into one "!"-separated string.
   if (args.length == 2 && args[0].equals("args"))
     args = args[1].substring(1).split("!");
   // Scan the flags to decide which execution mode was requested.
   for (String arg : args) {
     Config.hadoop_mode |= arg.equals("-local") || arg.equals("-dist");
     Config.bsp_mode |= arg.equals("-bsp");
     Config.spark_mode |= arg.equals("-spark");
     Config.flink_mode |= arg.equals("-flink");
   }
   // MapReduce is the default back end when no other mode is selected.
   Config.map_reduce_mode = !Config.bsp_mode && !Config.spark_mode && !Config.flink_mode;
   initialize_evaluator();
   // In Hadoop mode, let GenericOptionsParser strip the generic Hadoop options first.
   if (Config.hadoop_mode) {
     conf = Evaluator.evaluator.new_configuration();
     GenericOptionsParser gop = new GenericOptionsParser(conf, args);
     conf = gop.getConfiguration();
     args = gop.getRemainingArgs();
   }
   Config.parse_args(args, conf);
   Config.hadoop_mode = Config.local_mode || Config.distributed_mode;
   if (!Config.info) {
     // Quiet the loggers unless verbose output was requested.
     for (Enumeration en = LogManager.getCurrentLoggers(); en.hasMoreElements(); )
       ((Logger) en.nextElement()).setLevel(Level.WARN);
     LogManager.getRootLogger().setLevel(Level.WARN);
   }
   Evaluator.evaluator.init(conf);
   new TopLevel();
   System.out.print("Apache MRQL version " + version + " (");
   if (Config.compile_functional_arguments) System.out.print("compiled ");
   else System.out.print("interpreted ");
   if (Config.hadoop_mode) {
     if (Config.local_mode) System.out.print("local ");
     else if (Config.distributed_mode) System.out.print("distributed ");
     if (Config.spark_mode) System.out.println("Spark mode using " + Config.nodes + " tasks)");
     else if (Config.flink_mode)
       System.out.println("Flink mode using " + Config.nodes + " tasks)");
     else if (Config.bsp_mode)
       System.out.println("Hama BSP mode over " + Config.nodes + " BSP tasks)");
     else if (Config.nodes > 0)
       System.out.println("Hadoop MapReduce mode with " + Config.nodes + " reducers)");
     else if (!Config.local_mode)
       System.out.println("Hadoop MapReduce mode with 1 reducer, use -nodes to change it)");
     else System.out.println("Hadoop MapReduce mode)");
   } else if (Config.bsp_mode) System.out.println("in-memory BSP mode)");
   else System.out.println("in-memory Java mode)");
   if (Config.interactive) {
     // Interactive mode: read queries terminated by ';' until "quit" or "exit".
     System.out.println("Type quit to exit");
     ConsoleReader reader = new ConsoleReader();
     reader.setBellEnabled(false);
     History history = new History(new File(System.getProperty("user.home") + "/.mrqlhistory"));
     reader.setHistory(history);
     reader.setUseHistory(false);
     try {
       loop:
       while (true) {
         String line = "";
         String s = "";
         try {
           if (Config.hadoop_mode && Config.bsp_mode) Config.write(Plan.conf);
           do {
             s = reader.readLine("> ");
             if (s != null && (s.equals("quit") || s.equals("exit"))) break loop;
             if (s != null) line += " " + s;
           } while (s == null || s.indexOf(";") <= 0);
           line = line.substring(1);
           history.addToHistory(line);
           parser = new MRQLParser(new MRQLLex(new StringReader(line)));
           MRQLLex.reset();
           parser.parse();
         } catch (EOFException x) {
           break;
         } catch (Exception x) {
           if (x.getMessage() != null) System.out.println(x);
         } catch (Error x) {
           System.out.println(x);
         }
       }
     } finally {
       if (Config.hadoop_mode) {
         Plan.clean();
         Evaluator.evaluator.shutdown(Plan.conf);
       }
       if (Config.compile_functional_arguments) Compiler.clean();
     }
   } else
     try {
       if (Config.hadoop_mode && Config.bsp_mode) Config.write(Plan.conf);
       try {
         parser = new MRQLParser(new MRQLLex(new FileInputStream(query_file)));
       } catch (Exception e) {
         // when the query file is in HDFS
         Path path = new Path(query_file);
         FileSystem fs = path.getFileSystem(conf);
         parser = new MRQLParser(new MRQLLex(fs.open(path)));
       }
       parser.parse();
     } finally {
       if (Config.hadoop_mode) {
         Plan.clean();
         Evaluator.evaluator.shutdown(Plan.conf);
       }
       if (Config.compile_functional_arguments) Compiler.clean();
     }
 }
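
For reference, the flag handling at the top of main() amounts to a small decision table: -local or -dist enable Hadoop execution, -bsp, -spark and -flink select a back end, and MapReduce is the default when none of them is given. The sketch below isolates that logic using a hypothetical ModeFlags class rather than MRQL's Config fields.

/** Hypothetical condensed view of the mode flags parsed in Main.main(); not part of the MRQL API. */
final class ModeFlags {
  boolean hadoop;     // -local or -dist
  boolean bsp;        // -bsp
  boolean spark;      // -spark
  boolean flink;      // -flink
  boolean mapReduce;  // default back end when no other mode is requested

  static ModeFlags parse(String[] args) {
    ModeFlags f = new ModeFlags();
    for (String arg : args) {
      f.hadoop |= arg.equals("-local") || arg.equals("-dist");
      f.bsp |= arg.equals("-bsp");
      f.spark |= arg.equals("-spark");
      f.flink |= arg.equals("-flink");
    }
    // MapReduce is used only when no other back end was requested.
    f.mapReduce = !f.bsp && !f.spark && !f.flink;
    return f;
  }
}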