/**
 * The main entry point for processing project header data.
 *
 * @param file parent project file
 * @param props properties data
 * @param rootDir root of the POI file system
 */
public void process(ProjectFile file, Props props, DirectoryEntry rootDir) throws MPXJException
{
   //MPPUtility.fileDump("c:\\temp\\props.txt", props.toString().getBytes());

   ProjectHeader ph = file.getProjectHeader();
   ph.setStartDate(props.getTimestamp(Props.PROJECT_START_DATE));
   ph.setFinishDate(props.getTimestamp(Props.PROJECT_FINISH_DATE));
   ph.setScheduleFrom(ScheduleFrom.getInstance(1 - props.getShort(Props.SCHEDULE_FROM)));
   ph.setCalendarName(props.getUnicodeString(Props.DEFAULT_CALENDAR_NAME));
   ph.setDefaultStartTime(props.getTime(Props.START_TIME));
   ph.setDefaultEndTime(props.getTime(Props.END_TIME));
   ph.setStatusDate(props.getTimestamp(Props.STATUS_DATE));
   ph.setHyperlinkBase(props.getUnicodeString(Props.HYPERLINK_BASE));
   //ph.setDefaultDurationIsFixed();
   ph.setDefaultDurationUnits(MPPUtility.getDurationTimeUnits(props.getShort(Props.DURATION_UNITS)));
   ph.setMinutesPerDay(Integer.valueOf(props.getInt(Props.MINUTES_PER_DAY)));
   ph.setMinutesPerWeek(Integer.valueOf(props.getInt(Props.MINUTES_PER_WEEK)));
   ph.setDefaultOvertimeRate(new Rate(props.getDouble(Props.OVERTIME_RATE), TimeUnit.HOURS));
   ph.setDefaultStandardRate(new Rate(props.getDouble(Props.STANDARD_RATE), TimeUnit.HOURS));
   ph.setDefaultWorkUnits(MPPUtility.getWorkTimeUnits(props.getShort(Props.WORK_UNITS)));
   ph.setSplitInProgressTasks(props.getBoolean(Props.SPLIT_TASKS));
   ph.setUpdatingTaskStatusUpdatesResourceStatus(props.getBoolean(Props.TASK_UPDATES_RESOURCE));

   ph.setCurrencyDigits(Integer.valueOf(props.getShort(Props.CURRENCY_DIGITS)));
   ph.setCurrencySymbol(props.getUnicodeString(Props.CURRENCY_SYMBOL));
   ph.setCurrencyCode(props.getUnicodeString(Props.CURRENCY_CODE));
   //ph.setDecimalSeparator();
   ph.setSymbolPosition(MPPUtility.getSymbolPosition(props.getShort(Props.CURRENCY_PLACEMENT)));
   //ph.setThousandsSeparator();

   ph.setWeekStartDay(Day.getInstance(props.getShort(Props.WEEK_START_DAY) + 1));
   ph.setFiscalYearStartMonth(Integer.valueOf(props.getShort(Props.FISCAL_YEAR_START_MONTH)));
   ph.setFiscalYearStart(props.getShort(Props.FISCAL_YEAR_START) == 1);
   ph.setDaysPerMonth(Integer.valueOf(props.getShort(Props.DAYS_PER_MONTH)));
   ph.setEditableActualCosts(props.getBoolean(Props.EDITABLE_ACTUAL_COSTS));
   ph.setHonorConstraints(!props.getBoolean(Props.HONOR_CONSTRAINTS));

   SummaryInformation summary = new SummaryInformation(rootDir);
   ph.setProjectTitle(summary.getProjectTitle());
   ph.setSubject(summary.getSubject());
   ph.setAuthor(summary.getAuthor());
   ph.setKeywords(summary.getKeywords());
   ph.setComments(summary.getComments());
   ph.setCompany(summary.getCompany());
   ph.setManager(summary.getManager());
   ph.setCategory(summary.getCategory());
   ph.setRevision(summary.getRevision());
   ph.setCreationDate(summary.getCreationDate());
   ph.setLastSaved(summary.getLastSaved());
   ph.setDocumentSummaryInformation(summary.getDocumentSummaryInformation());

   ph.setCalculateMultipleCriticalPaths(props.getBoolean(Props.CALCULATE_MULTIPLE_CRITICAL_PATHS));
}
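// Hedged usage sketch, not part of the original source: process() runs inside
// MPXJ's MPP readers, so application code normally sees its effect through the
// ProjectHeader populated on the returned ProjectFile. MPPReader, ProjectFile
// and ProjectHeader are real MPXJ types; the file name below is a placeholder.
import net.sf.mpxj.ProjectFile;
import net.sf.mpxj.ProjectHeader;
import net.sf.mpxj.mpp.MPPReader;

public class HeaderDemo
{
   public static void main(String[] args) throws Exception
   {
      // reading the file triggers header processing internally
      ProjectFile project = new MPPReader().read("example.mpp");
      ProjectHeader header = project.getProjectHeader();

      // fields set by process() above are now available
      System.out.println("Title:      " + header.getProjectTitle());
      System.out.println("Start date: " + header.getStartDate());
      System.out.println("Author:     " + header.getAuthor());
   }
}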
/**
 * Helper function to initialize a job configuration.
 */
public static JobConf getJobConf(String name, Props props, Class classobj) throws Exception
{
   JobConf conf = new JobConf();
   // set custom class loader with custom find resource strategy.
   conf.setJobName(name);

   String hadoop_ugi = props.getProperty("hadoop.job.ugi", null);
   if (hadoop_ugi != null)
   {
      conf.set("hadoop.job.ugi", hadoop_ugi);
   }

   if (props.getBoolean("is.local", false))
   {
      conf.set("mapred.job.tracker", "local");
      conf.set("fs.default.name", "file:///");
      conf.set("mapred.local.dir", "/tmp/map-red");
      info("Running locally, no hadoop jar set.");
   }
   else
   {
      setClassLoaderAndJar(conf, classobj);
      info("Setting hadoop jar file for class:" + classobj + " to " + conf.getJar());
      info("*************************************************************************");
      info(" Running on Real Hadoop Cluster(" + conf.get("mapred.job.tracker") + ") ");
      info("*************************************************************************");
   }

   // set JVM options if present
   if (props.containsKey("mapred.child.java.opts"))
   {
      conf.set("mapred.child.java.opts", props.getProperty("mapred.child.java.opts"));
      info("mapred.child.java.opts set to " + props.getProperty("mapred.child.java.opts"));
   }

   // add external jars to the hadoop classpath
   String externalJarList = props.getProperty("hadoop.external.jarFiles", null);
   if (externalJarList != null)
   {
      String[] jarFiles = externalJarList.split(",");
      for (String jarFile : jarFiles)
      {
         info("Adding external jar file: " + jarFile);
         DistributedCache.addFileToClassPath(new Path(jarFile), conf);
      }
   }

   // add distributed cache files
   String cacheFileList = props.getProperty("hadoop.cache.files", null);
   if (cacheFileList != null)
   {
      String[] cacheFiles = cacheFileList.split(",");
      for (String cacheFile : cacheFiles)
      {
         info("Adding distributed cache file: " + cacheFile);
         DistributedCache.addCacheFile(new URI(cacheFile), conf);
      }
   }

   // add distributed cache archives
   String archiveFileList = props.getProperty("hadoop.cache.archives", null);
   if (archiveFileList != null)
   {
      String[] archiveFiles = archiveFileList.split(",");
      for (String archiveFile : archiveFiles)
      {
         info("Adding distributed cache archive: " + archiveFile);
         DistributedCache.addCacheArchive(new URI(archiveFile), conf);
      }
   }

   // add every jar found in the default HDFS classpath directory, if configured
   String hadoopCacheJarDir = props.getProperty("hdfs.default.classpath.dir", null);
   if (hadoopCacheJarDir != null)
   {
      FileSystem fs = FileSystem.get(conf);
      if (fs != null)
      {
         FileStatus[] status = fs.listStatus(new Path(hadoopCacheJarDir));
         if (status != null)
         {
            for (int i = 0; i < status.length; ++i)
            {
               if (!status[i].isDir())
               {
                  Path path = new Path(hadoopCacheJarDir, status[i].getPath().getName());
                  info("Adding jar to distributed cache classpath: " + path);
                  DistributedCache.addFileToClassPath(path, conf);
               }
            }
         }
         else
         {
            info("hdfs.default.classpath.dir " + hadoopCacheJarDir + " is empty.");
         }
      }
      else
      {
         info("hdfs.default.classpath.dir " + hadoopCacheJarDir + " filesystem doesn't exist");
      }
   }

   // copy "hadoop."-prefixed properties straight into the job configuration;
   // may want to add this to HadoopUtils, but will await refactoring
   for (String key : props.stringPropertyNames())
   {
      String lowerCase = key.toLowerCase();
      if (lowerCase.startsWith(HADOOP_PREFIX))
      {
         String newKey = key.substring(HADOOP_PREFIX.length());
         conf.set(newKey, props.getProperty(key));
      }
   }

   KafkaETLUtils.setPropsInJob(conf, props);
   return conf;
}
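// Hedged usage sketch, not from the original source: drives the helper above in
// local mode. "JobUtils" is a hypothetical name for the class that declares
// getJobConf(); Props is assumed to extend java.util.Properties (its getBoolean
// and stringPropertyNames calls above suggest this); all paths and property
// values are placeholders.
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;

public class GetJobConfDemo
{
   public static void main(String[] args) throws Exception
   {
      Props props = new Props(); // assumed no-arg constructor
      props.put("is.local", "true"); // use the local job tracker and file:/// fs
      props.put("mapred.child.java.opts", "-Xmx512m");

      // in local mode the class argument is only used for logging/jar lookup
      JobConf conf = JobUtils.getJobConf("demo-job", props, GetJobConfDemo.class);

      // identity map/reduce over placeholder paths, so the job is submittable as-is
      FileInputFormat.setInputPaths(conf, new Path("/tmp/demo-in"));
      FileOutputFormat.setOutputPath(conf, new Path("/tmp/demo-out"));
      JobClient.runJob(conf);
   }
}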
/**
 * Reads this flag's value from the supplied props, falling back to the default
 * when the property is absent.
 */
public boolean get(Props props)
{
   return props.getBoolean(fullName, defaultValue);
}
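// Hedged sketch, not from the original source: one plausible shape for the
// class enclosing get(Props), inferred from the fullName and defaultValue
// fields it reads. Every name other than get(Props) is an assumption made
// purely for illustration.
public final class BooleanFlag // hypothetical enclosing class
{
   private final String fullName;      // full property key looked up in Props
   private final boolean defaultValue; // value used when the key is absent

   public BooleanFlag(String fullName, boolean defaultValue)
   {
      this.fullName = fullName;
      this.defaultValue = defaultValue;
   }

   public boolean get(Props props)
   {
      return props.getBoolean(fullName, defaultValue);
   }
}

// usage: new BooleanFlag("job.enable.retries", false).get(props)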