@Override
public Option<MapStatus> stop(boolean success) {
  try {
    // Update task metrics from accumulators (null in UnsafeShuffleWriterSuite)
    Map<String, Accumulator<Object>> internalAccumulators =
        taskContext.internalMetricsToAccumulators();
    if (internalAccumulators != null) {
      internalAccumulators
          .apply(InternalAccumulator.PEAK_EXECUTION_MEMORY())
          .add(getPeakMemoryUsedBytes());
    }

    if (stopping) {
      return Option.apply(null);
    } else {
      stopping = true;
      if (success) {
        if (mapStatus == null) {
          throw new IllegalStateException("Cannot call stop(true) without having called write()");
        }
        return Option.apply(mapStatus);
      } else {
        // The map task failed, so delete our output data.
        shuffleBlockResolver.removeDataByMap(shuffleId, mapId);
        return Option.apply(null);
      }
    }
  } finally {
    if (sorter != null) {
      // If sorter is non-null, then this implies that we called stop() in response to an error,
      // so we need to clean up memory and spill files created by the sorter
      sorter.cleanupResources();
    }
  }
}
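Several snippets in this collection build empty Options with Option.apply(null) rather than Option.empty(); scala.Option.apply maps null to None, so both forms are equivalent when called from Java. The following self-contained sketch (the class name is just for illustration) shows the construction and inspection calls used throughout.

import scala.Option;

// Illustrative only: the basic scala.Option calls used across these examples, in isolation.
public class OptionBasics {
  public static void main(String[] args) {
    Option<String> none = Option.apply(null);    // apply(null) yields None
    Option<String> some = Option.apply("value"); // apply(non-null) yields Some("value")
    Option<String> empty = Option.empty();       // explicit empty Option

    System.out.println(none.isEmpty());    // true
    System.out.println(some.isDefined());  // true
    System.out.println(some.get());        // value
    System.out.println(empty.isDefined()); // false
  }
}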
@Override
public ContainerRoot pull(String targetNodeName) {
  List<String> ips = KevoreePropertyHelper.getStringNetworkProperties(
      this.getModelService().getLastModel(), targetNodeName,
      Constants.KEVOREE_PLATFORM_REMOTE_NODE_IP());
  Option<Integer> portOption = KevoreePropertyHelper.getIntPropertyForGroup(
      this.getModelService().getLastModel(), this.getName(), "port", true, targetNodeName);
  int PORT = 8000;
  if (portOption.isDefined()) {
    PORT = portOption.get();
  }

  for (String ip : ips) {
    logger.debug("try to pull model on url=>" + "http://" + ip + ":" + PORT + "/model/current");
    ContainerRoot model = pullModel("http://" + ip + ":" + PORT + "/model/current");
    if (model != null) {
      return model;
    }
  }

  ContainerRoot model = pullModel("http://127.0.0.1:" + PORT + "/model/current");
  if (model == null) {
    logger.debug("Unable to pull a model on " + targetNodeName);
    return null;
  } else {
    return model;
  }
}
public static void addArrayElements(
    TreeNode parent, SpreadSheetTable table, List<Integer> rows, LogSupport log) {
  int valCol = table.numCols() - 1;
  Map<String, String> values = new HashMap<>();
  for (int i = 0; i < rows.size(); ++i) {
    String name;
    if (table.numCols() > 1) {
      name = table.get(rows.get(i), 0);
      int index = name.lastIndexOf('.');
      if (index != -1) {
        name = name.substring(index + 1);
      }
    } else {
      name = S.fmt("%s", i);
    }
    values.put(name, table.get(rows.get(i), valCol));
  }

  for (Map.Entry<String, String> e : values.entrySet()) {
    String name = e.getKey();
    String value = e.getValue();
    Option<TreeNodeLike> optVN = parent.findChild(name);
    TreeNode vn;
    if (!optVN.isDefined()) {
      vn = ElementHelper.addElement(parent, name);
    } else {
      vn = (TreeNode) optVN.get();
    }
    EntryEditorEditingSupport.writeValue(vn, value, log);
  }

  ElementHelper.removeChildren(parent, values.keySet());
}
@Override
public void push(ContainerRoot model, String targetNodeName) {
  List<String> ips = KevoreePropertyHelper.getStringNetworkProperties(
      model, targetNodeName, Constants.KEVOREE_PLATFORM_REMOTE_NODE_IP());
  Option<Integer> portOption = KevoreePropertyHelper.getIntPropertyForGroup(
      model, this.getName(), "port", true, targetNodeName);
  int PORT = 8000;
  if (portOption.isDefined()) {
    PORT = portOption.get();
  }

  boolean sent = false;
  for (String ip : ips) {
    logger.debug("try to send model on url=>" + "http://" + ip + ":" + PORT + "/model/current");
    if (sendModel(model, "http://" + ip + ":" + PORT + "/model/current")) {
      sent = true;
      break;
    }
  }

  if (!sent) {
    logger.debug("try to send model on url=>" + "http://127.0.0.1:" + PORT + "/model/current");
    if (!sendModel(model, "http://127.0.0.1:" + PORT + "/model/current")) {
      logger.error("Unable to push a model on " + targetNodeName);
    }
  }
}
@Override
public String getUsername(Context ctx) {
  Option<User> user = ContextUtil.getCurrentUser(ctx);
  if (user.isEmpty()) {
    return null;
  }
  return user.get().username;
}
/**
 * Extract a signed token that was signed by {@link #signToken(String)}.
 *
 * @param token The signed token to extract.
 * @return The verified raw token, or null if the token isn't valid.
 */
public String extractSignedToken(String token) {
  scala.Option<String> extracted = crypto.extractSignedToken(token);
  if (extracted.isDefined()) {
    return extracted.get();
  } else {
    return null;
  }
}
@Override
public void preRestart(Throwable reason, Option<Object> message) {
  log.error(
      reason,
      "Restarting due to [{}] when processing [{}]",
      reason.getMessage(),
      message.isDefined() ? message.get() : "");
}
public Address getAddressFor(Address remoteAddress) {
  final scala.Option<Address> optAddr = system.provider().getExternalAddressFor(remoteAddress);
  if (optAddr.isDefined()) {
    return optAddr.get();
  } else {
    throw new UnsupportedOperationException("cannot send to remote address " + remoteAddress);
  }
}
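Most of these examples unwrap a scala.Option with the same isDefined()/get() test and then fall back to null or throw. A small helper can centralize that pattern; the class and method below are invented for illustration and do not come from any of the projects above.

import scala.Option;

// Illustrative helper: unwrap a scala.Option from Java, falling back to a supplied default.
public final class OptionInterop {
  private OptionInterop() {}

  public static <T> T getOrElse(Option<T> option, T fallback) {
    return option.isDefined() ? option.get() : fallback;
  }
}

With such a helper, the body of extractSignedToken above reduces to a single line: return OptionInterop.getOrElse(crypto.extractSignedToken(token), null);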
public Optional<AssignmentConfig> assign(final RegionStats region, final ServerName server) {
  // Assumes the region already has an assignment; Option.get() throws if it is absent.
  final Option<RegionAssignment> maybeOldAssignment = assignments.get(region);
  final ServerName oldServer = maybeOldAssignment.get().getNewServer();
  if (Objects.equal(oldServer, server)) return Optional.absent();

  final RegionAssignment assignment = new RegionAssignment(region, server);
  return Optional.of(
      new AssignmentConfig(
          assignments.$plus(new Tuple2<RegionStats, RegionAssignment>(region, assignment)),
          servers));
}
/**
 * Restore an XFormsContainingDocument from XFormsState only.
 *
 * <p>Used by XFormsStateManager.
 *
 * @param xformsState XFormsState containing static and dynamic state
 * @param disableUpdates whether to disable updates (for recreating initial document upon browser
 *     back)
 */
public XFormsContainingDocument(XFormsState xformsState, boolean disableUpdates) {
  super();

  // 1. Restore the static state
  {
    final scala.Option<String> staticStateDigest = xformsState.staticStateDigest();

    if (staticStateDigest.isDefined()) {
      final XFormsStaticState cachedState =
          XFormsStaticStateCache.instance().getDocument(staticStateDigest.get());
      if (cachedState != null) {
        // Found static state in cache
        indentedLogger().logDebug("", "found static state by digest in cache");
        this.staticState = cachedState;
      } else {
        // Static state not found in cache, create it from the input
        indentedLogger().logDebug("", "did not find static state by digest in cache");
        indentedLogger().startHandleOperation("initialization", "restoring static state");
        this.staticState =
            XFormsStaticStateImpl.restore(staticStateDigest, xformsState.staticState());
        indentedLogger().endHandleOperation();

        // Store in cache
        XFormsStaticStateCache.instance().storeDocument(this.staticState);
      }
      assert this.staticState.isServerStateHandling();
    } else {
      // No digest provided, create static state from the input
      indentedLogger().logDebug("", "did not find static state by digest in cache");
      this.staticState =
          XFormsStaticStateImpl.restore(staticStateDigest, xformsState.staticState());
      assert this.staticState.isClientStateHandling();
    }

    this.staticOps = new StaticStateGlobalOps(staticState.topLevelPart());
    this.xpathDependencies = Version.instance().createUIDependencies(this);

    this.supportUpdates = !disableUpdates && !isNoUpdates();
  }

  // 2. Restore the dynamic state
  indentedLogger().startHandleOperation("initialization", "restoring containing document");
  try {
    restoreDynamicState(xformsState.dynamicState());
  } catch (Exception e) {
    throw OrbeonLocationException.wrapException(
        e, new ExtendedLocationData(null, "re-initializing XForms containing document"));
  }
  indentedLogger().endHandleOperation();
}
@Before
public void setUp() throws Exception {
  zookeeper = new EmbeddedZookeeper();
  zkConnect = String.format("127.0.0.1:%d", zookeeper.port());
  zkUtils = ZkUtils.apply(
      zkConnect, zkSessionTimeout, zkConnectionTimeout, JaasUtils.isZkSecurityEnabled());
  zkClient = zkUtils.zkClient();

  configs = new Vector<>();
  servers = new Vector<>();
  for (int i = 0; i < numBrokers; i++) {
    final Option<java.io.File> noFile = scala.Option.apply(null);
    final Option<SecurityProtocol> noInterBrokerSecurityProtocol = scala.Option.apply(null);
    Properties props = TestUtils.createBrokerConfig(
        i, zkConnect, false, false, TestUtils.RandomPort(), noInterBrokerSecurityProtocol,
        noFile, true, false, TestUtils.RandomPort(), false, TestUtils.RandomPort(), false,
        TestUtils.RandomPort());
    props.setProperty("auto.create.topics.enable", "true");
    props.setProperty("num.partitions", "1");
    // We *must* override this to use the port we allocated (Kafka currently allocates one port
    // that it always uses for ZK).
    props.setProperty("zookeeper.connect", this.zkConnect);

    KafkaConfig config = new KafkaConfig(props);
    configs.add(config);
    KafkaServer server = TestUtils.createServer(config, SystemTime$.MODULE$);
    servers.add(server);
  }

  brokerList = TestUtils.getBrokerListStrFromServers(
      JavaConversions.asScalaBuffer(servers), SecurityProtocol.PLAINTEXT);

  if (setupRestApp) {
    restApp = new RestApp(choosePort(), zkConnect, KAFKASTORE_TOPIC, compatibilityType);
    restApp.start();
  }
}
public List<Date> build() {
  DateRangeBuilder builder = new DateRangeBuilder(
      from, to, duration, holiday, Option.apply(loc), scalaList(shifters), selector,
      Option.apply(customDayOfMonth), Option.apply(customDayOfYear));
  return builder.javaList();
}
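The scalaList(shifters) call above converts a Java list into a Scala immutable list before handing it to the Scala API. Its implementation is not shown in this collection; the sketch below is one plausible shape for it, using the standard JavaConversions bridge that also appears in the Kafka setup example above. The class and method names are assumptions, not taken from the source project.

import java.util.List;
import scala.collection.JavaConversions;

// Assumed shape of the scalaList helper: wrap a java.util.List as a Scala Buffer,
// then copy it into an immutable scala.collection.immutable.List.
public final class ScalaCollections {
  private ScalaCollections() {}

  public static <T> scala.collection.immutable.List<T> scalaList(List<T> javaList) {
    return JavaConversions.asScalaBuffer(javaList).toList();
  }
}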
public SakaiGraderResultFolder(String path)
    throws NotValidResultFolderException, NotValidDownloadFolderException {
  File topFolder = new File(path);
  folder = new File(topFolder, MergingEnvironment.get().getAssignmentName());
  if (!(folder.exists() && folder.isDirectory())) {
    throw new NotValidResultFolderException("Missing assignment folder: " + path);
  }

  spreadsheetFile = new File(folder, "grades.xlsx");

  Option<SakaiBulkDownloadFolder> bulkDownloadOption = findBulkDownloadFolder();
  if (bulkDownloadOption.isEmpty()) {
    throw new NotValidResultFolderException("Missing a bulk download folder");
  }
  bulkDownloadFolder = bulkDownloadOption.get();
}
private static ValidationResult checkCompilerLibrary(LibraryDescriptor descriptor) {
  if (descriptor == null || descriptor.data().isEmpty()) return ValidationResult.OK;

  String libraryName = "Compiler library";

  CompilerLibraryData compilerLibraryData = (CompilerLibraryData) descriptor.data().get();
  Option<String> compilerLibraryProblem = compilerLibraryData.problem();

  if (compilerLibraryProblem.isDefined())
    return new ValidationResult(libraryName + ": " + compilerLibraryProblem.get());

  return ValidationResult.OK;
}
@Override
public List<String> getNewMessages() {
  if (!isConnected) {
    throw new IllegalStateException("The cluster is not connected to the ApplicationMaster.");
  }
  if (hasBeenStopped()) {
    throw new RuntimeException("The FlinkYarnCluster has already been stopped");
  }

  List<String> ret = new ArrayList<String>();

  // get messages from ApplicationClient (locally)
  while (true) {
    Object result = null;
    try {
      Future<Object> response = Patterns.ask(
          applicationClient, Messages.getLocalGetYarnMessage(), new Timeout(akkaDuration));
      result = Await.result(response, akkaDuration);
    } catch (Exception ioe) {
      LOG.warn("Error retrieving the YARN messages locally", ioe);
      break;
    }

    if (!(result instanceof Option)) {
      throw new RuntimeException(
          "LocalGetYarnMessage requires a response of type "
              + "Option. Instead the response is of type "
              + result.getClass()
              + ".");
    } else {
      Option messageOption = (Option) result;
      LOG.debug("Received message option {}", messageOption);
      if (messageOption.isEmpty()) {
        break;
      } else {
        Object obj = messageOption.get();
        if (obj instanceof Messages.YarnMessage) {
          Messages.YarnMessage msg = (Messages.YarnMessage) obj;
          ret.add("[" + msg.date() + "] " + msg.message());
        } else {
          LOG.warn("LocalGetYarnMessage returned unexpected type: " + messageOption);
        }
      }
    }
  }
  return ret;
}
@BeforeClass
public static void setupJobManager() {
  Configuration config = new Configuration();

  int port = NetUtils.getAvailablePort();

  config.setString(ConfigConstants.JOB_MANAGER_IPC_ADDRESS_KEY, "localhost");
  config.setInteger(ConfigConstants.JOB_MANAGER_IPC_PORT_KEY, port);

  scala.Option<Tuple2<String, Object>> listeningAddress =
      scala.Option.apply(new Tuple2<String, Object>("localhost", port));
  jobManagerSystem = AkkaUtils.createActorSystem(config, listeningAddress);
  ActorRef jobManagerActorRef = JobManager.startJobManagerActors(
          config, jobManagerSystem, StreamingMode.BATCH_ONLY,
          JobManager.class, MemoryArchivist.class)
      ._1();

  try {
    LeaderRetrievalService lrs = LeaderRetrievalUtils.createLeaderRetrievalService(config);

    jmGateway = LeaderRetrievalUtils.retrieveLeaderGateway(lrs, jobManagerSystem, timeout);
  } catch (Exception e) {
    fail("Could not retrieve the JobManager gateway. " + e.getMessage());
  }
}
public UnsafeShuffleWriter(
    BlockManager blockManager,
    IndexShuffleBlockResolver shuffleBlockResolver,
    TaskMemoryManager memoryManager,
    ShuffleMemoryManager shuffleMemoryManager,
    UnsafeShuffleHandle<K, V> handle,
    int mapId,
    TaskContext taskContext,
    SparkConf sparkConf) throws IOException {
  final int numPartitions = handle.dependency().partitioner().numPartitions();
  if (numPartitions > UnsafeShuffleManager.MAX_SHUFFLE_OUTPUT_PARTITIONS()) {
    throw new IllegalArgumentException(
        "UnsafeShuffleWriter can only be used for shuffles with at most "
            + UnsafeShuffleManager.MAX_SHUFFLE_OUTPUT_PARTITIONS()
            + " reduce partitions");
  }
  this.blockManager = blockManager;
  this.shuffleBlockResolver = shuffleBlockResolver;
  this.memoryManager = memoryManager;
  this.shuffleMemoryManager = shuffleMemoryManager;
  this.mapId = mapId;
  final ShuffleDependency<K, V, V> dep = handle.dependency();
  this.shuffleId = dep.shuffleId();
  this.serializer = Serializer.getSerializer(dep.serializer()).newInstance();
  this.partitioner = dep.partitioner();
  this.writeMetrics = new ShuffleWriteMetrics();
  taskContext.taskMetrics().shuffleWriteMetrics_$eq(Option.apply(writeMetrics));
  this.taskContext = taskContext;
  this.sparkConf = sparkConf;
  this.transferToEnabled = sparkConf.getBoolean("spark.file.transferTo", true);
  open();
}
public OperatorStatus onInputOrParameterChange(
    Map<String, TabularSchema> inputSchemas,
    OperatorParameters params,
    OperatorSchemaManager operatorSchemaManager) {
  this.updateOutputSchema(inputSchemas, params, operatorSchemaManager);
  scala.Option<String> msg = Option.empty();
  return new OperatorStatus(true, msg);
}
public String buildRuntimeErrorMessage(
    Instruction instruction, Throwable throwable, String message) {
  if (throwable instanceof EngineException
      && ((EngineException) throwable).cachedRuntimeErrorMessage().isDefined()) {
    return ((EngineException) throwable).cachedRuntimeErrorMessage().get();
  }
  return StackTraceBuilder.build(
      activation, agent, instruction, scala.Option.apply(throwable), message);
}
@Override
public Option<OperatorCheckpointStats> getCheckpointStats() {
  CheckpointStatsTracker tracker = getGraph().getCheckpointStatsTracker();

  if (tracker == null) {
    return Option.empty();
  } else {
    return tracker.getOperatorStats(getJobVertexId());
  }
}
@Override
public TestCaseResult test(Project project, boolean autoGrade)
    throws NotAutomatableException, NotGradableException {
  // There should be a setter (editable) for the command
  if (project.getClassesManager().isEmpty()) throw new NotGradableException();
  Option<ClassDescription> classDescription =
      new RootTagFinder(project).findClass("Command Interpreter");
  if (classDescription.isEmpty()) {
    if (autoGrade) throw new NotAutomatableException();
    classDescription = ManualClassFinder.find(project, "Command Interpreter");
  }
  Class<?> _class = classDescription.get().getJavaClass();
  Method[] methods = _class.getMethods();
  for (Method method : methods) {
    if (method.getName().startsWith("set")) return pass(autoGrade);
  }
  return fail("Couldn't find an editable property", autoGrade);
}
/**
 * Converts a User (model) object into a SocialUser/Identity object.
 *
 * @param user
 * @return
 */
private Identity userToIdentity(User user) {
  UserId userId;
  String firstName = null;
  String lastName = null;
  if (user.authenticationMethod.equals("userPassword")) {
    userId = new UserId(user.email, "userPassword");
  } else {
    userId = new UserId(user.credentials.extUserId, user.credentials.providerId);
    firstName = user.credentials.firstName;
    lastName = user.credentials.lastName;
  }

  OAuth1Info oAuth1 = null;
  OAuth2Info oAuth2 = null;
  PasswordInfo passwordInfo = null;
  if (user.authenticationMethod.equals("oauth1")) {
    oAuth1 = new OAuth1Info(user.credentials.oAuth1Token, user.credentials.oAuth1Secret);
  } else if (user.authenticationMethod.equals("oauth2")) {
    oAuth2 = new OAuth2Info(
        user.credentials.oAuth2AccessToken,
        Option.apply(user.credentials.oAuth2TokenType),
        Option.apply((Object) user.credentials.oAuth2ExpiresIn),
        Option.apply(user.credentials.oAuth2RefreshToken));
  } else if (user.authenticationMethod.equals("userPassword")) {
    passwordInfo = new PasswordInfo(
        user.credentials.passwordHasher,
        user.credentials.password,
        Option.apply(user.credentials.passwordSalt));
  }

  SocialUser socialUser = new SocialUser(
      userId,
      firstName,
      lastName,
      user.fullname,
      Option.apply(user.email),
      Option.apply(user.avatar),
      new AuthenticationMethod(user.authenticationMethod),
      Option.apply(oAuth1),
      Option.apply(oAuth2),
      Option.apply(passwordInfo));
  return (Identity) socialUser;
}
private XFormsContainingDocument createDocumentFromStore(
    RequestParameters parameters, boolean isInitialState, boolean disableUpdates) {
  final boolean isServerState = parameters.getEncodedClientStaticState() == null;

  final XFormsState xformsState;
  if (isServerState) {
    // State must be found by UUID in the store
    final ExternalContext externalContext = NetUtils.getExternalContext();
    final XFormsStateStore stateStore = XFormsStateStoreFactory.instance(externalContext);

    if (indentedLogger.isDebugEnabled())
      indentedLogger.logDebug(
          LOG_TYPE,
          "Getting document state from store.",
          "current cache size",
          Integer.toString(XFormsDocumentCache.instance().getCurrentSize()),
          "current store size",
          Long.toString(stateStore.getCurrentSize()),
          "max store size",
          Long.toString(stateStore.getMaxSize()));

    final ExternalContext.Session session =
        externalContext.getRequest().getSession(XFormsStateManager.FORCE_SESSION_CREATION);

    xformsState = stateStore.findState(session, parameters.getUUID(), isInitialState);

    if (xformsState == null) {
      // 2014-11-12: This means that 1. We had a valid incoming session and 2. we obtained a lock
      // on the document, yet we didn't find it. This means that somehow state was not placed
      // into or expired from the state store.
      throw new SessionExpiredException(
          "Unable to retrieve XForms engine state. Unable to process incoming request.");
    }
  } else {
    // State comes directly with request
    xformsState = new XFormsState(
        scala.Option.<String>apply(null),
        parameters.getEncodedClientStaticState(),
        DynamicState.apply(parameters.getEncodedClientDynamicState()));
  }

  // Create document
  final XFormsContainingDocument document =
      new XFormsContainingDocument(xformsState, disableUpdates);
  assert isServerState
      ? document.getStaticState().isServerStateHandling()
      : document.getStaticState().isClientStateHandling();
  return document;
}
@Override
public File compile(File templateFile) throws TemplateCompilationException {
  File result = null;
  String fileName = templateFile.getName();
  String ext = fileName.substring(fileName.lastIndexOf('.') + 1);
  String importsAsString = getImportsAsString(ext);
  int index = getTemplateExtIndex(ext);
  if (index >= 0) {
    String formatterType = formatterTypes[index];
    try {
      Option<File> resultOption = ScalaTemplateCompiler.compile(
          templateFile, sourceDirectory, outputDirectory, formatterType, importsAsString);
      result = resultOption.isDefined() ? resultOption.get() : null;
    } catch (TemplateCompilationError e) {
      throw new TemplateCompilationException(e.source(), e.message(), e.line(), e.column());
    }
  }
  return result;
}
/**
 * Create a BuildDocHandler that serves the manual from a given directory by wrapping a
 * FilesystemRepository, and the API docs from a given JAR file by wrapping a JarRepository.
 *
 * @param directory The directory to serve the documentation from.
 * @param jarFile The JAR file to serve the documentation from.
 * @param base The directory within the JAR file to serve the documentation from, or null if the
 *     documentation should be served from the root of the JAR.
 * @param fallbackToJar Whether the doc handler should fall back to the jar repo for docs.
 */
public static BuildDocHandler fromDirectoryAndJar(
    File directory, JarFile jarFile, String base, boolean fallbackToJar) {
  FileRepository fileRepo = new FilesystemRepository(directory);
  FileRepository jarRepo = new JarRepository(jarFile, Option.apply(base));
  FileRepository manualRepo;
  if (fallbackToJar) {
    manualRepo = new AggregateFileRepository(new FileRepository[] {fileRepo, jarRepo});
  } else {
    manualRepo = fileRepo;
  }
  return new DocumentationHandler(manualRepo, jarRepo);
}
// #http-context-creation
public static HttpsContext create(
    SSLContext sslContext,
    Optional<Collection<String>> enabledCipherSuites,
    Optional<Collection<String>> enabledProtocols,
    Optional<ClientAuth> clientAuth,
    Optional<SSLParameters> sslParameters)
    // #http-context-creation
    {
  final scala.Option<scala.collection.immutable.Seq<String>> ecs;
  if (enabledCipherSuites.isPresent())
    ecs = scala.Option.apply(Util.immutableSeq(enabledCipherSuites.get()));
  else ecs = scala.Option.empty();

  final scala.Option<scala.collection.immutable.Seq<String>> ep;
  if (enabledProtocols.isPresent())
    ep = scala.Option.apply(Util.immutableSeq(enabledProtocols.get()));
  else ep = scala.Option.empty();

  return new akka.http.scaladsl.HttpsContext(
      sslContext, ecs, ep, OptionConverters.toScala(clientAuth),
      OptionConverters.toScala(sslParameters));
}
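The last two arguments above are bridged with OptionConverters.toScala. When such a converter is not on the classpath, the same java.util.Optional / scala.Option conversion can be written by hand; a minimal sketch follows, with class and method names chosen purely for illustration.

import java.util.Optional;
import scala.Option;

// Illustrative helpers for bridging java.util.Optional and scala.Option by hand.
public final class Optionals {
  private Optionals() {}

  public static <T> Option<T> toScala(Optional<T> optional) {
    return Option.apply(optional.orElse(null)); // empty Optional -> None
  }

  public static <T> Optional<T> toJava(Option<T> option) {
    return option.isDefined() ? Optional.of(option.get()) : Optional.empty();
  }
}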
/** Called when connected to Mesos as a new framework. */
private void registered(Registered message) {
  connectionMonitor.tell(message, self());

  try {
    workerStore.setFrameworkID(Option.apply(message.frameworkId()));
  } catch (Exception ex) {
    fatalError("unable to store the assigned framework ID", ex);
    return;
  }

  launchCoordinator.tell(message, self());
  reconciliationCoordinator.tell(message, self());
  taskRouter.tell(message, self());
}
@Override
public void triggerModelUpdate() {
  if (starting) {
    final Option<ContainerRoot> modelOption =
        NodeNetworkHelper.updateModelWithNetworkProperty(this);
    if (modelOption.isDefined()) {
      new Thread() {
        public void run() {
          getModelService().unregisterModelListener(getModelListener());
          getModelService().atomicUpdateModel(modelOption.get());
          getModelService().registerModelListener(getModelListener());
        }
      }.start();
    }
    starting = false;
  } else {
    Group group = getModelElement();
    for (ContainerNode subNode : group.getSubNodesForJ()) {
      if (!subNode.getName().equals(this.getNodeName())) {
        internalPush(getModelService().getLastModel(), subNode.getName(), this.getNodeName());
      }
    }
  }
}
@Override
protected void initialize() throws Exception {
  LOG.info("Initializing Mesos resource master");

  workerStore.start();

  // create the scheduler driver to communicate with Mesos
  schedulerCallbackHandler = new SchedulerProxy(self());

  // register with Mesos
  FrameworkInfo.Builder frameworkInfo = mesosConfig.frameworkInfo().clone().setCheckpoint(true);

  Option<Protos.FrameworkID> frameworkID = workerStore.getFrameworkID();
  if (frameworkID.isEmpty()) {
    LOG.info("Registering as new framework.");
  } else {
    LOG.info(
        "Recovery scenario: re-registering using framework ID {}.",
        frameworkID.get().getValue());
    frameworkInfo.setId(frameworkID.get());
  }

  MesosConfiguration initializedMesosConfig = mesosConfig.withFrameworkInfo(frameworkInfo);
  MesosConfiguration.logMesosConfig(LOG, initializedMesosConfig);
  schedulerDriver = initializedMesosConfig.createDriver(schedulerCallbackHandler, false);

  // create supporting actors
  connectionMonitor = createConnectionMonitor();
  launchCoordinator = createLaunchCoordinator();
  reconciliationCoordinator = createReconciliationCoordinator();
  taskRouter = createTaskRouter();

  recoverWorkers();

  connectionMonitor.tell(new ConnectionMonitor.Start(), self());
  schedulerDriver.start();
}
public String cssText2sldText(String css) {
  try {
    GeoServerDataDirectory datadir = new GeoServerDataDirectory(getCatalog().getResourceLoader());
    File styleDir = datadir.findStyleDir();

    scala.collection.Seq<org.geoscript.geocss.Rule> rules = CssParser.parse(css).get();
    Translator translator = new Translator(scala.Option.apply(styleDir.toURI().toURL()));
    Style style = translator.css2sld(rules);

    SLDTransformer tx = new org.geotools.styling.SLDTransformer();
    tx.setIndentation(2);
    StringWriter sldChars = new java.io.StringWriter();
    System.out.println(sldChars.toString());
    tx.transform(style, sldChars);
    return sldChars.toString();
  } catch (Exception e) {
    throw new WicketRuntimeException("Error while parsing stylesheet [" + css + "] : " + e);
  }
}