public void fixInconsistent() throws IOException {
    if (ifFix) {
        for (String segFullName : inconsistentHTables) {
            String[] sepNameList = segFullName.split(",");
            HTableDescriptor desc =
                    hbaseAdmin.getTableDescriptor(TableName.valueOf(sepNameList[0]));
            logger.info("Change the host of htable " + sepNameList[0] + " belonging to cube "
                    + sepNameList[1] + " from " + desc.getValue(IRealizationConstants.HTableTag)
                    + " to " + dstCfg.getMetadataUrlPrefix());
            hbaseAdmin.disableTable(sepNameList[0]);
            desc.setValue(IRealizationConstants.HTableTag, dstCfg.getMetadataUrlPrefix());
            hbaseAdmin.modifyTable(sepNameList[0], desc);
            hbaseAdmin.enableTable(sepNameList[0]);
        }
    } else {
        logger.info("------ Inconsistent HTables Needed To Be Fixed ------");
        for (String hTable : inconsistentHTables) {
            String[] sepNameList = hTable.split(",");
            logger.info(sepNameList[0] + " belonging to cube " + sepNameList[1]);
        }
        logger.info("----------------------------------------------------");
    }
}
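/*
 * Illustrative sketch (not from the original source): the log lines above imply that
 * each entry in inconsistentHTables has the form "<htableName>,<cubeName>". The sample
 * values below are assumptions used only to demonstrate the split.
 */
public class SegFullNameFormatDemo {
    public static void main(String[] args) {
        String segFullName = "KYLIN_ABC123,sample_cube"; // hypothetical entry
        String[] sepNameList = segFullName.split(",");
        System.out.println("htable: " + sepNameList[0]); // KYLIN_ABC123
        System.out.println("cube:   " + sepNameList[1]); // sample_cube
    }
}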
/*
 * (non-Javadoc)
 *
 * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet()
 */
@Override
public void afterPropertiesSet() throws Exception {
    String timeZone = jobService.getKylinConfig().getTimeZone();
    TimeZone tzone = TimeZone.getTimeZone(timeZone);
    TimeZone.setDefault(tzone);

    final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
    String serverMode = kylinConfig.getServerMode();

    if (Constant.SERVER_MODE_JOB.equals(serverMode.toLowerCase())
            || Constant.SERVER_MODE_ALL.equals(serverMode.toLowerCase())) {
        logger.info("Initializing Job Engine ....");
        new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    DefaultScheduler scheduler = DefaultScheduler.getInstance();
                    scheduler.init(new JobEngineConfig(kylinConfig), new ZookeeperJobLock());
                    if (!scheduler.hasStarted()) {
                        logger.error("scheduler has not been started");
                        System.exit(1);
                    }
                } catch (Exception e) {
                    throw new RuntimeException(e);
                }
            }
        }).start();
    }
}
@Test
public void testMRConfigOverride() {
    KylinConfig config = KylinConfig.getInstanceFromEnv();
    Map<String, String> override = config.getMRConfigOverride();
    assertEquals(2, override.size());
    assertEquals("test1", override.get("test1"));
    assertEquals("test2", override.get("test2"));
}
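/*
 * A minimal sketch of how a prefixed override map like getMRConfigOverride() might be
 * assembled from kylin.properties. The prefix "kylin.job.mr.config.override." and the
 * sample keys are assumptions, not necessarily what Kylin actually uses.
 */
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class PrefixOverrideDemo {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("kylin.job.mr.config.override.test1", "test1"); // hypothetical
        props.setProperty("kylin.job.mr.config.override.test2", "test2"); // hypothetical
        props.setProperty("kylin.metadata.url", "kylin_metadata@hbase");  // ignored by filter

        String prefix = "kylin.job.mr.config.override.";
        Map<String, String> override = new HashMap<String, String>();
        for (String key : props.stringPropertyNames()) {
            if (key.startsWith(prefix)) {
                // strip the prefix so the map holds bare MR config keys
                override.put(key.substring(prefix.length()), props.getProperty(key));
            }
        }
        System.out.println(override); // {test1=test1, test2=test2}
    }
}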
public static void main(String[] args) throws ParseException, IOException {
    OptionsHelper optionsHelper = new OptionsHelper();
    Options options = new Options();
    options.addOption(OPTION_FIX);
    options.addOption(OPTION_DST_CFG_URI);
    options.addOption(OPTION_CUBE);

    boolean ifFix = false;
    String dstCfgUri;
    String cubeName;
    logger.info("jobs args: " + Arrays.toString(args));
    try {
        optionsHelper.parseOptions(options, args);
        logger.info("options: '" + options.toString() + "'");
        logger.info("option value 'fix': '" + optionsHelper.getOptionValue(OPTION_FIX) + "'");
        ifFix = Boolean.parseBoolean(optionsHelper.getOptionValue(OPTION_FIX));
        logger.info("option value 'dstCfgUri': '"
                + optionsHelper.getOptionValue(OPTION_DST_CFG_URI) + "'");
        dstCfgUri = optionsHelper.getOptionValue(OPTION_DST_CFG_URI);
        logger.info("option value 'cube': '" + optionsHelper.getOptionValue(OPTION_CUBE) + "'");
        cubeName = optionsHelper.getOptionValue(OPTION_CUBE);
    } catch (ParseException e) {
        optionsHelper.printUsage(CubeMigrationCheckCLI.class.getName(), options);
        throw e;
    }

    KylinConfig kylinConfig;
    if (dstCfgUri == null) {
        kylinConfig = KylinConfig.getInstanceFromEnv();
    } else {
        kylinConfig = KylinConfig.createInstanceFromUri(dstCfgUri);
    }

    CubeMigrationCheckCLI checkCLI = new CubeMigrationCheckCLI(kylinConfig, ifFix);
    checkCLI.execute(cubeName);
}
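/*
 * Hedged usage sketch: judging from the logged option names above ('fix', 'dstCfgUri',
 * 'cube'), the tool can be driven programmatically as below. The flag syntax and all
 * argument values are assumptions for illustration only.
 */
public class CubeMigrationCheckCLIDemo {
    public static void main(String[] args) throws Exception {
        CubeMigrationCheckCLI.main(new String[] {
                "-fix", "true",                       // assumed flag name
                "-dstCfgUri", "kylin_metadata@hbase", // assumed destination config URI
                "-cube", "sample_cube"                // assumed cube name
        });
    }
}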
public QueryService() {
    String metadataUrl = KylinConfig.getInstanceFromEnv().getMetadataUrl();
    // split TABLE@HBASE_URL
    int cut = metadataUrl.indexOf('@');
    tableNameBase = cut < 0 ? DEFAULT_TABLE_PREFIX : metadataUrl.substring(0, cut);
    hbaseUrl = cut < 0 ? metadataUrl : metadataUrl.substring(cut + 1);
    userTableName = tableNameBase + USER_TABLE_NAME;

    badQueryDetector.start();
}
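/*
 * A standalone sketch of the TABLE@HBASE_URL split performed above. The sample URLs
 * and the "kylin_metadata" default are assumptions; the point is the indexOf('@')
 * fallback behavior when no '@' is present.
 */
public class MetadataUrlSplitDemo {
    public static void main(String[] args) {
        for (String metadataUrl : new String[] {"kylin_metadata@hbase", "kylin_metadata"}) {
            int cut = metadataUrl.indexOf('@');
            String tableNameBase = cut < 0 ? "kylin_metadata" : metadataUrl.substring(0, cut);
            String hbaseUrl = cut < 0 ? metadataUrl : metadataUrl.substring(cut + 1);
            System.out.println(metadataUrl + " -> table=" + tableNameBase + ", hbase=" + hbaseUrl);
        }
    }
}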
/** Reads the Kylin properties as a Spring resource for password placeholder resolution. */
public PasswordPlaceholderConfigurer() {
    Resource[] resources = new Resource[1];
    Properties prop = KylinConfig.getKylinProperties();
    StringWriter writer = new StringWriter();
    prop.list(new PrintWriter(writer));
    String propString = writer.getBuffer().toString();
    IOUtils.closeQuietly(writer);
    InputStream is = IOUtils.toInputStream(propString, Charset.defaultCharset());
    resources[0] = new InputStreamResource(is);
    this.setLocations(resources);
}
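/*
 * A self-contained sketch of the Properties -> String -> InputStream round trip used
 * above; the property value is an assumption. One caveat worth knowing: unlike
 * store(), Properties.list() truncates values longer than 40 characters, so very long
 * property values would not survive this round trip intact.
 */
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.nio.charset.Charset;
import java.util.Properties;

public class PropertiesRoundTripDemo {
    public static void main(String[] args) throws Exception {
        Properties prop = new Properties();
        prop.setProperty("kylin.metadata.url", "kylin_metadata@hbase"); // hypothetical
        StringWriter writer = new StringWriter();
        prop.list(new PrintWriter(writer));
        String propString = writer.getBuffer().toString();
        // the same bytes could now back a Spring InputStreamResource
        InputStream is = new ByteArrayInputStream(propString.getBytes(Charset.defaultCharset()));
        System.out.println(propString);
        is.close();
    }
}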
@Before
public void setUp() throws Exception {
    this.createTestMetadata();

    CubeManager cubeMgr = CubeManager.getInstance(getTestConfig());
    cube = cubeMgr.getCube("test_kylin_cube_without_slr_left_join_empty");
    Assert.assertNotNull(cube);
    storageEngine = StorageFactory.createQuery(cube);
    String url = KylinConfig.getInstanceFromEnv().getStorageUrl();
    context = new StorageContext();
    context.setConnUrl(url);
    mockup = new StorageMockUtils(cube.getModel());
}
@Override
public int run(String[] args) throws Exception {
    Options options = new Options();
    try {
        options.addOption(OPTION_INPUT_PATH);
        options.addOption(OPTION_HTABLE_NAME);
        options.addOption(OPTION_CUBE_NAME);
        parseOptions(options, args);

        String tableName = getOptionValue(OPTION_HTABLE_NAME).toUpperCase();
        // e.g. /tmp/kylin-3f150b00-3332-41ca-9d3d-652f67f044d7/test_kylin_cube_with_slr_ready_2_segments/hfile/
        // end with "/"
        String input = getOptionValue(OPTION_INPUT_PATH);

        Configuration conf = HBaseConfiguration.create(getConf());
        FileSystem fs = FileSystem.get(conf);

        String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase();
        KylinConfig config = KylinConfig.getInstanceFromEnv();
        CubeManager cubeMgr = CubeManager.getInstance(config);
        CubeInstance cube = cubeMgr.getCube(cubeName);
        CubeDesc cubeDesc = cube.getDescriptor();
        FsPermission permission = new FsPermission((short) 0777);
        for (HBaseColumnFamilyDesc cf : cubeDesc.getHBaseMapping().getColumnFamily()) {
            String cfName = cf.getName();
            fs.setPermission(new Path(input + cfName), permission);
        }

        String[] newArgs = new String[2];
        newArgs[0] = input;
        newArgs[1] = tableName;

        log.debug("Start to run LoadIncrementalHFiles");
        int ret = ToolRunner.run(new LoadIncrementalHFiles(conf), newArgs);
        log.debug("End to run LoadIncrementalHFiles");
        return ret;
    } catch (Exception e) {
        printUsage(options);
        throw e;
    }
}
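/*
 * A small sketch (assumptions only) of the directory layout the permission loop above
 * expects: the input path ends with "/" and contains one subdirectory per HBase column
 * family, so input + cfName resolves to that family's HFile directory.
 */
import org.apache.hadoop.fs.Path;

public class HFileLayoutDemo {
    public static void main(String[] args) {
        String input = "/tmp/kylin-job/sample_cube/hfile/"; // hypothetical, ends with "/"
        String cfName = "F1";                               // hypothetical column family
        Path cfPath = new Path(input + cfName);
        System.out.println(cfPath); // /tmp/kylin-job/sample_cube/hfile/F1
    }
}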
public void check(List<String> segFullNameList) {
    issueExistHTables = Lists.newArrayList();
    inconsistentHTables = Lists.newArrayList();
    for (String segFullName : segFullNameList) {
        String[] sepNameList = segFullName.split(",");
        try {
            HTableDescriptor hTableDescriptor =
                    hbaseAdmin.getTableDescriptor(TableName.valueOf(sepNameList[0]));
            String host = hTableDescriptor.getValue(IRealizationConstants.HTableTag);
            if (!dstCfg.getMetadataUrlPrefix().equalsIgnoreCase(host)) {
                inconsistentHTables.add(segFullName);
            }
        } catch (IOException e) {
            issueExistHTables.add(segFullName);
        }
    }
}
public HBaseResourceStore(KylinConfig kylinConfig) throws IOException {
    super(kylinConfig);

    String metadataUrl = kylinConfig.getMetadataUrl();
    // split TABLE@HBASE_URL
    int cut = metadataUrl.indexOf('@');
    tableNameBase = cut < 0 ? DEFAULT_TABLE_NAME : metadataUrl.substring(0, cut);
    hbaseUrl = cut < 0 ? metadataUrl : metadataUrl.substring(cut + 1);

    createHTableIfNeeded(getAllInOneTableName());

    // tableNameMap = new LinkedHashMap<String, String>();
    // for (Entry<String, String> entry : TABLE_SUFFIX_MAP.entrySet()) {
    //     String pathPrefix = entry.getKey();
    //     String tableName = tableNameBase + entry.getValue();
    //     tableNameMap.put(pathPrefix, tableName);
    //     createHTableIfNeeded(tableName);
    // }
}
@Before
public void before() throws Exception {
    HBaseMetadataTestCase.staticCreateTestMetadata(AbstractKylinTestCase.SANDBOX_TEST_DATA);

    DeployUtil.initCliWorkDir();
    DeployUtil.deployMetadata();
    DeployUtil.overrideJobJarLocations();

    final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
    jobService = ExecutableManager.getInstance(kylinConfig);
    scheduler = DefaultScheduler.getInstance();
    scheduler.init(new JobEngineConfig(kylinConfig), new ZookeeperJobLock());
    if (!scheduler.hasStarted()) {
        throw new RuntimeException("scheduler has not been started");
    }
    cubeManager = CubeManager.getInstance(kylinConfig);
    jobEngineConfig = new JobEngineConfig(kylinConfig);
    for (String jobId : jobService.getAllJobIds()) {
        if (jobService.getJob(jobId) instanceof CubingJob) {
            jobService.deleteJob(jobId);
        }
    }
}
/** Creates a streaming schema: the source table, its StreamingConfig, and its KafkaConfig. */
@RequestMapping(value = "", method = {RequestMethod.POST})
@ResponseBody
public StreamingRequest saveStreamingConfig(@RequestBody StreamingRequest streamingRequest) {
    String project = streamingRequest.getProject();
    TableDesc tableDesc = deserializeTableDesc(streamingRequest);
    StreamingConfig streamingConfig = deserializeSchemalDesc(streamingRequest);
    KafkaConfig kafkaConfig = deserializeKafkaSchemalDesc(streamingRequest);
    boolean saveStreamingSuccess = false, saveKafkaSuccess = false;

    try {
        tableDesc.setUuid(UUID.randomUUID().toString());
        MetadataManager metaMgr = MetadataManager.getInstance(KylinConfig.getInstanceFromEnv());
        metaMgr.saveSourceTable(tableDesc);
        cubeMgmtService.syncTableToProject(new String[] {tableDesc.getIdentity()}, project);
    } catch (IOException e) {
        throw new BadRequestException("Failed to add streaming table.");
    }

    streamingConfig.setName(tableDesc.getIdentity());
    kafkaConfig.setName(tableDesc.getIdentity());
    try {
        if (StringUtils.isEmpty(streamingConfig.getName())) {
            logger.info("StreamingConfig name should not be empty.");
            throw new BadRequestException("StreamingConfig name should not be empty.");
        }
        try {
            streamingConfig.setUuid(UUID.randomUUID().toString());
            streamingService.createStreamingConfig(streamingConfig);
            saveStreamingSuccess = true;
        } catch (IOException e) {
            logger.error("Failed to save StreamingConfig:" + e.getLocalizedMessage(), e);
            throw new InternalErrorException(
                    "Failed to save StreamingConfig: " + e.getLocalizedMessage());
        }
        try {
            kafkaConfig.setUuid(UUID.randomUUID().toString());
            kafkaConfigService.createKafkaConfig(kafkaConfig);
            saveKafkaSuccess = true;
        } catch (IOException e) {
            try {
                streamingService.dropStreamingConfig(streamingConfig);
            } catch (IOException e1) {
                throw new InternalErrorException(
                        "StreamingConfig is created, but failed to create KafkaConfig: "
                                + e.getLocalizedMessage());
            }
            logger.error("Failed to save KafkaConfig:" + e.getLocalizedMessage(), e);
            throw new InternalErrorException(
                    "Failed to save KafkaConfig: " + e.getLocalizedMessage());
        }
    } finally {
        if (!saveKafkaSuccess || !saveStreamingSuccess) {
            if (saveStreamingSuccess) {
                StreamingConfig sConfig = streamingService.getStreamingManager()
                        .getStreamingConfig(streamingConfig.getName());
                try {
                    streamingService.dropStreamingConfig(sConfig);
                } catch (IOException e) {
                    throw new InternalErrorException(
                            "Action failed and failed to rollback the created streaming config: "
                                    + e.getLocalizedMessage());
                }
            }
            if (saveKafkaSuccess) {
                try {
                    KafkaConfig kConfig = kafkaConfigService.getKafkaConfig(kafkaConfig.getName());
                    kafkaConfigService.dropKafkaConfig(kConfig);
                } catch (IOException e) {
                    throw new InternalErrorException(
                            "Action failed and failed to rollback the created kafka config: "
                                    + e.getLocalizedMessage());
                }
            }
        }
    }
    streamingRequest.setSuccessful(true);
    return streamingRequest;
}
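/*
 * A generic sketch of the save-then-compensate pattern implemented above, with
 * hypothetical saveStreaming/saveKafka/dropStreaming helpers: if the second save
 * fails, the first is rolled back so no half-created schema is left behind.
 */
public class TwoPhaseSaveDemo {
    public static void main(String[] args) {
        boolean savedStreaming = false;
        try {
            saveStreaming();         // hypothetical first save
            savedStreaming = true;
            saveKafka();             // hypothetical second save; may fail
        } catch (RuntimeException e) {
            if (savedStreaming) {
                dropStreaming();     // hypothetical compensating rollback
            }
            throw e;
        }
    }

    static void saveStreaming() { /* stub */ }
    static void saveKafka() { /* stub */ }
    static void dropStreaming() { /* stub */ }
}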
@Before
public void setUp() throws Exception {
    this.createTestMetadata();
    config = KylinConfig.getInstanceFromEnv();
    metadataManager = MetadataManager.getInstance(config);
}