/**
 * Run the recovery manager and observe recovery of the transaction.
 *
 * @throws Exception
 */
private static void recoverTransaction() throws Exception {
  ConfigParticipantRecordTypeMap map = new ConfigParticipantRecordTypeMap();
  RecordTypeManager.manager().add(map);

  RecoveryManager recoveryManager = RecoverySetup.getAndConfigureRecoveryManager();
  recoveryManager.scan();

  System.out.println("'child-config' value = " + ConfigService.getCommittedValue("child-config"));
  System.out.println(
      "'parent-config' value = " + ConfigService.getCommittedValue("parent-config"));

  recoveryManager.terminate();
}
/**
 * Returns true when the uploaded picture does not exactly match the width and height required by
 * the terminal type. The check is only performed when the CHECK_UPLOAD_IMAGE_SPECIFICATION
 * configuration value is "true"; otherwise the picture is always accepted.
 */
private boolean isInvalidPicture(TerminalType terminalType, int imgWidth, int imgHeight) {
  String checkUploadImage =
      configService.getByName(ConfigConstant.CHECK_UPLOAD_IMAGE_SPECIFICATION);
  if ("true".equals(checkUploadImage)) {
    return imgWidth != terminalType.getPicWidth() || imgHeight != terminalType.getPicHeight();
  }
  return false;
}
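For context, a hedged caller-side sketch of how such a guard might be used when handling an upload. The handler method, the terminalTypeService lookup, and the ImageIO-based decoding are illustrative assumptions and are not part of the original service.

// Hypothetical caller sketch; needs java.io.InputStream, java.io.IOException,
// java.awt.image.BufferedImage and javax.imageio.ImageIO.
public void uploadTerminalPicture(Long terminalTypeId, InputStream pictureStream)
    throws IOException {
  TerminalType terminalType = terminalTypeService.getById(terminalTypeId); // assumed lookup
  BufferedImage image = ImageIO.read(pictureStream);
  if (isInvalidPicture(terminalType, image.getWidth(), image.getHeight())) {
    // rejected only while CHECK_UPLOAD_IMAGE_SPECIFICATION resolves to "true"
    throw new IllegalArgumentException(
        "Picture must be exactly "
            + terminalType.getPicWidth() + "x" + terminalType.getPicHeight() + " pixels");
  }
  // ... persist the accepted picture ...
}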
@Override
@CoreDataModificationStatus(
    modificationType = ModificationType.UPDATE,
    entityClass = OrgPartDO.class)
public void deleteIfExists(ServiceContext context, Long orgId) {
  OrgPartDO orgPartDO = null;
  ScopeDO scopeDO = null;
  try {
    orgPartDO = orgPartDAO.findOrgPart(orgId, context.getScopeId());
    scopeDO = scopeDAO.getById(context.getScopeId());
    if (orgPartDO != null) {
      List<OrgPartDO> descendants =
          orgPartDAO.findParticipatingDescendantOrgParts(
              orgPartDO.getScope().getScopeId(), orgPartDO.getOrg().getOrgId());
      if (descendants != null && descendants.size() > 0) {
        if (!configService.isBooleanActive(
            context, scopeDO.getScopeId(), ConfigService.ORG_PART_DESCENDANT_CASCADE_DELETE)) {
          FaultInfo faultInfo = new FaultInfo();
          String errorMessage =
              messageSource.getMessage("validation.orgPart.childOrgsParticipating", null, null);
          faultInfo.setMessage(errorMessage);
          faultInfo.setAttributeErrors(Lists.<ValidationError>newArrayList());
          throw new ValidationServiceException(errorMessage, faultInfo);
        }
        List<Long> orgPartIds = Lists.newArrayList();
        for (OrgPartDO descendantDO : descendants) {
          orgPartIds.add(descendantDO.getOrgPartId());
        }
        try {
          orgPartDAO.deleteOrgParts(orgPartIds);
        } catch (Exception e) {
          // safe to catch all exceptions here because any failure is treated the same.
          FaultInfo faultInfo = new FaultInfo();
          String errorMessage =
              messageSource.getMessage("validation.orgPart.childOrgsParticipating", null, null);
          faultInfo.setMessage(errorMessage);
          faultInfo.setAttributeErrors(Lists.<ValidationError>newArrayList());
          throw new ValidationServiceException(errorMessage, faultInfo);
        }
      }
    }
  } catch (EmptyResultDataAccessException e) {
    // no action to take.
  }
}
/**
 * Create or update the participation of the given org in the given scope and store its extended
 * attributes. When the ORG_PART_DESCENDANT_CASCADE_ADD configuration is active, participations
 * are created for descendant orgs as well.
 */
private OrgPart addOrUpdate(
    ServiceContext context, ScopeDO scopeDO, Long orgId, Map<String, String> exts) {
  OrgDO orgDO = orgDAO.getById(orgId);
  if (!scopeDO.getScopeType().isAllowOrgPart()) {
    throw new ServiceException(
        messageSource.getMessage("validation.scope.orgPartNotAllowed", null, null));
  }
  if (orgDO == null) {
    throw new ServiceException(messageSource.getMessage("validation.org.required", null, null));
  }

  OrgPartDO orgPartDO = null;
  try {
    orgPartDO = orgPartDAO.findOrgPart(orgId, scopeDO.getScopeId());
    if (orgPartDO != null) {
      orgPartDO.setExtAttributes(exts);
    }
  } catch (EmptyResultDataAccessException e) {
    orgPartDO = null;
  }

  if (orgPartDO == null) {
    orgPartDO = new OrgPartDO();
    orgPartDO.setOrg(orgDO);
    orgPartDO.setScope(scopeDO);
    orgPartDO.setExtAttributes(exts);
    orgPartDAO.persist(orgPartDO);
  }

  storeExtFields(
      context,
      orgPartDO,
      orgPartExtDAO,
      EntityTypeCode.ORG_PART,
      orgPartDO.getScope().getScopeId());

  if (configService.isBooleanActive(
      context, scopeDO.getScopeId(), ConfigService.ORG_PART_DESCENDANT_CASCADE_ADD)) {
    orgPartDAO.createOrgPartsForDescendants(scopeDO.getScopeId(), orgDO.getOrgId());
  }
  return getMappingService().map(orgPartDO);
}
/**
 * Run the transaction and simulate a crash during commit.
 *
 * @throws Exception
 */
private static void runTransaction() throws Exception {
  RootTransaction ba1 = new RootTransaction();
  ba1.begin();

  ConfigService configService1 = new ConfigService("1");
  ConfigService configService2 = new ConfigService("2", true);

  ba1.add(configService1.getParticipant());
  ba1.add(configService2.getParticipant());

  configService1.setNewValue("1", "newVal1");
  configService2.setNewValue("2", "newVal2");

  try {
    // Will fail when ConfigService2 commits
    ba1.commit();
    System.err.println("Was able to commit, but should have failed");
  } catch (Error e) {
    System.out.println("Server simulated a crash, as expected");
  }
}
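Assuming both helpers live in the same demo class, the crash-and-recover flow can be exercised end to end as in the sketch below; only the two method names are taken from this section, the main driver itself is an assumption.

// Assumed driver: first simulate the crash during commit, then run the recovery pass.
public static void main(String[] args) throws Exception {
  runTransaction();     // ConfigService "2" fails while committing
  recoverTransaction(); // the recovery scan completes the work and prints the committed values
}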
/**
 * Prepares the cache of parsed filter configurations and search filters valid for the actual
 * request. This method should be called as soon as {@link QuerySettings.Filters} is available.
 *
 * @param filters to use to prepare relevant parsed filter configurations into request scope
 * @throws java.lang.ReflectiveOperationException if the filter field configuration cannot be
 *     parsed correctly
 */
protected void prepareFiltersForRequest(QuerySettings.Filters filters)
    throws ReflectiveOperationException {
  semiParsedFilters = new LinkedHashMap<>();
  searchFilters = new LinkedHashMap<>();
  rangeFiltersIntervals = new LinkedHashMap<>();

  if (filters != null && !filters.getFilterCandidatesKeys().isEmpty()) {
    Map<String, Object> filtersConfig =
        configService.get(ConfigService.CFGNAME_SEARCH_FULLTEXT_FILTER_FIELDS);
    if (filtersConfig == null || filtersConfig.isEmpty()) {
      if (log.isLoggable(Level.FINEST)) {
        log.log(
            Level.FINEST,
            "Configuration document ["
                + ConfigService.CFGNAME_SEARCH_FULLTEXT_FILTER_FIELDS
                + "] not found or is empty! This might be a bug.");
      }
      return;
    }

    // collect parsed filter configurations that are relevant to filters required by client
    for (String filterCandidateKey : filters.getFilterCandidatesKeys()) {
      if (filtersConfig.containsKey(filterCandidateKey)) {
        // get filter types for filterCandidateKey and check all types are the same
        Object filterConfig = filtersConfig.get(filterCandidateKey);
        // TODO search filter configuration - cache it
        SemiParsedFilterConfig parsedFilterConfig =
            ConfigParseUtil.parseFilterType(filterConfig, filterCandidateKey);
        semiParsedFilters.put(filterCandidateKey, parsedFilterConfig);
      }
    }

    // iterate over all collected filters and drop those that are suppressed
    for (String filterName :
        semiParsedFilters.keySet().toArray(new String[semiParsedFilters.size()])) {
      // the parsed filter could have been removed in the meantime, so check it is still present
      if (semiParsedFilters.containsKey(filterName)) {
        SemiParsedFilterConfig parsedFilterConfig = semiParsedFilters.get(filterName);
        if (parsedFilterConfig instanceof SemiParsedFilterConfigSupportSuppressed) {
          List<String> suppressed =
              ((SemiParsedFilterConfigSupportSuppressed) parsedFilterConfig).getSuppressed();
          if (suppressed != null) {
            for (String suppress : suppressed) {
              if (semiParsedFilters.containsKey(suppress)) {
                semiParsedFilters.remove(suppress);
              }
            }
          }
        }
      }
    }

    // iterate over filters
    for (SemiParsedFilterConfig filterConfig : semiParsedFilters.values()) {
      // terms filter
      if (filterConfig instanceof SemiParsedTermsFilterConfig) {
        SemiParsedTermsFilterConfig conf = (SemiParsedTermsFilterConfig) filterConfig;
        Set<String> fn = this.getFilterNamesForDocumentField(conf.getFieldName());
        final List<String> filterValues = new ArrayList<>(filters.getFilterCandidateValues(fn));
        if (!filterValues.isEmpty()) {
          // handle <_lowercase> if specified
          if (conf.isLowercase()) {
            for (int i = 0; i < filterValues.size(); i++) {
              filterValues.set(i, filterValues.get(i).toLowerCase(Locale.ENGLISH));
            }
          }
          TermsFilterBuilder tfb = new TermsFilterBuilder(conf.getFieldName(), filterValues);
          // handle terms filter <optional_settings>
          if (conf.getName() != null) {
            tfb.filterName(conf.getName());
          }
          if (conf.isCache() != null) {
            tfb.cache(conf.isCache());
          }
          if (conf.isCache() != null && conf.isCache() && conf.getCacheKey() != null) {
            tfb.cacheKey(conf.getCacheKey());
          }
          // TODO handle tfb.execution()
          searchFilters.put(conf.getFieldName(), tfb);
        }
        // range filter
      } else if (filterConfig instanceof SemiParsedRangeFilterConfig) {
        SemiParsedRangeFilterConfig conf = (SemiParsedRangeFilterConfig) filterConfig;
        RangeFilterBuilder rfb;
        // check if there is already range filter for this document field
        if (searchFilters.containsKey(conf.getFieldName())
            && searchFilters.get(conf.getFieldName()) instanceof RangeFilterBuilder) {
          // in this case we will be adding (or overriding) settings of existing filter
          rfb = (RangeFilterBuilder) searchFilters.get(conf.getFieldName());
        } else {
          rfb = new RangeFilterBuilder(conf.getFieldName());
        }

        final String filterValue = filters.getFirstValueForFilterCandidate(conf.getFilterName());
        ParsableIntervalConfig interval = null;
        if (filterValue != null) {
          IntervalRange intervalRange = rangeFiltersIntervals.get(conf.getFieldName());
          if (intervalRange == null) {
            intervalRange = new IntervalRange();
            rangeFiltersIntervals.put(conf.getFieldName(), intervalRange);
          }
          // handle <_processor> if specified
          if (conf.getProcessor() != null) {
            Class<?> processorClass = Class.forName(conf.getProcessor());
            if (!processorClass.isEnum()) {
              throw new RuntimeException(
                  "Class [" + conf.getProcessor() + "] is not an enum type.");
            }
            // TODO: improve ParsableIntervalConfig design to make sure this method has to be
            // implemented
            Method m = processorClass.getMethod("parseRequestParameterValue", String.class);
            interval = (ParsableIntervalConfig) m.invoke(processorClass, filterValue);
          }
          if (conf.definesGte()) {
            if (interval != null) {
              DateTime gte = new DateTime(interval.getGteValue(System.currentTimeMillis()));
              rfb.gte(gte.toString(DATE_TIME_FORMATTER_UTC));
              intervalRange.setGte(gte);
            } else {
              rfb.gte(filterValue);
              intervalRange.setGte(DATE_TIME_FORMATTER_UTC.parseDateTime(filterValue));
            }
          } else if (conf.definesLte()) {
            if (interval != null) {
              DateTime lte = new DateTime(interval.getLteValue(System.currentTimeMillis()));
              rfb.lte(lte.toString(DATE_TIME_FORMATTER_UTC));
              intervalRange.setLte(lte);
            } else {
              rfb.lte(filterValue);
              intervalRange.setLte(DATE_TIME_FORMATTER_UTC.parseDateTime(filterValue));
            }
          }
        }

        // handle range filter <optional_settings>
        if (conf.getName() != null) {
          rfb.filterName(conf.getName());
        }
        if (conf.isCache() != null) {
          rfb.cache(conf.isCache());
        }
        if (conf.isCache() != null && conf.isCache() && conf.getCacheKey() != null) {
          rfb.cacheKey(conf.getCacheKey());
        }
        searchFilters.put(conf.getFieldName(), rfb);
      } else {
        if (log.isLoggable(Level.FINE)) {
          log.log(
              Level.FINE,
              "Unsupported SemiParsedFilterConfig type: " + filterConfig.getClass().getName());
        }
      }
    }
  }
}
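To make the reflective <_processor> handling above more concrete, here is a minimal sketch of an interval processor enum such a configuration could point to. Only the static parseRequestParameterValue(String) lookup and the getGteValue/getLteValue calls are taken from the method above; the enum name, its constants, and the millisecond arithmetic are illustrative assumptions, and it is assumed that ParsableIntervalConfig declares the two getter methods.

// Illustrative sketch only -- enum name, constants and interval math are assumptions.
public enum PastIntervalExample implements ParsableIntervalConfig {
  DAY(24L * 60 * 60 * 1000),
  WEEK(7L * 24 * 60 * 60 * 1000);

  private final long millis;

  PastIntervalExample(long millis) {
    this.millis = millis;
  }

  @Override
  public long getGteValue(long now) {
    // lower bound of the range: "now" minus the interval length
    return now - millis;
  }

  @Override
  public long getLteValue(long now) {
    // upper bound of the range: simply "now" in this sketch
    return now;
  }

  // Resolved reflectively via getMethod("parseRequestParameterValue", String.class)
  // and invoked with the raw request parameter value.
  public static PastIntervalExample parseRequestParameterValue(String value) {
    return value == null ? null : valueOf(value.trim().toUpperCase(java.util.Locale.ENGLISH));
  }
}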