protected String[] buildOperand(Operand operand, Query query, Map entityAliasesMaps) {
  String[] operandElement;

  logger.debug("IN");
  try {
    Assert.assertNotNull(
        operand,
        "Input parameter [operand] cannot be null in order to execute method [buildOperand]");

    operandElement = new String[] {""};

    if (parentStatement.OPERAND_TYPE_STATIC.equalsIgnoreCase(operand.type)) {
      operandElement = buildStaticOperand(operand);
    } else if (parentStatement.OPERAND_TYPE_SUBQUERY.equalsIgnoreCase(operand.type)) {
      operandElement = new String[] {buildQueryOperand(operand)};
    } else if (parentStatement.OPERAND_TYPE_SIMPLE_FIELD.equalsIgnoreCase(operand.type)
        || parentStatement.OPERAND_TYPE_INLINE_CALCULATED_FIELD.equalsIgnoreCase(operand.type)) {
      operandElement = new String[] {buildFieldOperand(operand, query, entityAliasesMaps)};
    } else if (parentStatement.OPERAND_TYPE_PARENT_FIELD.equalsIgnoreCase(operand.type)) {
      operandElement = new String[] {buildParentFieldOperand(operand, query, entityAliasesMaps)};
    } else {
      // NOTE: OPERAND_TYPE_CALCULATED_FIELD cannot be used in a where condition
      Assert.assertUnreachable("Invalid operand type [" + operand.type + "]");
    }
  } finally {
    logger.debug("OUT");
  }
  return operandElement;
}
protected String buildFieldOperand(Operand operand, Query query, Map entityAliasesMaps) {
  String operandElement;
  IModelField datamartField;
  IModelEntity rootEntity;
  String queryName;
  String rootEntityAlias;
  Map targetQueryEntityAliasesMap;

  logger.debug("IN");
  try {
    targetQueryEntityAliasesMap = (Map) entityAliasesMaps.get(query.getId());
    Assert.assertNotNull(
        targetQueryEntityAliasesMap,
        "Entity aliases map for query ["
            + query.getId()
            + "] cannot be null in order to execute method [buildFieldOperand]");

    datamartField =
        parentStatement.getDataSource().getModelStructure().getField(operand.values[0]);
    Assert.assertNotNull(
        datamartField, "DataMart does not contain a field named [" + operand.values[0] + "]");

    Couple queryNameAndRoot = datamartField.getQueryName();
    queryName = (String) queryNameAndRoot.getFirst();
    logger.debug("select field query name [" + queryName + "]");

    if (queryNameAndRoot.getSecond() != null) {
      rootEntity = (IModelEntity) queryNameAndRoot.getSecond();
    } else {
      rootEntity = datamartField.getParent().getRoot();
    }
    logger.debug("where field query name [" + queryName + "]");
    logger.debug("where field root entity unique name [" + rootEntity.getUniqueName() + "]");

    if (!targetQueryEntityAliasesMap.containsKey(rootEntity.getUniqueName())) {
      logger.debug("Entity [" + rootEntity.getUniqueName() + "] requires a new alias");
      // getEntityAlias registers the new alias into targetQueryEntityAliasesMap
      rootEntityAlias = getEntityAlias(rootEntity, targetQueryEntityAliasesMap, entityAliasesMaps);
      logger.debug("A new alias has been generated [" + rootEntityAlias + "]");
    }
    rootEntityAlias = (String) targetQueryEntityAliasesMap.get(rootEntity.getUniqueName());
    logger.debug("where field root entity alias [" + rootEntityAlias + "]");

    if (operand instanceof HavingField.Operand) {
      // having clauses wrap the field alias in its aggregation function, e.g. SUM(alias)
      HavingField.Operand havingFieldOperand = (HavingField.Operand) operand;
      IAggregationFunction function = havingFieldOperand.function;
      operandElement = function.apply(parentStatement.getFieldAlias(rootEntityAlias, queryName));
    } else {
      operandElement = parentStatement.getFieldAlias(rootEntityAlias, queryName);
    }
    logger.debug("where element operand value [" + operandElement + "]");
  } finally {
    logger.debug("OUT");
  }
  return operandElement;
}
private ObjTemplate getTemplate(BIObject biObject) {
  ObjTemplate template;
  IObjTemplateDAO templateDAO;

  logger.debug("IN");
  try {
    Assert.assertNotNull(biObject, "Input [biObject] cannot be null");
    templateDAO = DAOFactory.getObjTemplateDAO();
    Assert.assertNotNull(templateDAO, "Impossible to instantiate templateDAO");
    template = templateDAO.getBIObjectActiveTemplate(biObject.getId());
    Assert.assertNotNull(template, "Loaded template cannot be null");
    logger.debug(
        "Active template ["
            + template.getName()
            + "] of document ["
            + biObject.getLabel()
            + "] loaded successfully");
  } catch (Throwable t) {
    throw new RuntimeException(
        "Impossible to load template for document [" + biObject.getLabel() + "]", t);
  } finally {
    logger.debug("OUT");
  }
  return template;
}
/**
 * Returns a map of parameters which will be sent in the request to the engine application.
 *
 * @param profile Profile of the user
 * @param roleName the name of the execution role
 * @param analyticalDocument the biobject
 * @return Map The map of the execution call parameters
 */
public Map getParameterMap(Object analyticalDocument, IEngUserProfile profile, String roleName) {
  Map parameters;
  BIObject biObject;

  logger.debug("IN");
  try {
    Assert.assertNotNull(
        analyticalDocument, "Input parameter [analyticalDocument] cannot be null");
    Assert.assertTrue(
        (analyticalDocument instanceof BIObject),
        "Input parameter [analyticalDocument] cannot be an instance of ["
            + analyticalDocument.getClass().getName()
            + "]");
    biObject = (BIObject) analyticalDocument;

    parameters = getRequestParameters(biObject);
    parameters = applySecurity(parameters, profile);
    parameters = applyService(parameters, biObject, profile);
    parameters = applyDatasourceForWriting(parameters, biObject);
  } finally {
    logger.debug("OUT");
  }
  return parameters;
}
protected DataStoreChangedEvent createEvent(
    IDataStore prev, final IDataStore curr, IDataSet set) {
  // case 1: no previous snapshot -> every current record counts as added
  if (prev == null) {
    return new DataStoreChangedEvent(
        set,
        curr,
        curr,
        getAllRecords(curr),
        new ArrayList<IRecord>(0),
        new ArrayList<IRecord>(0));
  }
  // prev != null
  Helper.checkNotNull(curr, "curr");

  IMetaData prevMeta = prev.getMetaData();
  IMetaData currMeta = curr.getMetaData();
  Assert.assertNotNull(prevMeta, "prevMeta");
  Assert.assertNotNull(currMeta, "currMeta");
  int prevFieldIndex = prevMeta.getIdFieldIndex();
  int currFieldIndex = currMeta.getIdFieldIndex();
  // case 2: no id field on one of the two sides -> records cannot be matched, so treat
  // it as a full replace (all current records added, all previous records deleted)
  if (prevFieldIndex == ID_NOT_DEFINED || currFieldIndex == ID_NOT_DEFINED) {
    return new DataStoreChangedEvent(
        set, prev, curr, getAllRecords(curr), new ArrayList<IRecord>(0), getAllRecords(prev));
  }

  // case 3: diff the two snapshots record by record, matching on the id field
  List<IRecord> updated =
      getUpdated(
          prev,
          new RecordIdRetriever() {
            public IRecord getRecordById(Object id) {
              return curr.getRecordByID(id);
            }
          },
          prevFieldIndex);
  List<IRecord> deleted = getDeleted(prev, curr, prevFieldIndex);
  List<IRecord> added =
      getAdded(
          prev,
          new RecordRetriever() {
            public IRecord getRecord(int index) {
              return curr.getRecordAt(index);
            }

            public int countRecords() {
              return (int) curr.getRecordsCount();
            }
          },
          prevFieldIndex);
  DataStoreChangedEvent res =
      new DataStoreChangedEvent(set, prev, curr, added, updated, deleted);
  return res;
}
private static Map<Object, IRecord> getRecordsById(
    List<IRecord> updatedOrAdded, int idFieldIndex) {
  Map<Object, IRecord> res = new HashMap<Object, IRecord>(updatedOrAdded.size());
  for (IRecord iRecord : updatedOrAdded) {
    IField fieldId = iRecord.getFieldAt(idFieldIndex);
    Assert.assertNotNull(fieldId, "fieldId");
    Object value = fieldId.getValue();
    Assert.assertNotNull(value, "value");
    res.put(value, iRecord);
  }
  return res;
}
/**
 * Returns a map of parameters which will be sent in the request to the engine application.
 *
 * @param analyticalDocumentSubObject SubObject to execute
 * @param profile Profile of the user
 * @param roleName the name of the execution role
 * @param analyticalDocument the object
 * @return Map The map of the execution call parameters
 */
public Map getParameterMap(
    Object analyticalDocument,
    Object analyticalDocumentSubObject,
    IEngUserProfile profile,
    String roleName) {
  Map parameters;
  BIObject biObject;
  SubObject subObject;

  logger.debug("IN");
  try {
    Assert.assertNotNull(
        analyticalDocument, "Input parameter [analyticalDocument] cannot be null");
    Assert.assertTrue(
        (analyticalDocument instanceof BIObject),
        "Input parameter [analyticalDocument] cannot be an instance of ["
            + analyticalDocument.getClass().getName()
            + "]");
    biObject = (BIObject) analyticalDocument;

    if (analyticalDocumentSubObject == null) {
      logger.warn("Input parameter [analyticalDocumentSubObject] is null");
      return getParameterMap(analyticalDocument, profile, roleName);
    }

    Assert.assertTrue(
        (analyticalDocumentSubObject instanceof SubObject),
        "Input parameter [analyticalDocumentSubObject] cannot be an instance of ["
            + analyticalDocumentSubObject.getClass().getName()
            + "]");
    subObject = (SubObject) analyticalDocumentSubObject;

    parameters = getRequestParameters(biObject);

    parameters.put("nameSubObject", subObject.getName() != null ? subObject.getName() : "");
    parameters.put(
        "descriptionSubObject",
        subObject.getDescription() != null ? subObject.getDescription() : "");
    parameters.put(
        "visibilitySubObject", subObject.getIsPublic().booleanValue() ? "Public" : "Private");
    parameters.put("subobjectId", subObject.getId());

    parameters = applySecurity(parameters, profile);
    parameters = applyService(parameters, biObject, profile);
    parameters = applyDatasourceForWriting(parameters, biObject);
    parameters.put("isFromCross", "false");
  } finally {
    logger.debug("OUT");
  }
  return parameters;
}
private static List<IRecord> getAdded(
    IDataStore prev, RecordRetriever currRetr, int prevFieldIndex) {
  List<IRecord> added = new ArrayList<IRecord>();
  for (int i = 0; i < currRetr.countRecords(); i++) {
    IRecord currRec = currRetr.getRecord(i);
    IField currIdField = currRec.getFieldAt(prevFieldIndex);
    Assert.assertNotNull(currIdField, "currIdField");
    Object id = currIdField.getValue();
    Assert.assertNotNull(id, "id");
    // a current record whose id is unknown to the previous snapshot is a new record
    if (prev.getRecordByID(id) == null) {
      added.add(currRec);
    }
  }
  return added;
}
private static List<IRecord> getDeleted(IDataStore prev, IDataStore curr, int prevFieldIndex) {
  List<IRecord> deleted = new ArrayList<IRecord>();
  for (int i = 0; i < prev.getRecordsCount(); i++) {
    IRecord prevRec = prev.getRecordAt(i);
    IField prevIdField = prevRec.getFieldAt(prevFieldIndex);
    Assert.assertNotNull(prevIdField, "prevIdField");
    Object prevId = prevIdField.getValue();
    Assert.assertNotNull(prevId, "prevId");
    IRecord currRec = curr.getRecordByID(prevId);
    if (currRec == null) {
      deleted.add(prevRec);
    }
  }
  return deleted;
}
private String parametersJsonToXML(String parsJson) {
  String xml = null;
  SourceBean sb = null;
  try {
    JSONObject json = new JSONObject(parsJson);
    sb = new SourceBean("PARAMETERSLIST");
    SourceBean sb1 = new SourceBean("ROWS");
    for (Iterator iterator = json.keys(); iterator.hasNext(); ) {
      String key = (String) iterator.next();
      String t = json.getString(key);
      SourceBean b = new SourceBean("ROW");
      b.setAttribute("NAME", key);
      b.setAttribute("TYPE", t);
      sb1.setAttribute(b);
    }
    sb.setAttribute(sb1);
    xml = sb.toXML(false);
  } catch (Exception e) {
    logger.error("error in parsing " + parsJson, e);
  }
  Assert.assertNotNull(xml, "There was an error in parsing " + parsJson);
  return xml;
}
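The method above maps each top-level JSON key/value pair onto a ROW element with NAME and TYPE attributes under a PARAMETERSLIST/ROWS envelope. A minimal sketch of the same mapping, assuming only org.json on the classpath and plain string building in place of SourceBean (the class name and the par_year parameter are illustrative, not part of the original code):

import java.util.Iterator;
import org.json.JSONObject;

public class ParametersJsonToXmlSketch {
  public static String toXml(String parsJson) throws Exception {
    JSONObject json = new JSONObject(parsJson);
    StringBuilder xml = new StringBuilder("<PARAMETERSLIST><ROWS>");
    for (Iterator iterator = json.keys(); iterator.hasNext(); ) {
      String key = (String) iterator.next();
      // each key/value pair becomes a ROW with NAME and TYPE attributes
      xml.append("<ROW NAME=\"").append(key)
          .append("\" TYPE=\"").append(json.getString(key)).append("\"/>");
    }
    return xml.append("</ROWS></PARAMETERSLIST>").toString();
  }

  public static void main(String[] args) throws Exception {
    // prints: <PARAMETERSLIST><ROWS><ROW NAME="par_year" TYPE="String"/></ROWS></PARAMETERSLIST>
    System.out.println(toXml("{\"par_year\":\"String\"}"));
  }
}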
protected Engine getEngineByDocumentType(String type) {
  Engine engine;
  List<Engine> engines;

  engine = null;
  try {
    Assert.assertNotNull(DAOFactory.getEngineDAO(), "EngineDao cannot be null");
    engines = DAOFactory.getEngineDAO().loadAllEnginesForBIObjectType(type);
    if (engines == null || engines.size() == 0) {
      throw new SpagoBIServiceException(
          SERVICE_NAME, "There are no engines for documents of type [" + type + "] available");
    } else {
      engine = engines.get(0);
      if (engines.size() > 1) {
        LogMF.warn(
            logger,
            "There is more than one engine for documents of type ["
                + type
                + "]. The one whose label is equal to [{0}] will be used",
            engine.getLabel());
      } else {
        LogMF.debug(logger, "Using engine with label [{0}]", engine.getLabel());
      }
    }
  } catch (Throwable t) {
    throw new SpagoBIServiceException(
        SERVICE_NAME,
        "Impossible to load a valid engine for documents of type [" + type + "]",
        t);
  } finally {
    logger.debug("OUT");
  }
  return engine;
}
public static Engine getEngineByDocumentType(String type) {
  Engine engine;
  List<Engine> engines;

  engine = null;
  try {
    Assert.assertNotNull(DAOFactory.getEngineDAO(), "EngineDao cannot be null");
    engines = DAOFactory.getEngineDAO().loadAllEnginesForBIObjectType(type);
    if (engines == null || engines.size() == 0) {
      throw new SpagoBIRuntimeException(
          "There are no engines for documents of type [" + type + "] available");
    } else {
      engine = engines.get(0);
      if (engines.size() > 1) {
        LogMF.warn(
            logger,
            "There is more than one engine for documents of type ["
                + type
                + "]. The one whose label is equal to [{0}] will be used",
            engine.getLabel());
      }
    }
  } catch (Throwable t) {
    throw new SpagoBIRuntimeException(
        "Impossible to load a valid engine for documents of type [" + type + "]", t);
  } finally {
    logger.debug("OUT");
  }
  return engine;
}
private Map applyService(Map parameters, BIObject biObject) {
  logger.debug("IN");
  try {
    Assert.assertNotNull(parameters, "Input [parameters] cannot be null");

    ObjTemplate objectTemplate = biObject.getActiveTemplate();
    byte[] content = objectTemplate.getContent();
    SourceBean sbTemplate = getTemplateAsSourceBean(content);
    if (sbTemplate.getName().equals(EngineConstants.SMART_FILTER_TAG)) {
      parameters.put(PARAM_SERVICE_NAME, "FORM_ENGINE_FROM_DATASET_START_ACTION");
      if (sbTemplate.containsAttribute("DATASET")) {
        String label =
            (String) ((SourceBean) sbTemplate.getAttribute("DATASET")).getAttribute("label");
        parameters.put("dataset_label", label);
      }
    } else {
      parameters.put(PARAM_SERVICE_NAME, "FORM_ENGINE_START_ACTION");
    }
    parameters.put(PARAM_MODALITY, "VIEW");
    parameters.put(PARAM_NEW_SESSION, "TRUE");
  } catch (Throwable t) {
    throw new RuntimeException("Cannot apply service parameters", t);
  } finally {
    logger.debug("OUT");
  }
  return parameters;
}
public WorkSheetDefinition deserialize(Object o) throws SerializationException {
  WorkSheetDefinition workSheetDefinition = null;
  JSONObject workSheetDefinitionJSON = null;

  logger.debug("IN");
  try {
    Assert.assertNotNull(o, "Object to be deserialized cannot be null");

    if (o instanceof String) {
      logger.debug("Deserializing string [" + (String) o + "]");
      try {
        workSheetDefinitionJSON = new JSONObject((String) o);
      } catch (Throwable t) {
        logger.debug("Object to be deserialized must be a string encoding a JSON object");
        throw new SerializationException(
            "An error occurred while deserializing query: " + (String) o, t);
      }
    } else if (o instanceof JSONObject) {
      workSheetDefinitionJSON = (JSONObject) o;
    } else {
      Assert.assertUnreachable(
          "Object to be deserialized must be of type string or of type JSONObject, not of type ["
              + o.getClass().getName()
              + "]");
    }

    workSheetDefinition = new WorkSheetDefinition();
    try {
      deserializeSheets(workSheetDefinitionJSON, workSheetDefinition);
      deserializeGlobalFilters(workSheetDefinitionJSON, workSheetDefinition);
      deserializeOptions(workSheetDefinitionJSON, workSheetDefinition);
    } catch (Exception e) {
      throw new SerializationException(
          "An error occurred while deserializing worksheet: "
              + workSheetDefinitionJSON.toString(),
          e);
    }
  } finally {
    logger.debug("OUT");
  }
  logger.debug("Worksheet deserialized");
  return workSheetDefinition;
}
private static List<IRecord> getUpdated(
    IDataStore prev, RecordIdRetriever currRetr, int prevFieldIndex) {
  List<IRecord> updated = new ArrayList<IRecord>();
  for (int i = 0; i < prev.getRecordsCount(); i++) {
    IRecord prevRec = prev.getRecordAt(i);
    IField prevIdField = prevRec.getFieldAt(prevFieldIndex);
    Assert.assertNotNull(prevIdField, "prevIdField");
    Object prevId = prevIdField.getValue();
    Assert.assertNotNull(prevId, "prevId");
    IRecord currRec = currRetr.getRecordById(prevId);
    if (currRec == null) {
      // deleted
      continue;
    }

    // fields are compared by position
    // a possible improvement: compare them by field meta name instead
    List<IField> prevFields = prevRec.getFields();
    List<IField> currFields = currRec.getFields();
    if (prevFields.size() != currFields.size()) {
      updated.add(currRec);
      continue;
    }

    // a record counts as updated if at least one field value changed
    for (int j = 0; j < prevFields.size(); j++) {
      IField prevField = prevFields.get(j);
      IField currField = currFields.get(j);
      Assert.assertNotNull(prevField, "prevField");
      Assert.assertNotNull(currField, "currField");
      Object prevValue = prevField.getValue();
      Object currValue = currField.getValue();
      boolean equals = areEquals(prevValue, currValue);
      if (!equals) {
        updated.add(currRec);
        break;
      }
    }
  }
  return updated;
}
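Taken together, getAdded, getDeleted and getUpdated partition the previous and current snapshots into the three change sets of a DataStoreChangedEvent by matching records on the id field. A minimal self-contained sketch of the same partitioning over plain maps, with ids as keys and a single value standing in for the record's field list (all names here are illustrative, not part of the SpagoBI API; Map.of requires Java 9+):

import java.util.*;

public class DataStoreDiffSketch {
  public static void main(String[] args) {
    Map<Integer, String> prev = Map.of(1, "a", 2, "b", 3, "c");
    Map<Integer, String> curr = Map.of(2, "b", 3, "C", 4, "d");

    List<Integer> added = new ArrayList<>();   // in curr, not in prev -> [4]
    List<Integer> deleted = new ArrayList<>(); // in prev, not in curr -> [1]
    List<Integer> updated = new ArrayList<>(); // in both, value changed -> [3]

    // getAdded: current ids unknown to the previous snapshot
    for (Integer id : curr.keySet()) {
      if (!prev.containsKey(id)) {
        added.add(id);
      }
    }
    // getDeleted and getUpdated: walk the previous snapshot once
    for (Map.Entry<Integer, String> e : prev.entrySet()) {
      String currValue = curr.get(e.getKey());
      if (currValue == null) {
        deleted.add(e.getKey());
      } else if (!currValue.equals(e.getValue())) {
        updated.add(e.getKey());
      }
    }
    System.out.println(added + " " + deleted + " " + updated); // [4] [1] [3]
  }
}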
public Object deserialize(Object o, Class clazz) throws DeserializationException {
  Object result = null;
  try {
    Assert.assertNotNull(o, "Input parameter [o] cannot be null");
    Assert.assertNotNull(clazz, "Input parameter [clazz] cannot be null");

    SourceBean xml = null;
    if (o instanceof SourceBean) {
      xml = (SourceBean) o;
    } else if (o instanceof String) {
      xml = SourceBean.fromXMLString((String) o);
    } else {
      throw new DeserializationException(
          "Impossible to deserialize from an object of type [" + o.getClass().getName() + "]");
    }

    Deserializer deserializer = mappings.get(clazz);
    if (deserializer == null) {
      throw new DeserializationException(
          "Impossible to deserialize to an object of type [" + clazz.getName() + "]");
    }

    if (xml.getAttribute("ROWS") != null) {
      List list = new ArrayList();
      List<SourceBean> rows = xml.getAttributeAsList("ROWS.ROW");
      for (SourceBean row : rows) {
        list.add(deserializer.deserialize(row, clazz));
      }
      result = list;
    } else {
      result = deserializer.deserialize(o, clazz);
    }
  } catch (Throwable t) {
    throw new DeserializationException("An error occurred while deserializing object: " + o, t);
  }
  return result;
}
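The ROWS lookup above selects between two accepted payload shapes. An envelope whose root contains a ROWS child, such as the following (element and attribute names other than ROWS and ROW are illustrative),

<ROLES>
  <ROWS>
    <ROW name="admin" />
    <ROW name="user" />
  </ROWS>
</ROLES>

is deserialized ROW by ROW into a List with one object of the requested class per ROW, while a document without a ROWS child is handed to the registered Deserializer as a whole and comes back as a single object.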
public synchronized List<ListenerResult> changedDataSet(IDataSet currDataSet) {
  Helper.checkNotNull(currDataSet, "currDataSet");

  IDataStore prev = this.store;
  IDataStore curr = currDataSet.getDataStore();
  Assert.assertNotNull(curr, "curr");
  this.store = curr;
  this.dataSet = currDataSet;
  List<ListenerResult> res = fireListeners(createEvent(prev, curr, currDataSet));
  return res;
}
/**
 * Starting from a BIObject, extracts from it the map of the parameters for the execution call.
 *
 * @param biObject BIObject to execute
 * @return Map The map of the execution call parameters
 */
private Map getRequestParameters(BIObject biObject) {
  logger.debug("IN");

  Map parameters;
  ObjTemplate template;
  IBinContentDAO contentDAO;
  byte[] content;

  parameters = null;
  try {
    parameters = new Hashtable();
    template = this.getTemplate(biObject);
    try {
      contentDAO = DAOFactory.getBinContentDAO();
      Assert.assertNotNull(contentDAO, "Impossible to instantiate contentDAO");
      content = contentDAO.getBinContent(template.getBinId());
      Assert.assertNotNull(content, "Template content cannot be null");
    } catch (Throwable t) {
      throw new RuntimeException(
          "Impossible to load template content for document [" + biObject.getLabel() + "]", t);
    }
    appendRequestParameter(parameters, "document", biObject.getId().toString());
    appendAnalyticalDriversToRequestParameters(biObject, parameters);
    addBIParameterDescriptions(biObject, parameters);
    addMetadataAndContent(biObject, parameters);
  } finally {
    logger.debug("OUT");
  }
  return parameters;
}
public static Trigger convertTriggerFromNativeObject(org.quartz.Trigger quartzTrigger) {
  Trigger spagobiTrigger;

  spagobiTrigger = new Trigger();
  spagobiTrigger.setName(quartzTrigger.getName());
  spagobiTrigger.setGroupName(quartzTrigger.getGroup());
  spagobiTrigger.setDescription(quartzTrigger.getDescription());

  // spagobiTrigger.setCalendarName( quartzTrigger.getCalendarName() );
  Assert.assertTrue(
      quartzTrigger.getCalendarName() == null,
      "quartz trigger calendar name is not null: " + quartzTrigger.getCalendarName());

  spagobiTrigger.setStartTime(quartzTrigger.getStartTime());
  spagobiTrigger.setEndTime(quartzTrigger.getEndTime());
  // Triggers that run immediately have a generated name that starts with schedule_uuid_
  // (see TriggerXMLDeserializer). It would be better to rely on a dedicated property to
  // recognize whether a trigger is intended to run immediately.
  spagobiTrigger.setRunImmediately(spagobiTrigger.getName().startsWith("schedule_uuid_"));

  if (quartzTrigger instanceof org.quartz.CronTrigger) {
    org.quartz.CronTrigger quartzCronTrigger = (org.quartz.CronTrigger) quartzTrigger;
    // dirty trick: read back the original SpagoBI cron expression stored in the job data map
    String expression = (String) quartzCronTrigger.getJobDataMap().get(SPAGOBI_CRON_EXPRESSION);
    if (expression != null) {
      quartzCronTrigger.getJobDataMap().remove(SPAGOBI_CRON_EXPRESSION);
    } else {
      // for backward compatibility
      expression =
          (String) quartzCronTrigger.getJobDataMap().get(SPAGOBI_CRON_EXPRESSION_DEPRECATED);
      quartzCronTrigger.getJobDataMap().remove(SPAGOBI_CRON_EXPRESSION_DEPRECATED);
    }
    spagobiTrigger.setCronExpression(new CronExpression(expression));
  }
  Job job = new Job();
  job.setName(quartzTrigger.getJobName());
  job.setGroupName(quartzTrigger.getJobGroup());
  job.setVolatile(quartzTrigger.isVolatile());
  Map<String, String> parameters =
      convertParametersFromNativeObject(quartzTrigger.getJobDataMap());
  job.addParameters(parameters);
  spagobiTrigger.setJob(job);

  return spagobiTrigger;
}
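The "dirty trick" above pairs with convertTriggerToNativeObject further down: Quartz keeps cron expressions in its own syntax, so the original SpagoBI expression is carried through the trigger's JobDataMap and read back verbatim when converting in this direction. A minimal sketch of that round trip against the Quartz 1.x API (the key literal and both expression strings are illustrative; the real code uses the SPAGOBI_CRON_EXPRESSION constant and its own CronExpression format):

import org.quartz.CronTrigger;

public class CronExpressionRoundTripSketch {
  public static void main(String[] args) throws Exception {
    CronTrigger quartzTrigger = new CronTrigger("trigger_name", "trigger_group");
    quartzTrigger.setCronExpression("0 0 6 * * ?"); // Quartz-native syntax
    // stash the SpagoBI-format expression alongside the trigger
    quartzTrigger.getJobDataMap().put("spagobi_cron_expression", "day{...}");

    // ...later, when converting back, the stored form is preferred over
    // re-deriving it from the Quartz-native expression
    String original = (String) quartzTrigger.getJobDataMap().get("spagobi_cron_expression");
    System.out.println(original); // prints the SpagoBI form untouched
  }
}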
protected void setTenant() {
  logger.debug("IN");
  try {
    IEngUserProfile profile = getUserProfile();
    Assert.assertNotNull(profile, "Input parameter [profile] cannot be null");
    UserProfile userProfile = (UserProfile) profile;
    String tenant = userProfile.getOrganization();
    LogMF.debug(logger, "Tenant: [{0}]", tenant);
    TenantManager.setTenant(new Tenant(tenant));
    LogMF.debug(logger, "Tenant [{0}] set properly", tenant);
  } catch (Throwable t) {
    logger.error("Cannot set tenant", t);
    throw new SpagoBIRuntimeException("Cannot set tenant", t);
  } finally {
    logger.debug("OUT");
  }
}
private Map applyService(Map parameters, BIObject biObject, IEngUserProfile profile) {
  logger.debug("IN");
  try {
    Assert.assertNotNull(parameters, "Input [parameters] cannot be null");
    String userId = (String) profile.getUserUniqueIdentifier();
    if (((UserProfile) profile).isSchedulerUser(userId)) {
      // if the outputType parameter is present, set the MIME type required by the export action
      if (parameters.get("outputType") != null) {
        String mimeType = EXPORT_MIME_TYPE_XLS;
        String outputType = parameters.get("outputType").toString();
        logger.debug("Export in " + outputType);
        if (outputType.equalsIgnoreCase("PDF")) {
          mimeType = EXPORT_MIME_TYPE_PDF;
        } else if (outputType.equalsIgnoreCase("XLS")) {
          mimeType = EXPORT_MIME_TYPE_XLS;
        } else {
          // default is XLS
        }
        logger.debug("Mime type to export is " + mimeType);
        parameters.put(MIME_TYPE, mimeType);
      } else {
        logger.debug("Mime type to export is the default application/xls");
        parameters.put(MIME_TYPE, EXPORT_MIME_TYPE_XLS);
      }
      parameters.put(PARAM_SERVICE_NAME, MASSIVE_EXPORT_PARAM_ACTION_NAME);
    } else {
      parameters.put(PARAM_SERVICE_NAME, PARAM_ACTION_NAME);
    }
    parameters.put(PARAM_NEW_SESSION, "TRUE");
  } catch (Throwable t) {
    throw new RuntimeException("Impossible to apply service parameters", t);
  } finally {
    logger.debug("OUT");
  }
  return parameters;
}
public static Engine getEngineByDriver(String driver) {
  Engine engine;

  engine = null;
  try {
    Assert.assertNotNull(DAOFactory.getEngineDAO(), "EngineDao cannot be null");
    engine = DAOFactory.getEngineDAO().loadEngineByDriver(driver);
    if (engine == null) {
      throw new SpagoBIRuntimeException(
          "There are no engines with driver equal to [" + driver + "] available");
    }
  } catch (Throwable t) {
    throw new SpagoBIRuntimeException(
        "Impossible to load a valid engine whose driver is equal to [" + driver + "]", t);
  } finally {
    logger.debug("OUT");
  }
  return engine;
}
/**
 * Gets the definition of the worksheet from the request, deserializes it and saves it into the
 * qbe engine instance.
 */
public void service(SourceBean request, SourceBean response) {
  logger.debug("IN");

  super.service(request, response);

  try {
    // get the worksheet definition from the request
    JSONObject worksheetDefinitionJSON = getAttributeAsJSONObject(WORKSHEET_DEFINITION);
    Assert.assertNotNull(
        worksheetDefinitionJSON,
        "Parameter ["
            + WORKSHEET_DEFINITION
            + "] cannot be null in order to execute "
            + this.getActionName()
            + " service");
    logger.debug(
        "Parameter ["
            + WORKSHEET_DEFINITION
            + "] is equals to ["
            + worksheetDefinitionJSON.toString()
            + "]");

    updateWorksheetDefinition(worksheetDefinitionJSON);
  } catch (Throwable t) {
    throw SpagoBIEngineServiceExceptionHandler.getInstance()
        .getWrappedException(getActionName(), getEngineInstance(), t);
  } finally {
    logger.debug("OUT");
  }

  try {
    writeBackToClient(new JSONAcknowledge());
  } catch (IOException e) {
    String message = "Impossible to write back the response to the client";
    throw new SpagoBIEngineServiceException(getActionName(), message, e);
  }
}
@Override
public void doService() {
  DatasetManagementAPI creationUtilities;
  IDataSet datasetBean;

  logger.debug("IN");
  try {
    // create the input parameters to pass to the WorkSheet Edit Service
    Map worksheetEditActionParameters = buildWorksheetEditServiceBaseParametersMap();

    String executionId = ExecuteAdHocUtility.createNewExecutionId();
    worksheetEditActionParameters.put("SBI_EXECUTION_ID", executionId);

    Engine worksheetEngine = getWorksheetEngine();
    LogMF.debug(logger, "Engine label is equal to [{0}]", worksheetEngine.getLabel());

    IDataSource datasource;
    try {
      datasource = DAOFactory.getDataSourceDAO().loadDataSourceWriteDefault();
    } catch (EMFUserError e) {
      throw new SpagoBIRuntimeException("Error while loading default datasource for writing", e);
    }
    if (datasource != null) {
      LogMF.debug(logger, "Datasource label is equal to [{0}]", datasource.getLabel());
      worksheetEditActionParameters.put(
          EngineConstants.DEFAULT_DATASOURCE_FOR_WRITING_LABEL, datasource.getLabel());
    } else {
      logger.debug("There is no default datasource for writing");
    }

    datasetBean = getDatasetAttributesFromRequest();
    worksheetEditActionParameters.put("dataset_label", datasetBean.getLabel());
    Map<String, String> datasetParameterValuesMap = getDatasetParameterValuesMapFromRequest();
    worksheetEditActionParameters.putAll(datasetParameterValuesMap);

    // create the WorkSheet Edit Service's URL
    String worksheetEditActionUrl =
        GeneralUtilities.getUrl(worksheetEngine.getUrl(), worksheetEditActionParameters);
    LogMF.debug(
        logger,
        "Worksheet edit service invocation url is equal to [{0}]",
        worksheetEditActionUrl);

    // create the dataset
    logger.trace("Creating the dataset...");
    Integer datasetId = null;
    try {
      creationUtilities = new DatasetManagementAPI();
      datasetId = creationUtilities.creatDataSet(datasetBean);
      Assert.assertNotNull(datasetId, "Dataset Id cannot be null");
    } catch (Throwable t) {
      throw new SpagoBIServiceException(
          SERVICE_NAME,
          "An error occurred while creating dataset from bean [" + datasetBean + "]",
          t);
    }
    LogMF.debug(
        logger, "Dataset [{0}] successfully created with id [{1}]", datasetBean, datasetId);

    logger.trace("Copying output parameters to response...");
    try {
      getServiceResponse().setAttribute(OUTPUT_PARAMETER_EXECUTION_ID, executionId);
      getServiceResponse()
          .setAttribute(OUTPUT_PARAMETER_WORKSHEET_EDIT_SERVICE_URL, worksheetEditActionUrl);
      getServiceResponse().setAttribute(OUTPUT_PARAMETER_DATASET_LABEL, datasetBean.getLabel());
      getServiceResponse()
          .setAttribute(OUTPUT_PARAMETER_DATASET_PARAMETERS, datasetParameterValuesMap);

      // business metadata
      JSONObject businessMetadata = getBusinessMetadataFromRequest();
      if (businessMetadata != null) {
        getServiceResponse()
            .setAttribute(OUTPUT_PARAMETER_BUSINESS_METADATA, businessMetadata.toString());
      }
    } catch (Throwable t) {
      throw new SpagoBIServiceException(
          SERVICE_NAME, "An error occurred while copying output parameters to the response", t);
    }
    logger.trace("Output parameters successfully copied to response");
  } finally {
    logger.debug("OUT");
  }
}
public void load(SourceBean template)
    throws it.eng.spagobi.engines.commonj.exception.TemplateParseException {
  logger.debug("IN");

  SourceBean workSB;

  Assert.assertNotNull(template, "Input parameter [template] cannot be null");
  workSB = (SourceBean) template.getAttribute("WORK");
  Assert.assertNotNull(workSB, "Template must contain a [WORK] section");

  workName = (String) workSB.getAttribute("workName");
  if (workName == null) {
    logger.error("Missing work name in document template");
    throw new it.eng.spagobi.engines.commonj.exception.TemplateParseException(
        template, "Missing work name in document template");
  }

  className = (String) workSB.getAttribute("className");
  if (className == null) {
    logger.error("Missing class specification in document template");
    throw new it.eng.spagobi.engines.commonj.exception.TemplateParseException(
        template, "Missing class specification in document template");
  }

  cmdParameters = new Vector<String>();
  analyticalParameters = new Vector<String>();
  classpathParameters = new Vector<String>();

  // check for parameters, in particular cmd and cmd_env
  SourceBean parametersSB = (SourceBean) workSB.getAttribute("PARAMETERS");
  if (parametersSB != null) {
    List parameterList = parametersSB.getAttributeAsList("PARAMETER");
    if (parameterList != null) {
      for (Iterator iterator = parameterList.iterator(); iterator.hasNext(); ) {
        SourceBean parameter = (SourceBean) iterator.next();
        String name = (String) parameter.getAttribute("name");
        String value = (String) parameter.getAttribute("value");
        if (name.equalsIgnoreCase(COMMAND)) {
          // the command name
          logger.debug("command parameter [" + value + "]");
          command = value;
        } else if (name.equalsIgnoreCase(COMMAND_ENVIRONMENT)) {
          // the command environment
          logger.debug("command environment parameter [" + value + "]");
          command_environment = value;
        } else {
          logger.debug("general parameter [" + value + "]");
          if (name.equalsIgnoreCase(SBI_ANALYTICAL_DRIVER)) {
            // a SpagoBI analytical driver url name
            analyticalParameters.add(value);
          } else if (name.equalsIgnoreCase(CLASSPATH)) {
            // a classpath variable
            classpathParameters.add(value);
          } else if (name.equalsIgnoreCase(CMD_PAR)) {
            // a command parameter (name = value)
            cmdParameters.add(value);
          }
        }
      }
    }
  }
  logger.debug("OUT");
}
protected String buildParentFieldOperand(Operand operand, Query query, Map entityAliasesMaps) {
  String operandElement;

  String[] chunks;
  String parentQueryId;
  String fieldName;
  IModelField datamartField;
  IModelEntity rootEntity;
  String queryName;
  String rootEntityAlias;

  logger.debug("IN");

  try {
    // the operand value comes directly from the client side GUI. It is a composition of
    // the parent query id and the field name, separated by a space
    logger.debug("operand is equals to [" + operand.values[0] + "]");
    chunks = operand.values[0].split(" ");
    Assert.assertTrue(
        chunks.length >= 2,
        "Operand ["
            + operand.values[0]
            + "] does not contain enough information to resolve the reference to the parent field");

    parentQueryId = chunks[0];
    logger.debug("where right-hand field belonging query [" + parentQueryId + "]");
    fieldName = chunks[1];
    logger.debug("where right-hand field unique name [" + fieldName + "]");

    datamartField = parentStatement.getDataSource().getModelStructure().getField(fieldName);
    Assert.assertNotNull(
        datamartField, "DataMart does not contain a field named [" + fieldName + "]");

    Couple queryNameAndRoot = datamartField.getQueryName();
    queryName = (String) queryNameAndRoot.getFirst();
    logger.debug("select field query name [" + queryName + "]");

    if (queryNameAndRoot.getSecond() != null) {
      rootEntity = (IModelEntity) queryNameAndRoot.getSecond();
    } else {
      rootEntity = datamartField.getParent().getRoot();
    }

    logger.debug("where right-hand field query name [" + queryName + "]");
    logger.debug(
        "where right-hand field root entity unique name [" + rootEntity.getUniqueName() + "]");

    Map parentEntityAliases = (Map) entityAliasesMaps.get(parentQueryId);
    if (parentEntityAliases != null) {
      if (!parentEntityAliases.containsKey(rootEntity.getUniqueName())) {
        Assert.assertUnreachable(
            "Filter of subquery ["
                + query.getId()
                + "] refers to a non existing parent query ["
                + parentQueryId
                + "] entity ["
                + rootEntity.getUniqueName()
                + "]");
      }
      rootEntityAlias = (String) parentEntityAliases.get(rootEntity.getUniqueName());
    } else {
      rootEntityAlias = "unresolved_alias";
      logger.warn(
          "Impossible to get the aliases map for parent query ["
              + parentQueryId
              + "]. Probably the parent query has not been compiled yet");
      logger.warn(
          "Query ["
              + query.getId()
              + "] refers to entities of its parent query ["
              + parentQueryId
              + "], so the generated statement won't be executable until the parent query is compiled");
    }
    logger.debug("where right-hand field root entity alias [" + rootEntityAlias + "]");

    operandElement = parentStatement.getFieldAlias(rootEntityAlias, queryName);
    logger.debug("where element right-hand field value [" + operandElement + "]");
  } finally {
    logger.debug("OUT");
  }

  return operandElement;
}
public void doService() {
  ExecutionInstance instance;
  Integer documentId;
  String documentLabel;
  Integer documentVersion;
  String executionRole;
  String userProvidedParametersStr;
  BIObject obj;
  IEngUserProfile profile;
  List roles;

  logger.debug("IN");

  try {
    profile = getUserProfile();

    documentId =
        requestContainsAttribute(DOCUMENT_ID) ? getAttributeAsInteger(DOCUMENT_ID) : null;
    documentVersion =
        requestContainsAttribute(DOCUMENT_VERSION)
            ? getAttributeAsInteger(DOCUMENT_VERSION)
            : null;
    documentLabel = getAttributeAsString(DOCUMENT_LABEL);
    executionRole = getAttributeAsString(EXECUTION_ROLE);
    userProvidedParametersStr = getAttributeAsString(ObjectsTreeConstants.PARAMETERS);

    logger.debug("Parameter [" + DOCUMENT_ID + "] is equals to [" + documentId + "]");
    logger.debug("Parameter [" + DOCUMENT_LABEL + "] is equals to [" + documentLabel + "]");
    logger.debug("Parameter [" + DOCUMENT_VERSION + "] is equals to [" + documentVersion + "]");
    logger.debug("Parameter [" + EXECUTION_ROLE + "] is equals to [" + executionRole + "]");

    Assert.assertTrue(
        !StringUtilities.isEmpty(documentLabel) || documentId != null,
        "At least one between ["
            + DOCUMENT_ID
            + "] and ["
            + DOCUMENT_LABEL
            + "] parameter must be specified on request");
    Assert.assertTrue(
        !StringUtilities.isEmpty(executionRole),
        "Parameter [" + EXECUTION_ROLE + "] cannot be null");

    // load the object to check that it exists
    obj = null;
    if (!StringUtilities.isEmpty(documentLabel)) {
      logger.debug("Loading document with label = [" + documentLabel + "] ...");
      try {
        obj = DAOFactory.getBIObjectDAO().loadBIObjectByLabel(documentLabel);
      } catch (EMFUserError error) {
        logger.error("Object with label equals to [" + documentLabel + "] not found");
        throw new SpagoBIServiceException(
            SERVICE_NAME, "Object with label equals to [" + documentLabel + "] not found", error);
      }
    } else if (documentId != null) {
      logger.info("Loading biobject with id = [" + documentId + "] ...");
      try {
        obj = DAOFactory.getBIObjectDAO().loadBIObjectById(documentId);
      } catch (EMFUserError error) {
        logger.error("Object with id equals to [" + documentId + "] not found");
        throw new SpagoBIServiceException(
            SERVICE_NAME, "Object with id equals to [" + documentId + "] not found", error);
      }
    } else {
      Assert.assertUnreachable(
          "At least one between ["
              + DOCUMENT_ID
              + "] and ["
              + DOCUMENT_LABEL
              + "] parameter must be specified on request");
    }
    Assert.assertNotNull(obj, "Impossible to load document");
    logger.debug("... document loaded successfully");

    // if the request specifies a version of the template to use, set it on the object
    if (documentVersion != null) {
      obj.setDocVersion(documentVersion);
    }

    // retrieve roles for execution
    try {
      roles = ObjectsAccessVerifier.getCorrectRolesForExecution(obj.getId(), profile);
    } catch (Throwable t) {
      throw new SpagoBIServiceException(SERVICE_NAME, t);
    }

    if (roles != null && !roles.contains(executionRole)) {
      logger.error(
          "Document [id: "
              + obj.getId()
              + "; label: "
              + obj.getLabel()
              + " ] cannot be executed by any role of the user ["
              + profile.getUserUniqueIdentifier()
              + "]");
      throw new SpagoBIServiceException(
          SERVICE_NAME,
          "Document [id: "
              + obj.getId()
              + "; label: "
              + obj.getLabel()
              + " ] cannot be executed by any role of the user ["
              + profile.getUserUniqueIdentifier()
              + "]");
    }

    // so far so good: everything has been validated successfully.
    // Let's create a new ExecutionInstance.
    UUIDGenerator uuidGen = UUIDGenerator.getInstance();
    UUID uuidObj = uuidGen.generateTimeBasedUUID();
    String executionContextId = uuidObj.toString();
    executionContextId = executionContextId.replaceAll("-", "");
    CoreContextManager ccm = createContext(executionContextId);

    instance =
        createExecutionInstance(
            obj.getId(), obj.getDocVersion(), executionRole, executionContextId, getLocale());
    ccm.set(ExecutionInstance.class.getName(), instance);

    // the ExecutionInstance has been created: prepare the response with the instance
    // unique id and flush it to the client
    JSONObject responseJSON = new JSONObject();
    try {
      responseJSON.put("execContextId", executionContextId);
    } catch (JSONException e) {
      throw new SpagoBIServiceException("Impossible to serialize response", e);
    }

    try {
      writeBackToClient(new JSONSuccess(responseJSON));
    } catch (IOException e) {
      throw new SpagoBIServiceException(
          "Impossible to write back the response to the client", e);
    }
  } finally {
    logger.debug("OUT");
  }
}
@SuppressWarnings({"unchecked", "rawtypes"})
@Override
public void service(SourceBean request, SourceBean response) {
  String dataSetLabel;
  String callback;
  String locale;
  Integer start;
  Integer limit;
  Integer limitSS; // for server-side pagination
  Integer rowsLimit;
  Boolean memoryPagination;
  IDataSet dataSet;
  IDataStore dataStore;

  logger.debug("IN");

  Monitor monitor = MonitorFactory.start("SpagoBI_Console.GetConsoleDataAction.service");

  try {
    super.service(request, response);

    ConsoleEngineInstance consoleEngineInstance = getConsoleEngineInstance();

    dataSetLabel = getAttributeAsString(DATASET_LABEL);
    logger.debug("Parameter [" + DATASET_LABEL + "] is equals to [" + dataSetLabel + "]");
    Assert.assertTrue(
        !StringUtilities.isEmpty(dataSetLabel),
        "Parameter [" + DATASET_LABEL + "] cannot be null or empty");

    callback = getAttributeAsString(CALLBACK);
    logger.debug("Parameter [" + CALLBACK + "] is equals to [" + callback + "]");

    locale = getAttributeAsString(LOCALE);
    logger.debug("Parameter [" + LOCALE + "] is equals to [" + locale + "]");

    memoryPagination = getAttributeAsBoolean(MEMORY_PAGINATION);
    logger.debug(
        "Parameter [" + MEMORY_PAGINATION + "] is equals to [" + memoryPagination + "]");

    limitSS = (getAttributeAsInteger(LIMIT_SS) == null) ? -1 : getAttributeAsInteger(LIMIT_SS);
    logger.debug("Parameter [" + LIMIT_SS + "] is equals to [" + limitSS + "]");

    rowsLimit =
        (getAttributeAsInteger(ROWS_LIMIT) == null) ? -1 : getAttributeAsInteger(ROWS_LIMIT);
    logger.debug("Parameter [" + ROWS_LIMIT + "] is equals to [" + rowsLimit + "]");

    start = (getAttributeAsInteger(START) == null) ? 0 : getAttributeAsInteger(START);
    logger.debug("Parameter [" + START + "] is equals to [" + start + "]");

    limit = (getAttributeAsInteger(LIMIT) == null) ? -1 : getAttributeAsInteger(LIMIT);
    logger.debug("Parameter [" + LIMIT + "] is equals to [" + limit + "]");

    dataSet = null;
    try {
      dataSet = getDataSet(dataSetLabel);
    } catch (Throwable t) {
      throw new SpagoBIServiceException(
          "Impossible to find a dataset whose label is [" + dataSetLabel + "]", t);
    }
    Assert.assertNotNull(
        dataSet, "Impossible to find a dataset whose label is [" + dataSetLabel + "]");

    Map params = consoleEngineInstance.getAnalyticalDrivers();
    params.put(LOCALE, locale);
    dataSet.setParamsMap(params);

    UserProfile userProfile = (UserProfile) this.getEnv().get(EngineConstants.ENV_USER_PROFILE);
    dataSet.setUserProfileAttributes(UserProfileUtils.getProfileAttributes(userProfile));
    if (dataSet instanceof AbstractDataSet) {
      AbstractDataSet ads = (AbstractDataSet) dataSet;
      ads.setUserProfile(userProfile);
    }

    Monitor monitorLD =
        MonitorFactory.start("SpagoBI_Console.GetConsoleDataAction.service.LoadData");
    if (!memoryPagination) {
      // server-side pagination: no in-memory rows limit, page size comes from limitSS
      rowsLimit = -1;
      limit = limitSS;
    }

    int totalResults = this.getDataSetTotalResult(dataSet);
    if (totalResults != -1) {
      // the total number of results was already loaded, no need to recalculate it
      dataSet.setCalculateResultNumberOnLoad(false);
    }

    dataSet.loadData(start, limit, rowsLimit);
    monitorLD.stop();

    dataStore = dataSet.getDataStore();
    Assert.assertNotNull(
        dataStore,
        "The dataStore returned by loadData method of the class ["
            + dataSet.getClass().getName()
            + "] cannot be null");

    Object resultNumber = dataStore.getMetaData().getProperty("resultNumber");
    if (resultNumber != null) {
      this.setDataSetTotalResult(dataSet, (Integer) resultNumber);
    }

    JSONObject results = new JSONObject();
    try {
      JSONDataWriter writer = new JSONDataWriter();
      // write the id property only for NGSI rest datasets, so as not to break previous logic
      if (dataSet instanceof RESTDataSet && ((RESTDataSet) dataSet).isNgsi()) {
        writer.setUseIdProperty(true);
      }
      if (totalResults != -1) {
        // if the total number of results was previously loaded, set it into the dataStore
        dataStore.getMetaData().setProperty("resultNumber", totalResults);
      }
      resultNumber = dataStore.getMetaData().getProperty("resultNumber");
      if (resultNumber == null) {
        dataStore
            .getMetaData()
            .setProperty("resultNumber", new Integer((int) dataStore.getRecordsCount()));
      }
      JSONObject dataSetJSON = (JSONObject) writer.write(dataStore);
      results = dataSetJSON;
    } catch (Throwable e) {
      throw new SpagoBIServiceException("Impossible to serialize datastore", e);
    }

    try {
      writeBackToClient(new JSONSuccess(results, callback));
    } catch (IOException e) {
      throw new SpagoBIServiceException(
          "Impossible to write back the response to the client", e);
    }
  } catch (Throwable t) {
    throw SpagoBIEngineServiceExceptionHandler.getInstance()
        .getWrappedException(getActionName(), getEngineInstance(), t);
  } finally {
    monitor.stop();
    logger.debug("OUT");
  }
}
@Override
public IDataStore executeQuery(Integer start, Integer limit) {
  IDataStore dataStore = null;

  IDataSet dataSet = this.getEngineInstance().getActiveQueryAsDataSet();
  AbstractQbeDataSet qbeDataSet = (AbstractQbeDataSet) dataSet;
  IStatement statement = qbeDataSet.getStatement();
  // QueryGraph graph = statement.getQuery().getQueryGraph();
  boolean valid = true;
  // GraphManager.getGraphValidatorInstance(QbeEngineConfig.getInstance().getGraphValidatorImpl()).isValid(graph, statement.getQuery().getQueryEntities(getDataSource()));
  // logger.debug("QueryGraph valid = " + valid);
  if (!valid) {
    throw new SpagoBIEngineServiceException(
        getActionName(), "error.mesage.description.relationship.not.enough");
  }

  try {
    logger.debug("Executing query ...");
    Integer maxSize = QbeEngineConfig.getInstance().getResultLimit();
    logger.debug(
        "Configuration setting [QBE.QBE-SQL-RESULT-LIMIT.value] is equals to ["
            + (maxSize != null ? maxSize : "none")
            + "]");
    String jpaQueryStr = statement.getQueryString();
    logger.debug("Executable query (HQL/JPQL): [" + jpaQueryStr + "]");
    UserProfile userProfile = (UserProfile) getEnv().get(EngineConstants.ENV_USER_PROFILE);
    auditlogger.info("[" + userProfile.getUserId() + "]:: HQL/JPQL: " + jpaQueryStr);
    auditlogger.info(
        "[" + userProfile.getUserId() + "]:: SQL: " + statement.getSqlQueryString());

    int startI = start;
    int limitI = (limit == null ? (maxSize == null ? -1 : maxSize) : limit);
    int maxI = (maxSize == null ? -1 : maxSize.intValue());
    dataSet.loadData(startI, limitI, maxI);
    dataStore = dataSet.getDataStore();
    changeAlias(dataStore);
    Assert.assertNotNull(
        dataStore,
        "The dataStore returned by loadData method of the class ["
            + dataSet.getClass().getName()
            + "] cannot be null");
  } catch (Exception e) {
    logger.debug("Query execution aborted because of an internal exception");
    SpagoBIEngineServiceException exception;
    String message;

    message =
        "An error occurred in "
            + getActionName()
            + " service while executing query: ["
            + statement.getQueryString()
            + "]";
    exception = new SpagoBIEngineServiceException(getActionName(), message, e);
    exception.addHint(
        "Check if the query is properly formed: [" + statement.getQueryString() + "]");
    exception.addHint("Check connection configuration");
    exception.addHint("Check the qbe jar file");

    throw exception;
  }
  logger.debug("Query executed successfully");

  return dataStore;
}
public static org.quartz.Trigger convertTriggerToNativeObject(Trigger spagobiTrigger) {
  org.quartz.Trigger quartzTrigger;

  logger.debug("IN");

  quartzTrigger = null;
  try {
    Assert.assertNotNull(spagobiTrigger, "Input parameter [spagobiTrigger] cannot be null");

    if (spagobiTrigger.isRunImmediately()) {
      quartzTrigger = TriggerUtils.makeImmediateTrigger(spagobiTrigger.getName(), 0, 10000);
      quartzTrigger.setJobName(spagobiTrigger.getJob().getName());
      quartzTrigger.setJobGroup(spagobiTrigger.getJob().getGroupName());
      JobDataMap jobDataMap =
          convertParametersToNativeObject(spagobiTrigger.getJob().getParameters());
      quartzTrigger.setJobDataMap(jobDataMap);
    } else {
      if (spagobiTrigger.isSimpleTrigger()) {
        quartzTrigger = new org.quartz.SimpleTrigger();
      } else {
        org.quartz.CronTrigger quartzCronTrigger = new org.quartz.CronTrigger();
        String quartzCronExpression =
            convertCronExpressionToNativeObject(
                spagobiTrigger.getChronExpression(), spagobiTrigger.getStartTime());
        quartzCronTrigger.setCronExpression(quartzCronExpression);
        quartzTrigger = quartzCronTrigger;
        // dirty trick: store the original SpagoBI cron expression in the job parameters,
        // so that convertTriggerFromNativeObject can read it back unchanged
        spagobiTrigger
            .getJob()
            .addParameter(
                SPAGOBI_CRON_EXPRESSION, spagobiTrigger.getChronExpression().getExpression());
      }
      quartzTrigger.setName(spagobiTrigger.getName());
      quartzTrigger.setDescription(spagobiTrigger.getDescription());
      if (spagobiTrigger.getGroupName() == null) {
        quartzTrigger.setGroup(Scheduler.DEFAULT_GROUP);
      } else {
        quartzTrigger.setGroup(spagobiTrigger.getGroupName());
      }
      quartzTrigger.setStartTime(spagobiTrigger.getStartTime());
      if (spagobiTrigger.getEndTime() != null) {
        quartzTrigger.setEndTime(spagobiTrigger.getEndTime());
      }
      quartzTrigger.setJobName(spagobiTrigger.getJob().getName());
      if (spagobiTrigger.getJob().getGroupName() == null) {
        quartzTrigger.setJobGroup(Scheduler.DEFAULT_GROUP);
      } else {
        quartzTrigger.setJobGroup(spagobiTrigger.getJob().getGroupName());
      }
      quartzTrigger.setVolatility(spagobiTrigger.getJob().isVolatile());
      JobDataMap jobDataMap =
          convertParametersToNativeObject(spagobiTrigger.getJob().getParameters());
      quartzTrigger.setJobDataMap(jobDataMap);
    }
  } catch (Throwable t) {
    throw new SpagoBIRuntimeException(
        "An unexpected error occurred while converting Trigger to native object", t);
  } finally {
    logger.debug("OUT");
  }
  return quartzTrigger;
}