Example no. 1
  public static boolean dbWriteBinary(
      java.lang.String param0, java.lang.Object[] param1, byte[] param2, int param3, int param4) {
    org.sakaiproject.db.api.SqlService service = getInstance();
    if (service == null) return false;

    return service.dbWriteBinary(param0, param1, param2, param3, param4);
  }
Example no. 2
  public static void dbReadBinary(
      java.lang.String param0, java.lang.Object[] param1, byte[] param2) {
    org.sakaiproject.db.api.SqlService service = getInstance();
    if (service == null) return;

    service.dbReadBinary(param0, param1, param2);
  }
Example no. 3
  public static boolean dbWriteFailQuiet(
      java.sql.Connection param0, java.lang.String param1, java.lang.Object[] param2) {
    org.sakaiproject.db.api.SqlService service = getInstance();
    if (service == null) return false;

    return service.dbWriteFailQuiet(param0, param1, param2);
  }
Example no. 4
  public static java.sql.Connection dbReadLock(
      java.lang.String param0, java.lang.StringBuilder param1) {
    org.sakaiproject.db.api.SqlService service = getInstance();
    if (service == null) return null;

    return service.dbReadLock(param0, param1);
  }
Example no. 5
  public static java.io.InputStream dbReadBinary(
      java.lang.String param0, java.lang.Object[] param1, boolean param2)
      throws org.sakaiproject.exception.ServerOverloadException {
    org.sakaiproject.db.api.SqlService service = getInstance();
    if (service == null) return null;

    return service.dbReadBinary(param0, param1, param2);
  }
Example no. 6
  public static java.util.List dbRead(
      java.lang.String param0,
      java.lang.Object[] param1,
      org.sakaiproject.db.api.SqlReader param2) {
    org.sakaiproject.db.api.SqlService service = getInstance();
    if (service == null) return null;

    return service.dbRead(param0, param1, param2);
  }
Example no. 7
  public static void dbUpdateCommit(
      java.lang.String param0,
      java.lang.Object[] param1,
      java.lang.String param2,
      java.sql.Connection param3) {
    org.sakaiproject.db.api.SqlService service = getInstance();
    if (service == null) return;

    service.dbUpdateCommit(param0, param1, param2, param3);
  }
  /** Final initialization, once all dependencies are set. */
  public void init() {
    try {
      // if we are auto-creating our schema, check and create
      if (m_autoDdl) {
        m_sqlService.ddl(this.getClass().getClassLoader(), "sakai_announcement");
      }

      super.init();

      M_log.info(
          "init(): tables: " + m_cTableName + " " + m_rTableName + " locks-in-db: " + m_locksInDb);

      // convert draft?
      if (m_convertToDraft) {
        m_convertToDraft = false;
        convertToDraft();
      }

      // convert pubview?
      if (m_convertToPubView) {
        m_convertToPubView = false;
        convertToPubView();
      }

    } catch (Throwable t) {
      M_log.warn("init(): ", t);
    }
  }
  /**
   * ***********************************************************************************************************************************************
   * ClusterService implementation
   * **********************************************************************************************************************************************
   */
  @SuppressWarnings("unchecked")
  public List<String> getServers() {
    String statement = clusterServiceSql.getListServersSql();
    List<String> servers = m_sqlService.dbRead(statement);

    return servers;
  }
Example no. 10
  /**
   * Runs a simple SQL statement to check if the database has been patched.
   *
   * @return OK if no null range fields remain, MIGRATE if null range fields are found, or
   *     UPGRADE_SCHEMA if the test query fails because the schema is not up to date
   */
  private int checkSAK11204ForUpgrade() {
    String test =
        "select count(*) from CALENDAR_EVENT where (RANGE_START is null) or (RANGE_END is null)";
    Connection connection = null;
    Statement s = null;
    ResultSet rs = null;

    try {
      connection = sqlService.borrowConnection();
      s = connection.createStatement();
      rs = s.executeQuery(test);
      if (rs.next()) {
        long ntodo = rs.getLong(1);
        if (ntodo == 0) {
          log.debug("SAK-11204: Database has been migrated  ");
          return OK;
        } else {
          log.info("SAK-11204: Migration check, there are null range fields ");
          return MIGRATE;
        }
      } else {
        log.warn("SAK-11204: Could not count null range fields, assuming migrate ");
        return MIGRATE;
      }
    } catch (SQLException ex) {
      log.info(
          "SAK-11204: Migration check, CALENDAR_EVENT schema not uptodate, test query said:  "
              + ex.getMessage());
      return UPGRADE_SCHEMA;
    } finally {
      try {
        if (rs != null) rs.close();
      } catch (Exception ex) {
      }
      try {
        if (s != null) s.close();
      } catch (Exception ex) {
      }
      try {
        if (connection != null) {
          sqlService.returnConnection(connection);
        }
      } catch (Exception ex) {
      }
    }
  }
  /** Final initialization, once all dependencies are set. */
  public void init() {
    try {
      // if we are auto-creating our schema, check and create
      if (m_autoDdl) {
        m_sqlService.ddl(this.getClass().getClassLoader(), "sakai_presence");
      }

      super.init();
    } catch (Exception t) {
      M_log.warn("init(): ", t);
    }
  }
  /**
   * Runs an SQL select statement to determine if the Quartz lock rows exist in the database. If the
   * rows do not exist this method assumes this is the first time the scheduler has been started.
   * The select statement will be defined in the {vendor}/checkTables.sql file within the shared
   * library deployed by this project. The statement should be of the form "SELECT COUNT(*) from
   * QUARTZ_LOCKS;". If the count is zero it is assumed this is a new install. If the count is
   * non-zero it is assumed the QUARTZ_LOCKS table has been initialized and this is not a new
   * install.
   *
   * @param sqlService the SqlService used to run the check
   * @return true if this appears to be a new installation (the lock row count is zero), false
   *     otherwise
   */
  private boolean isInitialStartup(SqlService sqlService) {
    String checkTablesScript = sqlService.getVendor() + "/checkTables.sql";
    ClassLoader loader = this.getClass().getClassLoader();
    String chkStmt = null;
    InputStream in = null;
    BufferedReader r = null;

    try {

      // find the resource from the loader
      in = loader.getResourceAsStream(checkTablesScript);

      r = new BufferedReader(new InputStreamReader(in));

      chkStmt = r.readLine();
    } catch (Exception e) {
      LOG.error(
          "Could not read the file "
              + checkTablesScript
              + " to determine if this is a new installation. Preconfigured jobs will only be loaded if the server property scheduler.loadjobs is \"true\"",
          e);
      return false;
    } finally {
      try {
        if (r != null) r.close();
      } catch (Exception e) {
      }
      try {
        if (in != null) in.close();
      } catch (Exception e) {
      }
    }

    List<String> l = sqlService.dbRead(chkStmt);
    if (l != null && l.size() > 0) {
      return (l.get(0).equalsIgnoreCase("0"));
    } else {
      return false;
    }
  }
  /** Final initialization, once all dependencies are set. */
  public void init() {
    setClusterServiceSql(m_sqlService.getVendor());
    try {
      // if we are auto-creating our schema, check and create
      if (m_autoDdl) {
        m_sqlService.ddl(this.getClass().getClassLoader(), "sakai_cluster");
      }

      // start the maintenance thread
      m_maintenance = new Maintenance();
      m_maintenance.start();

      M_log.info(
          "init: refresh: "
              + m_refresh
              + " expired: "
              + m_expired
              + " ghostingPercent: "
              + m_ghostingPercent);
    } catch (Throwable t) {
      M_log.warn("init(): ", t);
    }
  }
Example no. 14
  public void init() {

    log.info("Initializing Lesson Builder Tool");

    // for debugging I'd like to be able to reload, so avoid duplicates
    List<String> registered =
        FunctionManager.getRegisteredFunctions(SimplePage.PERMISSION_LESSONBUILDER_PREFIX);
    if (registered == null || !registered.contains(SimplePage.PERMISSION_LESSONBUILDER_UPDATE))
      FunctionManager.registerFunction(SimplePage.PERMISSION_LESSONBUILDER_UPDATE);
    if (registered == null || !registered.contains(SimplePage.PERMISSION_LESSONBUILDER_READ))
      FunctionManager.registerFunction(SimplePage.PERMISSION_LESSONBUILDER_READ);
    if (registered == null || !registered.contains(SimplePage.PERMISSION_LESSONBUILDER_SEE_ALL))
      FunctionManager.registerFunction(SimplePage.PERMISSION_LESSONBUILDER_SEE_ALL);

    try {
      // hibernate will do the tables, but we need this for the indices
      if (autoDdl) {
        sqlService.ddl(this.getClass().getClassLoader(), "simplepage");
        log.info("Completed Lesson Builder DDL");
      }
    } catch (Exception e) {
      log.warn("Unable to DDL Lesson Builder", e);
    }
  }
  public void init() {
    // if we are auto-creating our schema, check and create
    if (m_autoDdl) {
      m_sqlService.ddl(this.getClass().getClassLoader(), "user_audits");
    }
  }
Example no. 16
  public void apply(boolean autoDDL) {
    // Check for sak11204.disable -- should only be set after successful upgrade completion
    boolean disableUpgrade = serverConfigurationService.getBoolean("sak11204.disable", false);
    if (disableUpgrade) return;

    int upgrade = checkSAK11204ForUpgrade();

    if (upgrade == UPGRADE_SCHEMA) {
      if (autoDDL) {
        log.info("SAK-11204: Updating Schema ");
        sqlService.ddl(this.getClass().getClassLoader(), "SAK-11204");
        if (checkSAK11204ForUpgrade() == UPGRADE_SCHEMA) {
          log.fatal(
              "SAK-11204: =============================================================================");
          log.fatal(
              "SAK-11204: Database Upgrade for SAK-11204 Failed, you must investigate and fix before");
          log.fatal(
              "SAK-11204: continuuing. I attempted to upgrade the schema but this appears to hav failed. You must");
          log.fatal(
              "SAK-11204: ensure that the columns RANGE_START(BIGINT) and RANGE_END(BIGINT) are present in CALENDAR_EVENT");
          log.fatal("SAK-11204: and there are indexes on both of the columns.");
          log.fatal("SAK-11204: Thank you ");
          log.fatal(
              "SAK-11204: =============================================================================");
          System.exit(-10);
        }
      } else {
        log.fatal(
            "SAK-11204: =============================================================================");
        log.fatal(
            "SAK-11204: Database Upgrade for SAK-11204 Failed, you must investigate and fix before");
        log.fatal(
            "SAK-11204: continuuing. AutoDDL was OFF, so I could not change the database schema. You must");
        log.fatal(
            "SAK-11204: ensure that the columns RANGE_START(BIGINT) and RANGE_END(BIGINT) are present in CALENDAR_EVENT");
        log.fatal("SAK-11204: and there are indexes on both of the columns.");
        log.fatal("SAK-11204: Thank you ");
        log.fatal(
            "SAK-11204: =============================================================================");
        System.exit(-10);
      }
      log.info("SAK-11204: Schema Update Sucessfull ");
    }
    boolean forceUpgrade = serverConfigurationService.getBoolean("sak11204.forceupgrade", false);
    if (upgrade == MIGRATE || forceUpgrade) {
      // get a list of channels
      // for each channel get a list of events
      // for each event save
      // do this all at the storage layer so that we don't change the
      // calendars
      List<Calendar> calendars = storage.getCalendars();
      int i = 1;
      for (Iterator<Calendar> icalendars = calendars.iterator(); icalendars.hasNext(); ) {
        log.info("SAK-11204: Converting Calendar " + i + " of " + calendars.size());
        i++;
        Calendar calendar = icalendars.next();
        List<BaseCalendarEventEdit> levents = storage.getEvents(calendar);
        for (Iterator<BaseCalendarEventEdit> ievents = levents.iterator(); ievents.hasNext(); ) {
          BaseCalendarEventEdit event = ievents.next();
          event.activate();
          storage.commitEvent(calendar, event);
        }
      }
    }
    log.info("SAK-11204: Calendar Conversion Complete ");
    if (forceUpgrade) {
      log.warn(
          "SAK-11204: =========================================================================================================  ");
      log.warn(
          "SAK-11204: This Conversion was forced, please ensure that you remove sak11204.forceupgrade from sakai.properties ");
      log.warn(
          "SAK-11204: If you do not remove sak11204.forceupgrade from sakai.properties this conversion will be performed ");
      log.warn(
          "SAK-11204: every time you start this instance of sakai, and it will take the same ammount of time ");
      log.warn(
          "SAK-11204: =========================================================================================================  ");
    }
  }
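For reference, the two configuration switches read by this method can be set in sakai.properties; the values shown are illustrative, and both default to false:
# skip the SAK-11204 check entirely; only set this after a successful upgrade
sak11204.disable=true
# force the range-field migration on every startup; remove once the conversion has run
sak11204.forceupgrade=true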
Example no. 17
  public static boolean transact(Runnable callback, String tag) {
    org.sakaiproject.db.api.SqlService service = getInstance();
    if (service == null) return false;

    return service.transact(callback, tag);
  }
Example no. 18
  public static void returnConnection(java.sql.Connection param0) {
    org.sakaiproject.db.api.SqlService service = getInstance();
    if (service == null) return;

    service.returnConnection(param0);
  }
Example no. 19
  public static java.sql.Connection borrowConnection() throws java.sql.SQLException {
    org.sakaiproject.db.api.SqlService service = getInstance();
    if (service == null) return null;

    return service.borrowConnection();
  }
Example no. 20
  public static String getBooleanConstant(boolean value) {
    org.sakaiproject.db.api.SqlService service = getInstance();
    if (service == null) return null;

    return service.getBooleanConstant(value);
  }
Example no. 21
  public static Long getNextSequence(String tableName, java.sql.Connection conn) {
    org.sakaiproject.db.api.SqlService service = getInstance();
    if (service == null) return null;

    return service.getNextSequence(tableName, conn);
  }
Example no. 22
  public static java.lang.String getVendor() {
    org.sakaiproject.db.api.SqlService service = getInstance();
    if (service == null) return null;

    return service.getVendor();
  }
Example no. 23
  public static java.util.GregorianCalendar getCal() {
    org.sakaiproject.db.api.SqlService service = getInstance();
    if (service == null) return null;

    return service.getCal();
  }
  /** fill in the draft and owner db fields */
  protected void convertToDraft() {
    M_log.info("convertToDraft");

    try {
      // get a connection
      final Connection connection = m_sqlService.borrowConnection();
      boolean wasCommit = connection.getAutoCommit();
      connection.setAutoCommit(false);

      // read all message records that need conversion
      String sql =
          "select CHANNEL_ID, MESSAGE_ID, XML from " + m_rTableName /* + " where OWNER is null" */;
      m_sqlService.dbRead(
          connection,
          sql,
          null,
          new SqlReader() {
            private int count = 0;

            public Object readSqlResultRecord(ResultSet result) {
              try {
                // create the Resource from the db xml
                String channelId = result.getString(1);
                String messageId = result.getString(2);
                String xml = result.getString(3);

                // read the xml
                Document doc = Xml.readDocumentFromString(xml);

                // verify the root element
                Element root = doc.getDocumentElement();
                if (!root.getTagName().equals("message")) {
                  M_log.warn(
                      "convertToDraft(): XML root element not message: " + root.getTagName());
                  return null;
                }
                Message m = new BaseMessageEdit(null, root);

                // pick up the fields
                String owner = m.getHeader().getFrom().getId();
                boolean draft = m.getHeader().getDraft();

                // update
                String update =
                    "update "
                        + m_rTableName
                        + " set OWNER = ?, DRAFT = ? where CHANNEL_ID = ? and MESSAGE_ID = ?";
                Object fields[] = new Object[4];
                fields[0] = owner;
                fields[1] = (draft ? "1" : "0");
                fields[2] = channelId;
                fields[3] = messageId;
                boolean ok = m_sqlService.dbWrite(connection, update, fields);

                if (!ok)
                  M_log.info(
                      "convertToDraft: channel: "
                          + channelId
                          + " message: "
                          + messageId
                          + " owner: "
                          + owner
                          + " draft: "
                          + draft
                          + " ok: "
                          + ok);

                count++;
                if (count % 100 == 0) {
                  M_log.info("convertToDraft: " + count);
                }
                return null;
              } catch (Throwable ignore) {
                return null;
              }
            }
          });

      connection.commit();
      connection.setAutoCommit(wasCommit);
      m_sqlService.returnConnection(connection);
    } catch (Throwable t) {
      M_log.warn("convertToDraft: failed: " + t);
    }

    M_log.info("convertToDraft: done");
  }
  /** fill in the pubview db fields */
  protected void convertToPubView() {
    M_log.info("convertToPubView");

    try {
      // get a connection
      final Connection connection = m_sqlService.borrowConnection();
      boolean wasCommit = connection.getAutoCommit();
      connection.setAutoCommit(false);

      // read all message records that need conversion
      String sql = "select CHANNEL_ID, MESSAGE_ID, XML, PUBVIEW from " + m_rTableName;
      m_sqlService.dbRead(
          connection,
          sql,
          null,
          new SqlReader() {
            public Object readSqlResultRecord(ResultSet result) {
              try {
                // create the Resource from the db xml
                String channelId = result.getString(1);
                String messageId = result.getString(2);
                String xml = result.getString(3);
                String pubViewSetting = result.getString(4);

                // read the xml
                Document doc = Xml.readDocumentFromString(xml);

                // verify the root element
                Element root = doc.getDocumentElement();
                if (!root.getTagName().equals("message")) {
                  M_log.warn(
                      "convertToPubView(): XML root element not message: " + root.getTagName());
                  return null;
                }
                BaseMessageEdit m = new BaseMessageEdit(null, root);

                // check if the record already has pub view set in the properties
                boolean pubview = false;
                if (m.getProperties().getProperty(ResourceProperties.PROP_PUBVIEW) != null) {
                  // pub view set in properties and in db indicates all is well with this one
                  if ("1".equals(pubViewSetting)) {
                    return null;
                  }

                  // having the property overrides any realm setting...
                  pubview = true;
                }

                // if we don't know pubview from the props, check the realm
                else {
                  // m.getReference() won't work cause we didn't give it its channel...
                  Reference channel = m_entityManager.newReference(channelId);
                  String ref = messageReference(channel.getContext(), channel.getId(), m.getId());
                  pubview = getPubView(ref);

                  // if the pubview setting matches the db, and it's false, all is well
                  if ((!pubview) && ("0".equals(pubViewSetting))) {
                    return null;
                  }
                }

                // update those that have no pubview
                if (!pubview) {
                  String update =
                      "update "
                          + m_rTableName
                          + " set PUBVIEW = ? where CHANNEL_ID = ? and MESSAGE_ID = ?";
                  Object fields[] = new Object[3];
                  fields[0] = "0";
                  fields[1] = channelId;
                  fields[2] = messageId;
                  boolean ok = m_sqlService.dbWrite(connection, update, fields);

                  if (!ok)
                    M_log.info(
                        "convertToPubView: channel: "
                            + channelId
                            + " message: "
                            + messageId
                            + " pubview: "
                            + pubview
                            + " ok: "
                            + ok);
                }

                // update those that have pubview
                else {
                  // set the property
                  m.getPropertiesEdit()
                      .addProperty(ResourceProperties.PROP_PUBVIEW, Boolean.TRUE.toString());

                  // form updated XML
                  doc = Xml.createDocument();
                  m.toXml(doc, new Stack());
                  xml = Xml.writeDocumentToString(doc);

                  String update =
                      "update "
                          + m_rTableName
                          + " set PUBVIEW = ?, XML = ? where CHANNEL_ID = ? and MESSAGE_ID = ?";
                  Object fields[] = new Object[4];
                  fields[0] = "1";
                  fields[1] = xml;
                  fields[2] = channelId;
                  fields[3] = messageId;
                  boolean ok = m_sqlService.dbWrite(connection, update, fields);

                  if (!ok)
                    M_log.info(
                        "convertToPubView: channel: "
                            + channelId
                            + " message: "
                            + messageId
                            + " pubview: "
                            + pubview
                            + " ok: "
                            + ok);
                }

                return null;
              } catch (Throwable ignore) {
                return null;
              }
            }
          });

      connection.commit();
      connection.setAutoCommit(wasCommit);
      m_sqlService.returnConnection(connection);
    } catch (Throwable t) {
      M_log.warn("convertToPubView: failed: " + t);
    }

    M_log.info("convertToPubView: done");
  }
Example no. 26
  public static void dbReadBlobAndUpdate(java.lang.String param0, byte[] param1) {
    org.sakaiproject.db.api.SqlService service = getInstance();
    if (service == null) return;

    service.dbReadBlobAndUpdate(param0, param1);
  }
  public void init() {
    try {
      qrtzProperties = initQuartzConfiguration();

      qrtzProperties.setProperty("org.quartz.scheduler.instanceId", serverId);

      // note: because job classes are jarred, it is impossible to iterate
      // through a directory by calling listFiles on a File object.
      // Therefore, we need the class list from Spring.

      // find quartz jobs from the specified 'qrtzJobs' list and verify
      // that these jobs implement the Job interface
      Iterator<String> qrtzJobsIterator = qrtzJobs.iterator();
      while (qrtzJobsIterator.hasNext()) {
        String className = qrtzJobsIterator.next();
        Class cl = null;
        try {
          cl = Class.forName(className);
        } catch (ClassNotFoundException e) {
          LOG.warn("Could not locate class: " + className + " on classpath");
        }
        if (cl != null) {
          // check that each class implements the Job interface
          if (doesImplementJobInterface(cl)) {
            qrtzQualifiedJobs.put(cl.getName(), cl.getName());
          } else {
            LOG.warn("Class: " + className + " does not implement quartz Job interface");
          }
        }
      }
      // run ddl
      if (autoDdl.booleanValue()) {
        try {
          sqlService.ddl(this.getClass().getClassLoader(), "quartz2");
        } catch (Throwable t) {
          LOG.warn(this + ".init(): ", t);
        }
      }

      boolean isInitialStartup = isInitialStartup(sqlService);
      if (isInitialStartup && autoDdl.booleanValue()) {
        LOG.info("Performing initial population of the Quartz tables.");
        sqlService.ddl(this.getClass().getClassLoader(), "init_locks2");
      }
      /*
        Determine whether or not to load the jobs defined in the initialJobSchedules list. These jobs will be loaded
        under the following conditions:
           1) the server configuration property "scheduler.loadjobs" is "true"
           2) "scheduler.loadjobs" is "init" and this is the first startup for the scheduler (eg. this is a new Sakai instance)
        "scheduler.loadjobs" is set to "init" by default
      */
      String loadJobs = serverConfigurationService.getString(SCHEDULER_LOADJOBS, "init").trim();

      List<SpringInitialJobSchedule> initSchedules = getInitialJobSchedules();

      boolean loadInitSchedules =
          (initSchedules != null)
              && (initSchedules.size() > 0)
              && (("init".equalsIgnoreCase(loadJobs) && isInitialStartup)
                  || "true".equalsIgnoreCase(loadJobs));

      if (loadInitSchedules) LOG.debug("Preconfigured jobs will be loaded");
      else LOG.debug("Preconfigured jobs will not be loaded");

      // start scheduler and load jobs
      schedFactory = new StdSchedulerFactory(qrtzProperties);
      scheduler = schedFactory.getScheduler();

      // loop through persisted jobs removing both the job and associated
      // triggers for jobs where the associated job class is not found
      Set<JobKey> jobKeys =
          scheduler.getJobKeys(GroupMatcher.jobGroupEquals(Scheduler.DEFAULT_GROUP));
      for (JobKey key : jobKeys) {
        try {
          JobDetail detail = scheduler.getJobDetail(key);
          String bean = detail.getJobDataMap().getString(JobBeanWrapper.SPRING_BEAN_NAME);
          Job job = (Job) ComponentManager.get(bean);
          if (job == null) {
            LOG.warn("scheduler cannot load class for persistent job:" + key);
            scheduler.deleteJob(key);
            LOG.warn("deleted persistent job:" + key);
          }
        } catch (SchedulerException e) {
          LOG.warn("scheduler cannot load class for persistent job:" + key);
          scheduler.deleteJob(key);
          LOG.warn("deleted persistent job:" + key);
        }
      }

      for (TriggerListener tListener : globalTriggerListeners) {
        scheduler.getListenerManager().addTriggerListener(tListener);
      }

      for (JobListener jListener : globalJobListeners) {
        scheduler.getListenerManager().addJobListener(jListener);
      }

      if (loadInitSchedules) {
        LOG.debug("Loading preconfigured jobs");
        loadInitialSchedules();
      }

      // scheduler.addGlobalTriggerListener(globalTriggerListener);
      if (isStartScheduler()) {
        scheduler.start();
      } else {
        LOG.info("Scheduler Not Started, startScheduler=false");
      }
    } catch (Exception e) {
      LOG.error("Failed to start scheduler.", e);
      throw new IllegalStateException("Scheduler cannot start!", e);
    }
  }
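For reference, the "scheduler.loadjobs" behaviour described in the block comment above maps to a single sakai.properties entry; "init" is also the default used when the property is absent:
# load the preconfigured job schedules only on the scheduler's first startup
scheduler.loadjobs=init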
Example no. 28
  public static boolean dbWrite(java.lang.String param0, java.lang.Object[] param1) {
    org.sakaiproject.db.api.SqlService service = getInstance();
    if (service == null) return false;

    return service.dbWrite(param0, param1);
  }