Example #1
  @Override
  public Pair<User, Account> findUserAccountByApiKey(String apiKey) {
    Transaction txn = Transaction.currentTxn();
    PreparedStatement pstmt = null;
    Pair<User, Account> userAcctPair = null;
    try {
      String sql = FIND_USER_ACCOUNT_BY_API_KEY;
      pstmt = txn.prepareAutoCloseStatement(sql);
      pstmt.setString(1, apiKey);
      ResultSet rs = pstmt.executeQuery();
      // TODO:  make sure we don't have more than 1 result?  ApiKey had better be unique
      if (rs.next()) {
        User u = new UserVO(rs.getLong(1));
        u.setUsername(rs.getString(2));
        u.setAccountId(rs.getLong(3));
        u.setSecretKey(DBEncryptionUtil.decrypt(rs.getString(4)));
        u.setState(State.valueOf(rs.getString(5)));

        AccountVO a = new AccountVO(rs.getLong(6));
        a.setAccountName(rs.getString(7));
        a.setType(rs.getShort(8));
        a.setDomainId(rs.getLong(9));
        a.setState(State.valueOf(rs.getString(10)));

        userAcctPair = new Pair<User, Account>(u, a);
      }
    } catch (Exception e) {
      s_logger.warn("Exception finding user/acct by api key: " + apiKey, e);
    }
    return userAcctPair;
  }
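A minimal sketch of how a caller might consume this lookup, assuming an injected DAO field named _accountDao that exposes findUserAccountByApiKey (the field name and surrounding class are hypothetical):

  Pair<User, Account> userAcct = _accountDao.findUserAccountByApiKey(apiKey);
  if (userAcct == null) {
    // no match found, or the query failed and was logged inside the DAO
    throw new CloudRuntimeException("Unable to verify user credentials for API key");
  }
  User user = userAcct.first();        // first() holds the User
  Account account = userAcct.second(); // second() holds the Account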
Example #2
  @Override
  public void updateAccounts(List<AccountVO> accounts) {
    Transaction txn = Transaction.currentTxn();
    try {
      txn.start();
      String sql = UPDATE_ACCOUNT;
      // in reality I just want the CLOUD_USAGE dataSource connection
      PreparedStatement pstmt = txn.prepareAutoCloseStatement(sql);
      for (AccountVO acct : accounts) {
        pstmt.setString(1, acct.getAccountName());

        Date removed = acct.getRemoved();
        if (removed == null) {
          pstmt.setString(2, null);
        } else {
          pstmt.setString(
              2, DateUtil.getDateDisplayString(TimeZone.getTimeZone("GMT"), removed));
        }

        pstmt.setLong(3, acct.getId());
        pstmt.addBatch();
      }
      pstmt.executeBatch();
      txn.commit();
    } catch (Exception ex) {
      txn.rollback();
      s_logger.error("error saving account to cloud_usage db", ex);
      throw new CloudRuntimeException(ex.getMessage());
    }
  }
  @Override
  public void createAsync(
      DataStore dataStore,
      DataObject dataObject,
      AsyncCompletionCallback<CreateCmdResult> callback) {
    String iqn = null;
    String errMsg = null;

    if (dataObject.getType() == DataObjectType.VOLUME) {
      VolumeInfo volumeInfo = (VolumeInfo) dataObject;
      AccountVO account = _accountDao.findById(volumeInfo.getAccountId());
      String sfAccountName = getSfAccountName(account.getUuid(), account.getAccountId());

      long storagePoolId = dataStore.getId();
      SolidFireConnection sfConnection = getSolidFireConnection(storagePoolId);

      if (!sfAccountExists(sfAccountName, sfConnection)) {
        SolidFireUtil.SolidFireAccount sfAccount =
            createSolidFireAccount(sfAccountName, sfConnection);

        updateCsDbWithAccountInfo(account.getId(), sfAccount);
      }

      SolidFireUtil.SolidFireVolume sfVolume = createSolidFireVolume(volumeInfo, sfConnection);

      iqn = sfVolume.getIqn();

      VolumeVO volume = _volumeDao.findById(volumeInfo.getId());

      volume.set_iScsiName(iqn);
      volume.setFolder(String.valueOf(sfVolume.getId()));
      volume.setPoolType(StoragePoolType.IscsiLUN);
      volume.setPoolId(storagePoolId);

      _volumeDao.update(volume.getId(), volume);

      StoragePoolVO storagePool = _storagePoolDao.findById(dataStore.getId());

      long capacityBytes = storagePool.getCapacityBytes();
      long usedBytes = storagePool.getUsedBytes();

      usedBytes += volumeInfo.getSize();

      storagePool.setUsedBytes(usedBytes > capacityBytes ? capacityBytes : usedBytes);

      _storagePoolDao.update(storagePoolId, storagePool);
    } else {
      errMsg = "Invalid DataObjectType (" + dataObject.getType() + ") passed to createAsync";
    }

    // path = iqn
    // size is pulled from DataObject instance, if errMsg is null
    CreateCmdResult result = new CreateCmdResult(iqn, new Answer(null, errMsg == null, errMsg));

    result.setResult(errMsg);

    callback.complete(result);
  }
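The used-bytes update above clamps at the pool capacity. An equivalent way to write the clamp, shown only for clarity (not part of the original code), assuming usedBytes has not yet been incremented:

  storagePool.setUsedBytes(Math.min(usedBytes + volumeInfo.getSize(), capacityBytes));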
  private void initializeForTest(VirtualMachineProfileImpl vmProfile, DataCenterDeployment plan) {
    DataCenterVO mockDc = mock(DataCenterVO.class);
    VMInstanceVO vm = mock(VMInstanceVO.class);
    UserVmVO userVm = mock(UserVmVO.class);
    ServiceOfferingVO offering = mock(ServiceOfferingVO.class);

    AccountVO account = mock(AccountVO.class);
    when(account.getId()).thenReturn(accountId);
    when(account.getAccountId()).thenReturn(accountId);
    when(vmProfile.getOwner()).thenReturn(account);
    when(vmProfile.getVirtualMachine()).thenReturn(vm);
    when(vmProfile.getId()).thenReturn(12L);
    when(vmDao.findById(12L)).thenReturn(userVm);
    when(userVm.getAccountId()).thenReturn(accountId);

    when(vm.getDataCenterId()).thenReturn(dataCenterId);
    when(dcDao.findById(1L)).thenReturn(mockDc);
    when(plan.getDataCenterId()).thenReturn(dataCenterId);
    when(plan.getClusterId()).thenReturn(null);
    when(plan.getPodId()).thenReturn(null);
    when(configDao.getValue(anyString())).thenReturn("false").thenReturn("CPU");

    // Mock offering details.
    when(vmProfile.getServiceOffering()).thenReturn(offering);
    when(offering.getId()).thenReturn(offeringId);
    when(vmProfile.getServiceOfferingId()).thenReturn(offeringId);
    when(offering.getCpu()).thenReturn(noOfCpusInOffering);
    when(offering.getSpeed()).thenReturn(cpuSpeedInOffering);
    when(offering.getRamSize()).thenReturn(ramInOffering);

    List<Long> clustersWithEnoughCapacity = new ArrayList<Long>();
    clustersWithEnoughCapacity.add(1L);
    clustersWithEnoughCapacity.add(2L);
    clustersWithEnoughCapacity.add(3L);
    when(capacityDao.listClustersInZoneOrPodByHostCapacities(
            dataCenterId,
            noOfCpusInOffering * cpuSpeedInOffering,
            ramInOffering * 1024L * 1024L,
            CapacityVO.CAPACITY_TYPE_CPU,
            true))
        .thenReturn(clustersWithEnoughCapacity);

    Map<Long, Double> clusterCapacityMap = new HashMap<Long, Double>();
    clusterCapacityMap.put(1L, 2048D);
    clusterCapacityMap.put(2L, 2048D);
    clusterCapacityMap.put(3L, 2048D);
    Pair<List<Long>, Map<Long, Double>> clustersOrderedByCapacity =
        new Pair<List<Long>, Map<Long, Double>>(clustersWithEnoughCapacity, clusterCapacityMap);
    when(capacityDao.orderClustersByAggregateCapacity(
            dataCenterId, CapacityVO.CAPACITY_TYPE_CPU, true))
        .thenReturn(clustersOrderedByCapacity);

    List<Long> disabledClusters = new ArrayList<Long>();
    List<Long> clustersWithDisabledPods = new ArrayList<Long>();
    when(clusterDao.listDisabledClusters(dataCenterId, null)).thenReturn(disabledClusters);
    when(clusterDao.listClustersWithDisabledPods(dataCenterId))
        .thenReturn(clustersWithDisabledPods);
  }
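Note on the chained stubbing of configDao.getValue(anyString()): Mockito applies consecutive thenReturn values to consecutive invocations, so the stub behaves as sketched below (the keys are illustrative):

  configDao.getValue("first.key");  // returns "false" (first invocation)
  configDao.getValue("second.key"); // returns "CPU" (second and every later invocation)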
Example #5
  @Override
  public void markForCleanup(long accountId) {
    AccountVO account = findByIdIncludingRemoved(accountId);
    if (!account.getNeedsCleanup()) {
      account.setNeedsCleanup(true);
      if (!update(accountId, account)) {
        s_logger.warn("Failed to mark account id=" + accountId + " for cleanup");
      }
    }
  }
  private static void createUsageRecord(
      int type,
      long runningTime,
      Date startDate,
      Date endDate,
      AccountVO account,
      long pfId,
      long zoneId) {
    // Our smallest increment is hourly for now
    if (s_logger.isDebugEnabled()) {
      s_logger.debug("Total running time " + runningTime + "ms");
    }

    float usage = runningTime / 1000f / 60f / 60f;

    DecimalFormat dFormat = new DecimalFormat("#.######");
    String usageDisplay = dFormat.format(usage);

    if (s_logger.isDebugEnabled()) {
      s_logger.debug(
          "Creating usage record for port forwarding rule: "
              + pfId
              + ", usage: "
              + usageDisplay
              + ", startDate: "
              + startDate
              + ", endDate: "
              + endDate
              + ", for account: "
              + account.getId());
    }

    // Create the usage record
    String usageDesc = "Port Forwarding Rule: " + pfId + " usage time";

    // ToDo: get zone id
    UsageVO usageRecord =
        new UsageVO(
            zoneId,
            account.getId(),
            account.getDomainId(),
            usageDesc,
            usageDisplay + " Hrs",
            type,
            Double.valueOf(usage),
            null,
            null,
            null,
            null,
            pfId,
            null,
            startDate,
            endDate);
    s_usageDao.persist(usageRecord);
  }
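A quick worked example of the milliseconds-to-hours conversion used above (the input value is illustrative):

  long runningTime = 5400000L;                                        // 1.5 hours in milliseconds
  float usage = runningTime / 1000f / 60f / 60f;                      // 1.5
  String usageDisplay = new DecimalFormat("#.######").format(usage);  // "1.5"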
Example #7
  private static void createUsageRecord(
      long zoneId,
      long runningTime,
      Date startDate,
      Date endDate,
      AccountVO account,
      long IpId,
      String IPAddress,
      boolean isSourceNat) {
    if (s_logger.isDebugEnabled()) {
      s_logger.debug("Total usage time " + runningTime + "ms");
    }

    float usage = runningTime / 1000f / 60f / 60f;

    DecimalFormat dFormat = new DecimalFormat("#.######");
    String usageDisplay = dFormat.format(usage);

    if (s_logger.isDebugEnabled()) {
      s_logger.debug(
          "Creating IP usage record with id: "
              + IpId
              + ", usage: "
              + usageDisplay
              + ", startDate: "
              + startDate
              + ", endDate: "
              + endDate
              + ", for account: "
              + account.getId());
    }

    String usageDesc = "IPAddress: " + IPAddress;

    // Create the usage record

    UsageVO usageRecord =
        new UsageVO(
            zoneId,
            account.getAccountId(),
            account.getDomainId(),
            usageDesc,
            usageDisplay + " Hrs",
            UsageTypes.IP_ADDRESS,
            Double.valueOf(usage),
            null,
            null,
            null,
            null,
            IpId,
            startDate,
            endDate,
            (isSourceNat ? "SourceNat" : ""));
    m_usageDao.persist(usageRecord);
  }
  @Override
  public void deleteAsync(
      DataStore dataStore, DataObject dataObject, AsyncCompletionCallback<CommandResult> callback) {
    String errMsg = null;

    if (dataObject.getType() == DataObjectType.VOLUME) {
      VolumeInfo volumeInfo = (VolumeInfo) dataObject;
      AccountVO account = _accountDao.findById(volumeInfo.getAccountId());
      AccountDetailVO accountDetails =
          _accountDetailsDao.findDetail(account.getAccountId(), SolidFireUtil.ACCOUNT_ID);
      long sfAccountId = Long.parseLong(accountDetails.getValue());

      long storagePoolId = dataStore.getId();
      SolidFireConnection sfConnection = getSolidFireConnection(storagePoolId);

      deleteSolidFireVolume(volumeInfo, sfConnection);

      _volumeDao.deleteVolumesByInstance(volumeInfo.getId());

      //            if (!sfAccountHasVolume(sfAccountId, sfConnection)) {
      //                // delete the account from the SolidFire SAN
      //                deleteSolidFireAccount(sfAccountId, sfConnection);
      //
      //                // delete the info in the account_details table
      //                // that's related to the SolidFire account
      //                _accountDetailsDao.deleteDetails(account.getAccountId());
      //            }

      StoragePoolVO storagePool = _storagePoolDao.findById(storagePoolId);

      long usedBytes = storagePool.getUsedBytes();

      usedBytes -= volumeInfo.getSize();

      storagePool.setUsedBytes(usedBytes < 0 ? 0 : usedBytes);

      _storagePoolDao.update(storagePoolId, storagePool);
    } else {
      errMsg = "Invalid DataObjectType (" + dataObject.getType() + ") passed to deleteAsync";
    }

    CommandResult result = new CommandResult();

    result.setResult(errMsg);

    callback.complete(result);
  }
  @Before
  public void setUp() {
    ComponentContext.initComponentsLifeCycle();

    acct.setType(Account.ACCOUNT_TYPE_NORMAL);
    acct.setAccountName("user1");
    acct.setDomainId(domainId);
    acct.setId(accountId);

    UserVO user =
        new UserVO(
            1,
            "testuser",
            "password",
            "firstname",
            "lastName",
            "email",
            "timezone",
            UUID.randomUUID().toString());

    CallContext.register(user, acct);
  }
  public static boolean parse(AccountVO account, Date startDate, Date endDate) {
    if (s_logger.isDebugEnabled()) {
      s_logger.debug("Parsing all PortForwardingRule usage events for account: " + account.getId());
    }
    if ((endDate == null) || endDate.after(new Date())) {
      endDate = new Date();
    }

    // - query the port forwarding rule usage table with the following criteria:
    //     - look for an entry for accountId with start date in the given range
    //     - look for an entry for accountId with end date in the given range
    //     - look for an entry for accountId with end date null (currently active rule)
    //     - look for an entry for accountId with start date before the given range *and* end date
    //       after the given range
    List<UsagePortForwardingRuleVO> usagePFs =
        s_usagePFRuleDao.getUsageRecords(
            account.getId(), account.getDomainId(), startDate, endDate, false, 0);

    if (usagePFs.isEmpty()) {
      s_logger.debug("No port forwarding usage events for this period");
      return true;
    }

    // This map has both the running time *and* the usage amount.
    Map<String, Pair<Long, Long>> usageMap = new HashMap<String, Pair<Long, Long>>();
    Map<String, PFInfo> pfMap = new HashMap<String, PFInfo>();

    // loop through all the port forwarding rules, create a usage record for each
    for (UsagePortForwardingRuleVO usagePF : usagePFs) {
      long pfId = usagePF.getId();
      String key = "" + pfId;

      pfMap.put(key, new PFInfo(pfId, usagePF.getZoneId()));

      Date pfCreateDate = usagePF.getCreated();
      Date pfDeleteDate = usagePF.getDeleted();

      if ((pfDeleteDate == null) || pfDeleteDate.after(endDate)) {
        pfDeleteDate = endDate;
      }

      // clip the start date to the beginning of our aggregation range if the rule has existed
      // for a while
      if (pfCreateDate.before(startDate)) {
        pfCreateDate = startDate;
      }

      if (pfCreateDate.after(endDate)) {
        // Ignore records created after endDate
        continue;
      }

      long currentDuration =
          (pfDeleteDate.getTime() - pfCreateDate.getTime())
              + 1; // make sure this is an inclusive check for milliseconds (i.e. use n - m + 1 to
                   // find total number of millis to charge)

      updatePFUsageData(usageMap, key, usagePF.getId(), currentDuration);
    }

    for (String pfIdKey : usageMap.keySet()) {
      Pair<Long, Long> sgtimeInfo = usageMap.get(pfIdKey);
      long useTime = sgtimeInfo.second().longValue();

      // Only create a usage record if the usage time is greater than zero.
      if (useTime > 0L) {
        PFInfo info = pfMap.get(pfIdKey);
        createUsageRecord(
            UsageTypes.PORT_FORWARDING_RULE,
            useTime,
            startDate,
            endDate,
            account,
            info.getId(),
            info.getZoneId());
      }
    }

    return true;
  }
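The "+ 1" on the duration makes the millisecond range inclusive. For example, with illustrative timestamps (createMillis and deleteMillis stand in for the clipped getTime() values) where the rule is active from 10:00:00.000 to 10:59:59.999 inside the aggregation window:

  long currentDuration = (deleteMillis - createMillis) + 1; // 3,599,999 + 1 = 3,600,000 ms, i.e. exactly one hour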
Example #11
  public static boolean parse(AccountVO account, Date startDate, Date endDate) {
    if (s_logger.isDebugEnabled()) {
      s_logger.debug("Parsing all VMInstance usage events for account: " + account.getId());
    }
    if ((endDate == null) || endDate.after(new Date())) {
      endDate = new Date();
    }

    // - query usage_vm_instance table with the following criteria:
    //     - look for an entry for accountId with start date in the given range
    //     - look for an entry for accountId with end date in the given range
    //     - look for an entry for accountId with end date null (currently running vm or owned IP)
    //     - look for an entry for accountId with start date before given range *and* end date after
    // given range
    List<UsageVMInstanceVO> usageInstances =
        s_usageInstanceDao.getUsageRecords(account.getId(), startDate, endDate);
    // ToDo: Add domainID for getting usage records

    // This map has both the running time *and* the usage amount.
    Map<String, Pair<String, Long>> usageVMUptimeMap = new HashMap<String, Pair<String, Long>>();
    Map<String, Pair<String, Long>> allocatedVMMap = new HashMap<String, Pair<String, Long>>();

    Map<String, VMInfo> vmServiceOfferingMap = new HashMap<String, VMInfo>();

    // loop through all the usage instances, create a usage record for each
    for (UsageVMInstanceVO usageInstance : usageInstances) {
      long vmId = usageInstance.getVmInstanceId();
      long soId = usageInstance.getSerivceOfferingId();
      long zoneId = usageInstance.getZoneId();
      long tId = usageInstance.getTemplateId();
      int usageType = usageInstance.getUsageType();
      String key = vmId + "-" + soId + "-" + usageType;

      // store the info in the service offering map
      vmServiceOfferingMap.put(
          key, new VMInfo(vmId, zoneId, soId, tId, usageInstance.getHypervisorType()));

      Date vmStartDate = usageInstance.getStartDate();
      Date vmEndDate = usageInstance.getEndDate();

      if ((vmEndDate == null) || vmEndDate.after(endDate)) {
        vmEndDate = endDate;
      }

      // clip the start date to the beginning of our aggregation range if the vm has been running
      // for a while
      if (vmStartDate.before(startDate)) {
        vmStartDate = startDate;
      }

      long currentDuration =
          (vmEndDate.getTime() - vmStartDate.getTime())
              + 1; // make sure this is an inclusive check for milliseconds (i.e. use n - m + 1 to
                   // find total number of millis to charge)

      switch (usageType) {
        case UsageTypes.ALLOCATED_VM:
          updateVmUsageData(allocatedVMMap, key, usageInstance.getVmName(), currentDuration);
          break;
        case UsageTypes.RUNNING_VM:
          updateVmUsageData(usageVMUptimeMap, key, usageInstance.getVmName(), currentDuration);
          break;
      }
    }

    for (String vmIdKey : usageVMUptimeMap.keySet()) {
      Pair<String, Long> vmUptimeInfo = usageVMUptimeMap.get(vmIdKey);
      long runningTime = vmUptimeInfo.second().longValue();

      // Only create a usage record if the running time is greater than zero.
      if (runningTime > 0L) {
        VMInfo info = vmServiceOfferingMap.get(vmIdKey);
        createUsageRecord(
            UsageTypes.RUNNING_VM,
            runningTime,
            startDate,
            endDate,
            account,
            info.getVirtualMachineId(),
            vmUptimeInfo.first(),
            info.getZoneId(),
            info.getServiceOfferingId(),
            info.getTemplateId(),
            info.getHypervisorType());
      }
    }

    for (String vmIdKey : allocatedVMMap.keySet()) {
      Pair<String, Long> vmAllocInfo = allocatedVMMap.get(vmIdKey);
      long allocatedTime = vmAllocInfo.second().longValue();

      // Only create a usage record if the allocated time is greater than zero.
      if (allocatedTime > 0L) {
        VMInfo info = vmServiceOfferingMap.get(vmIdKey);
        createUsageRecord(
            UsageTypes.ALLOCATED_VM,
            allocatedTime,
            startDate,
            endDate,
            account,
            info.getVirtualMachineId(),
            vmAllocInfo.first(),
            info.getZoneId(),
            info.getServiceOfferingId(),
            info.getTemplateId(),
            info.getHypervisorType());
      }
    }

    return true;
  }
Example #12
  private static void createUsageRecord(
      int type,
      long runningTime,
      Date startDate,
      Date endDate,
      AccountVO account,
      long vmId,
      String vmName,
      long zoneId,
      long serviceOfferingId,
      long templateId,
      String hypervisorType) {
    // Our smallest increment is hourly for now
    if (s_logger.isDebugEnabled()) {
      s_logger.debug("Total running time " + runningTime + "ms");
    }

    float usage = runningTime / 1000f / 60f / 60f;

    DecimalFormat dFormat = new DecimalFormat("#.######");
    String usageDisplay = dFormat.format(usage);

    if (s_logger.isDebugEnabled()) {
      s_logger.debug(
          "Creating VM usage record for vm: "
              + vmName
              + ", type: "
              + type
              + ", usage: "
              + usageDisplay
              + ", startDate: "
              + startDate
              + ", endDate: "
              + endDate
              + ", for account: "
              + account.getId());
    }

    // Create the usage record
    String usageDesc = vmName;
    if (type == UsageTypes.ALLOCATED_VM) {
      usageDesc += " allocated";
    } else {
      usageDesc += " running time";
    }
    usageDesc += " (ServiceOffering: " + serviceOfferingId + ") (Template: " + templateId + ")";
    UsageVO usageRecord =
        new UsageVO(
            Long.valueOf(zoneId),
            account.getId(),
            account.getDomainId(),
            usageDesc,
            usageDisplay + " Hrs",
            type,
            Double.valueOf(usage),
            Long.valueOf(vmId),
            vmName,
            Long.valueOf(serviceOfferingId),
            Long.valueOf(templateId),
            Long.valueOf(vmId),
            startDate,
            endDate,
            hypervisorType);
    s_usageDao.persist(usageRecord);
  }
Example #13
  public static boolean parse(AccountVO account, Date startDate, Date endDate) {
    if (s_logger.isDebugEnabled()) {
      s_logger.debug("Parsing IP Address usage for account: " + account.getId());
    }
    if ((endDate == null) || endDate.after(new Date())) {
      endDate = new Date();
    }

    // - query usage_ip_address table with the following criteria:
    //     - look for an entry for accountId with start date in the given range
    //     - look for an entry for accountId with end date in the given range
    //     - look for an entry for accountId with end date null (currently running vm or owned IP)
    //     - look for an entry for accountId with start date before given range *and* end date after
    // given range
    List<UsageIPAddressVO> usageIPAddress =
        m_usageIPAddressDao.getUsageRecords(
            account.getId(), account.getDomainId(), startDate, endDate);

    if (usageIPAddress.isEmpty()) {
      s_logger.debug("No IP Address usage for this period");
      return true;
    }

    // This map has both the running time *and* the usage amount.
    Map<String, Pair<Long, Long>> usageMap = new HashMap<String, Pair<Long, Long>>();

    Map<String, IpInfo> IPMap = new HashMap<String, IpInfo>();

    // loop through all the usage IPs, create a usage record for each
    for (UsageIPAddressVO usageIp : usageIPAddress) {
      long IpId = usageIp.getId();

      String key = "" + IpId;

      // store the info in the IP map
      IPMap.put(
          key, new IpInfo(usageIp.getZoneId(), IpId, usageIp.getAddress(), usageIp.isSourceNat()));

      Date IpAssignDate = usageIp.getAssigned();
      Date IpReleaseDeleteDate = usageIp.getReleased();

      if ((IpReleaseDeleteDate == null) || IpReleaseDeleteDate.after(endDate)) {
        IpReleaseDeleteDate = endDate;
      }

      // clip the start date to the beginning of our aggregation range if the IP has been assigned
      // for a while
      if (IpAssignDate.before(startDate)) {
        IpAssignDate = startDate;
      }

      long currentDuration =
          (IpReleaseDeleteDate.getTime() - IpAssignDate.getTime())
              + 1; // make sure this is an inclusive check for milliseconds (i.e. use n - m + 1 to
                   // find total number of millis to charge)

      updateIpUsageData(usageMap, key, usageIp.getId(), currentDuration);
    }

    for (String ipIdKey : usageMap.keySet()) {
      Pair<Long, Long> ipTimeInfo = usageMap.get(ipIdKey);
      long useTime = ipTimeInfo.second().longValue();

      // Only create a usage record if the usage time is greater than zero.
      if (useTime > 0L) {
        IpInfo info = IPMap.get(ipIdKey);
        createUsageRecord(
            info.getZoneId(),
            useTime,
            startDate,
            endDate,
            account,
            info.getIpId(),
            info.getIPAddress(),
            info.isSourceNat());
      }
    }

    return true;
  }
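The updateIpUsageData helper is not shown in this listing. A plausible shape, inferred from how the map is consumed above (a sketch, not the original implementation; only second(), the accumulated duration, is read back by the caller):

  private static void updateIpUsageData(
      Map<String, Pair<Long, Long>> usageDataMap, String key, long ipId, long duration) {
    Pair<Long, Long> ipUsageInfo = usageDataMap.get(key);
    if (ipUsageInfo == null) {
      ipUsageInfo = new Pair<Long, Long>(ipId, duration);
    } else {
      // accumulate the active time for this IP within the aggregation range
      ipUsageInfo = new Pair<Long, Long>(ipUsageInfo.first(), ipUsageInfo.second() + duration);
    }
    usageDataMap.put(key, ipUsageInfo);
  }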