@Override
public void handleUploadSync(long sserverId) {
    // Resolve the storage host row for this agent id; a missing row means the
    // agent does not correspond to any known host, so there is nothing to sync.
    HostVO storageHost = _serverDao.findById(sserverId);
    if (storageHost == null) {
        s_logger.warn("Huh? Agent id " + sserverId + " does not correspond to a row in hosts table?");
        return;
    }
    s_logger.debug("Handling upload sserverId " + sserverId);

    // Any upload or copy that was still in flight on this host can no longer
    // complete (we are re-syncing with the agent), so fail each one explicitly
    // instead of leaving it stuck in an *_IN_PROGRESS state.
    // NOTE: the original wrapped the loop in `if (size() > 0)`; that guard is
    // redundant — iterating an empty list is already a no-op.
    List<UploadVO> uploadsInProgress = new ArrayList<>();
    uploadsInProgress.addAll(_uploadDao.listByHostAndUploadStatus(sserverId, UploadVO.Status.UPLOAD_IN_PROGRESS));
    uploadsInProgress.addAll(_uploadDao.listByHostAndUploadStatus(sserverId, UploadVO.Status.COPY_IN_PROGRESS));
    for (UploadVO uploadJob : uploadsInProgress) {
        uploadJob.setUploadState(UploadVO.Status.UPLOAD_ERROR);
        uploadJob.setErrorString("Could not complete the upload.");
        uploadJob.setLastUpdated(new Date());
        _uploadDao.update(uploadJob.getId(), uploadJob);
    }
}
vo.setLastUpdated(new Date()); vo.setUploadUrl(uploadUrl); _uploadDao.update(upload.getId(), vo); return _uploadDao.findById(upload.getId(), true); } else { // ssvm publicip changed return null; vo.setUploadUrl(extractURL); vo.setUploadState(Status.DOWNLOAD_URL_CREATED); _uploadDao.update(uploadTemplateObj.getId(), vo); success = true; return _uploadDao.findById(uploadTemplateObj.getId(), true); } finally { if (!success) { UploadVO uploadJob = _uploadDao.createForUpdate(uploadTemplateObj.getId()); uploadJob.setLastUpdated(new Date()); uploadJob.setErrorString(errorString); uploadJob.setUploadState(Status.ERROR); _uploadDao.update(uploadTemplateObj.getId(), uploadJob);
ul.scheduleStatusCheck(RequestType.GET_OR_RESTART); return uploadTemplateObj.getId();
@Override
public void extractVolume(UploadVO uploadVolumeObj, DataStore secStore, VolumeVO volume, String url, Long dataCenterId, String installPath, long eventId,
        long asyncJobId, AsyncJobManager asyncMgr) {
    // Reset the upload record before kicking off a fresh extraction attempt.
    uploadVolumeObj.setUploadState(Upload.Status.NOT_UPLOADED);
    _uploadDao.update(uploadVolumeObj.getId(), uploadVolumeObj);

    start();

    // Build the upload command and register a listener that will track its progress.
    UploadCommand ucmd = new UploadCommand(url, volume.getId(), volume.getSize(), installPath, Type.VOLUME);
    UploadListener listener =
        new UploadListener(secStore, _timer, _uploadDao, uploadVolumeObj, this, ucmd, volume.getAccountId(), volume.getName(), Type.VOLUME, eventId, asyncJobId, asyncMgr);
    _listenerMap.put(uploadVolumeObj, listener);

    try {
        // Pick an endpoint (host / ssvm) capable of servicing the secondary store.
        EndPoint ep = _epSelector.select(secStore);
        if (ep == null) {
            String errMsg = "No remote endpoint to send command, check if host or ssvm is down?";
            s_logger.error(errMsg);
            return;
        }
        ep.sendMessageAsync(ucmd, new UploadListener.Callback(ep.getId(), listener));
    } catch (Exception e) {
        // Dispatch failed: mark the listener disconnected and let its status
        // checker retry / surface the failure.
        s_logger.warn("Unable to start upload of volume " + volume.getName() + " from " + secStore.getName() + " to " + url, e);
        listener.setDisconnected();
        listener.scheduleStatusCheck(RequestType.GET_OR_RESTART);
    }
}
public void cleanupStorage() { final int EXTRACT_URL_LIFE_LIMIT_IN_SECONDS = _urlExpirationInterval; List<UploadVO> extractJobs = _uploadDao.listByModeAndStatus(Mode.HTTP_DOWNLOAD, Status.DOWNLOAD_URL_CREATED); for (UploadVO extractJob : extractJobs) { if (getTimeDiff(extractJob.getLastUpdated()) > EXTRACT_URL_LIFE_LIMIT_IN_SECONDS) { String path = extractJob.getInstallPath(); DataStore secStore = storeMgr.getDataStore(extractJob.getDataStoreId(), DataStoreRole.Image); // Would delete the symlink for the Type and if Type == VOLUME then also the volume DeleteEntityDownloadURLCommand cmd = new DeleteEntityDownloadURLCommand(path, extractJob.getType(), extractJob.getUploadUrl(), ((ImageStoreVO)secStore).getParent()); EndPoint ep = _epSelector.select(secStore); if (ep == null) { s_logger.warn("UploadMonitor cleanup: There is no secondary storage VM for secondary storage host " + extractJob.getDataStoreId()); continue; //TODO: why continue? why not break? } if (s_logger.isDebugEnabled()) { s_logger.debug("UploadMonitor cleanup: Sending deletion of extract URL " + extractJob.getUploadUrl() + " to ssvm " + ep.getHostAddr()); } Answer ans = ep.sendMessage(cmd); if (ans != null && ans.getResult()) { _uploadDao.remove(extractJob.getId()); } else { s_logger.warn("UploadMonitor cleanup: Unable to delete the link for " + extractJob.getType() + " id=" + extractJob.getTypeId() + " url=" + extractJob.getUploadUrl() + " on ssvm " + ep.getHostAddr()); } } } }
uploadJob.setUploadState(Status.DOWNLOAD_URL_NOT_CREATED); uploadJob.setLastUpdated(new Date()); _uploadDao.update(uploadJob.getId(), uploadJob);
public UploadListener(DataStore host, Timer timerInput, UploadDao uploadDao, UploadVO uploadObj, UploadMonitorImpl uploadMonitor, UploadCommand cmd, Long accountId, String typeName, Type type, long eventId, long asyncJobId, AsyncJobManager asyncMgr) { sserver = host; this.uploadDao = uploadDao; this.uploadMonitor = uploadMonitor; this.cmd = cmd; uploadId = uploadObj.getId(); this.accountId = accountId; this.typeName = typeName; this.type = type; initStateMachine(); currState = getState(Status.NOT_UPLOADED.toString()); timer = timerInput; timeoutTask = new TimeoutTask(this); timer.schedule(timeoutTask, 3 * STATUS_POLL_INTERVAL); this.eventId = eventId; this.asyncJobId = asyncJobId; this.asyncMgr = asyncMgr; String extractId = null; if (type == Type.VOLUME) { extractId = ApiDBUtils.findVolumeById(uploadObj.getTypeId()).getUuid(); } else { extractId = ApiDBUtils.findTemplateById(uploadObj.getTypeId()).getUuid(); } resultObj = new ExtractResponse(extractId, typeName, ApiDBUtils.findAccountById(accountId).getUuid(), Status.NOT_UPLOADED.toString(), ApiDBUtils.findUploadById(uploadId) .getUuid()); resultObj.setResponseName(responseNameMap.get(type.toString())); updateDatabase(Status.NOT_UPLOADED, cmd.getUrl(), ""); }