diff --git a/lib/taskana-core/src/main/java/pro/taskana/BulkOperationResults.java b/lib/taskana-core/src/main/java/pro/taskana/BulkOperationResults.java index 56414bb21..1373387a7 100644 --- a/lib/taskana-core/src/main/java/pro/taskana/BulkOperationResults.java +++ b/lib/taskana-core/src/main/java/pro/taskana/BulkOperationResults.java @@ -18,7 +18,7 @@ import org.slf4j.LoggerFactory; */ public class BulkOperationResults { - private Map errorMap = new HashMap(); + private Map errorMap = new HashMap<>(); private static final Logger LOGGER = LoggerFactory.getLogger(BulkOperationResults.class); /** @@ -105,9 +105,11 @@ public class BulkOperationResults { * the other log */ public void addAllErrors(BulkOperationResults log) { - List failedIds = log.getFailedIds(); - for (K id : failedIds) { - addError(id, log.getErrorForId(id)); + if (log != null && log.containsErrors()) { + List failedIds = log.getFailedIds(); + for (K id : failedIds) { + addError(id, log.getErrorForId(id)); + } } } } diff --git a/lib/taskana-core/src/main/java/pro/taskana/TaskanaCallable.java b/lib/taskana-core/src/main/java/pro/taskana/TaskanaCallable.java new file mode 100644 index 000000000..7c5dacfe2 --- /dev/null +++ b/lib/taskana-core/src/main/java/pro/taskana/TaskanaCallable.java @@ -0,0 +1,14 @@ +package pro.taskana; + +/** + * represents a callable Object. + * + * @param + * the type of the returned objects. + * @author bbr + */ +@FunctionalInterface +public interface TaskanaCallable { + + T call(); +} diff --git a/lib/taskana-core/src/main/java/pro/taskana/TaskanaTransactionProvider.java b/lib/taskana-core/src/main/java/pro/taskana/TaskanaTransactionProvider.java new file mode 100644 index 000000000..57d87b1d9 --- /dev/null +++ b/lib/taskana-core/src/main/java/pro/taskana/TaskanaTransactionProvider.java @@ -0,0 +1,13 @@ +package pro.taskana; + +/** + * This class provides support for transactions. + * + * @author bbr + * @param + * the type of the returned objects. 
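For illustration of how the two new interfaces compose (package, class and variable names in this sketch are hypothetical), a caller hands its unit of work to a provider as a lambda; a real provider such as the SpringTransactionProvider added further down in this patch opens a transaction around the call:

    package pro.taskana.example; // hypothetical package, not part of this change

    import pro.taskana.TaskanaCallable;
    import pro.taskana.TaskanaTransactionProvider;

    public class TransactionProviderSketch {

        public static void main(String[] args) {
            // a provider that simply invokes the callable; a real implementation
            // would begin, commit and roll back a transaction around action.call()
            TaskanaTransactionProvider<String> passThrough = action -> action.call();

            // TaskanaCallable is a @FunctionalInterface, so the work itself is a lambda
            String result = passThrough.executeInTransaction(() -> "done");
            System.out.println(result); // prints "done"
        }
    }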
+ */ +public interface TaskanaTransactionProvider { + + T executeInTransaction(TaskanaCallable action); +} diff --git a/lib/taskana-core/src/main/java/pro/taskana/configuration/TaskanaEngineConfiguration.java b/lib/taskana-core/src/main/java/pro/taskana/configuration/TaskanaEngineConfiguration.java index 0e8086bd4..01f6cc445 100644 --- a/lib/taskana-core/src/main/java/pro/taskana/configuration/TaskanaEngineConfiguration.java +++ b/lib/taskana-core/src/main/java/pro/taskana/configuration/TaskanaEngineConfiguration.java @@ -44,6 +44,8 @@ public class TaskanaEngineConfiguration { private static final String H2_DRIVER = "org.h2.Driver"; private static final String TASKANA_PROPERTIES = "/taskana.properties"; private static final String TASKANA_ROLES_SEPARATOR = "|"; + private static final String TASKANA_JOB_TASK_UPDATES_PER_TRANSACTION = "taskana.job.max.task.updates.per.transaction"; + private static final String TASKANA_JOB_RETRIES_FOR_FAILED_TASK_UPDATES = "taskana.job.max.retries.for.failed.task.updates"; private static final String TASKANA_DOMAINS_PROPERTY = "taskana.domains"; private static final String TASKANA_CLASSIFICATION_TYPES_PROPERTY = "taskana.classification.types"; private static final String TASKANA_CLASSIFICATION_CATEGORIES_PROPERTY = "taskana.classification.categories"; @@ -68,6 +70,10 @@ public class TaskanaEngineConfiguration { private boolean germanPublicHolidaysEnabled; private List customHolidays; + // Properties for task-update Job execution on classification change + private int maxNumberOfTaskUpdatesPerTransaction; + private int maxNumberOfRetriesOfFailedTaskUpdates; + // List of configured domain names protected List domains = new ArrayList(); @@ -117,11 +123,32 @@ public class TaskanaEngineConfiguration { LOGGER.debug("Reading taskana configuration from {} with role separator {}", propertiesFile, rolesSeparator); Properties props = readPropertiesFromFile(propertiesFile); initTaskanaRoles(props, rolesSeparator); + initJobParameters(props); initDomains(props); initClassificationTypes(props); initClassificationCategories(props); } + private void initJobParameters(Properties props) { + String taskUpdates = props.getProperty(TASKANA_JOB_TASK_UPDATES_PER_TRANSACTION); + if (taskUpdates == null || taskUpdates.isEmpty()) { + maxNumberOfTaskUpdatesPerTransaction = 50; + } else { + maxNumberOfTaskUpdatesPerTransaction = Integer.parseInt(taskUpdates); + } + + String retries = props.getProperty(TASKANA_JOB_RETRIES_FOR_FAILED_TASK_UPDATES); + if (retries == null || retries.isEmpty()) { + maxNumberOfRetriesOfFailedTaskUpdates = 3; + } else { + maxNumberOfRetriesOfFailedTaskUpdates = Integer.parseInt(retries); + } + + LOGGER.debug( + "Configured number of task updates per transaction: {}, number of retries of failed task updates: {}", + maxNumberOfTaskUpdatesPerTransaction, maxNumberOfRetriesOfFailedTaskUpdates); + } + private void initDomains(Properties props) { String domainNames = props.getProperty(TASKANA_DOMAINS_PROPERTY); if (domainNames != null && !domainNames.isEmpty()) { @@ -173,6 +200,7 @@ public class TaskanaEngineConfiguration { if (key != null) { roleMap.put(key, roleMemberSet); } else { + LOGGER.error("Internal System error when processing role property {}.", propertyName); throw new SystemException( "Internal System error when processing role property " + propertyName); } @@ -202,6 +230,7 @@ public class TaskanaEngineConfiguration { LOGGER.debug("Role properties were loaded from file {}.", propertiesFile); } } catch (IOException e) { + LOGGER.error("caught 
IOException when processing properties file {}.", propertiesFile); throw new SystemException("internal System error when processing properties file " + propertiesFile, e.getCause()); } @@ -271,6 +300,14 @@ public class TaskanaEngineConfiguration { return this.propertiesFileName; } + public int getMaxNumberOfTaskUpdatesPerTransaction() { + return maxNumberOfTaskUpdatesPerTransaction; + } + + public int getMaxNumberOfRetriesOfFailedTaskUpdates() { + return maxNumberOfRetriesOfFailedTaskUpdates; + } + public void setPropertiesFileName(String propertiesFileName) { this.propertiesFileName = propertiesFileName; } diff --git a/lib/taskana-core/src/main/java/pro/taskana/impl/ClassificationChangedJobExecutor.java b/lib/taskana-core/src/main/java/pro/taskana/impl/ClassificationChangedJobExecutor.java new file mode 100644 index 000000000..e1cbe09b7 --- /dev/null +++ b/lib/taskana-core/src/main/java/pro/taskana/impl/ClassificationChangedJobExecutor.java @@ -0,0 +1,98 @@ +package pro.taskana.impl; + +import java.time.Instant; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import pro.taskana.BulkOperationResults; +import pro.taskana.impl.util.LoggerUtils; +import pro.taskana.mappings.AttachmentMapper; +import pro.taskana.mappings.ClassificationMapper; +import pro.taskana.mappings.JobMapper; +import pro.taskana.mappings.TaskMapper; + +/** + * This class executes a job of type CLASSIFICATIONCHANGEDJOB. + * + * @author bbr + */ + +public class ClassificationChangedJobExecutor implements SingleJobExecutor { + + private static final Logger LOGGER = LoggerFactory.getLogger(ClassificationChangedJobExecutor.class); + + private TaskanaEngineImpl taskanaEngine; + private Job job; + private String classificationId; + private boolean priorityChanged; + private boolean serviceLevelChanged; + private TaskMapper taskMapper; + private ClassificationMapper classificationMapper; + private AttachmentMapper attachmentMapper; + + @Override + public BulkOperationResults runSingleJob(Job job, TaskanaEngineImpl taskanaEngine) { + + this.job = job; + this.taskanaEngine = taskanaEngine; + this.taskMapper = taskanaEngine.getSqlSession().getMapper(TaskMapper.class); + this.classificationMapper = taskanaEngine.getSqlSession().getMapper(ClassificationMapper.class); + this.attachmentMapper = taskanaEngine.getSqlSession().getMapper(AttachmentMapper.class); + Map args = job.getArguments(); + classificationId = args.get(CLASSIFICATION_ID); + priorityChanged = Boolean.parseBoolean(args.get(PRIORITY_CHANGED)); + serviceLevelChanged = Boolean.parseBoolean(args.get(SERVICE_LEVEL_CHANGED)); + BulkOperationResults bulkLog = new BulkOperationResults<>(); + bulkLog.addAllErrors(findAffectedTasksAndScheduleUpdateJobs()); + + return bulkLog; + + } + + private BulkOperationResults findAffectedTasksAndScheduleUpdateJobs() { + List tasks = taskMapper.findTasksAffectedByClassificationChange(classificationId); + List taskIdsFromAttachments = attachmentMapper + .findTaskIdsAffectedByClassificationChange(classificationId); + + List filteredTaskIdsFromAttachments = taskIdsFromAttachments.isEmpty() ? 
new ArrayList<>() + : taskMapper.filterTaskIdsForNotCompleted(taskIdsFromAttachments); + + Set affectedTaskIds = new HashSet<>(filteredTaskIdsFromAttachments); + for (TaskSummaryImpl task : tasks) { + affectedTaskIds.add(task.getTaskId()); + } + if (LOGGER.isDebugEnabled()) { + LOGGER.debug("the following tasks are affected by the update of classification {} : {}", classificationId, + LoggerUtils.setToString(affectedTaskIds)); + } + int batchSize = taskanaEngine.getConfiguration().getMaxNumberOfTaskUpdatesPerTransaction(); + List> affectedTaskBatches = JobRunner.partition(affectedTaskIds, batchSize); + for (List taskIdBatch : affectedTaskBatches) { + Map args = new HashMap<>(); + if (!taskIdBatch.isEmpty()) { + String taskIds = String.join(",", taskIdBatch); + args.put(ClassificationChangedJobExecutor.TASKIDS, taskIds); + args.put(CLASSIFICATION_ID, classificationId); + args.put(PRIORITY_CHANGED, new Boolean(priorityChanged).toString()); + args.put(SERVICE_LEVEL_CHANGED, new Boolean(serviceLevelChanged).toString()); + Job job = new Job(); + job.setCreated(Instant.now()); + job.setState(Job.State.READY); + job.setRetryCount(0); + job.setType(Job.Type.UPDATETASKSJOB); + job.setExecutor(TaskUpdateJobExecutor.class.getName()); + job.setArguments(args); + taskanaEngine.getSqlSession().getMapper(JobMapper.class).insertJob(job); + } + } + return null; + } + +} diff --git a/lib/taskana-core/src/main/java/pro/taskana/impl/ClassificationServiceImpl.java b/lib/taskana-core/src/main/java/pro/taskana/impl/ClassificationServiceImpl.java index 5b881876f..4c68453a9 100644 --- a/lib/taskana-core/src/main/java/pro/taskana/impl/ClassificationServiceImpl.java +++ b/lib/taskana-core/src/main/java/pro/taskana/impl/ClassificationServiceImpl.java @@ -8,6 +8,7 @@ import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import org.apache.ibatis.exceptions.PersistenceException; import org.slf4j.Logger; @@ -98,7 +99,7 @@ public class ClassificationServiceImpl implements ClassificationService { } private void addClassificationToRootDomain(ClassificationImpl classificationImpl) { - if (!classificationImpl.getDomain().equals("")) { + if (!Objects.equals(classificationImpl.getDomain(), "")) { boolean doesExist = true; String idBackup = classificationImpl.getId(); String domainBackup = classificationImpl.getDomain(); @@ -178,26 +179,22 @@ public class ClassificationServiceImpl implements ClassificationService { } classificationMapper.update(classificationImpl); boolean priorityChanged = oldClassification.getPriority() != classification.getPriority(); - boolean serviceLevelChanged = true; - if (oldClassification.getServiceLevel() == null) { - if (classification.getServiceLevel() == null) { - serviceLevelChanged = false; - } - } else if (oldClassification.getServiceLevel().equals(classification.getServiceLevel())) { - serviceLevelChanged = false; - } + + boolean serviceLevelChanged = !Objects.equals(oldClassification.getServiceLevel(), + classification.getServiceLevel()); if (priorityChanged || serviceLevelChanged) { Map args = new HashMap<>(); - args.put(TaskUpdateOnClassificationChangeExecutor.CLASSIFICATION_ID, classificationImpl.getId()); - args.put(TaskUpdateOnClassificationChangeExecutor.PRIORITY_CHANGED, String.valueOf(priorityChanged)); - args.put(TaskUpdateOnClassificationChangeExecutor.SERVICE_LEVEL_CHANGED, + args.put(TaskUpdateJobExecutor.CLASSIFICATION_ID, classificationImpl.getId()); + args.put(TaskUpdateJobExecutor.PRIORITY_CHANGED, 
String.valueOf(priorityChanged)); + args.put(TaskUpdateJobExecutor.SERVICE_LEVEL_CHANGED, String.valueOf(serviceLevelChanged)); Job job = new Job(); job.setCreated(Instant.now()); job.setState(Job.State.READY); - job.setExecutor(TaskUpdateOnClassificationChangeExecutor.class.getName()); + job.setExecutor(ClassificationChangedJobExecutor.class.getName()); job.setArguments(args); + job.setType(Job.Type.CLASSIFICATIONCHANGEDJOB); taskanaEngine.getSqlSession().getMapper(JobMapper.class).insertJob(job); } diff --git a/lib/taskana-core/src/main/java/pro/taskana/impl/Job.java b/lib/taskana-core/src/main/java/pro/taskana/impl/Job.java index d5d64c9ac..d680bcf02 100644 --- a/lib/taskana-core/src/main/java/pro/taskana/impl/Job.java +++ b/lib/taskana-core/src/main/java/pro/taskana/impl/Job.java @@ -15,9 +15,18 @@ public class Job { private Instant started; private Instant completed; private State state; + private Type type; + private int retryCount; private String executor; + private String errors; Map arguments; + public Job() { + created = Instant.now(); + state = State.READY; + retryCount = 0; + } + public Integer getJobId() { return jobId; } @@ -74,6 +83,30 @@ public class Job { this.arguments = arguments; } + public Type getType() { + return type; + } + + public void setType(Type type) { + this.type = type; + } + + public int getRetryCount() { + return retryCount; + } + + public void setRetryCount(int retryCount) { + this.retryCount = retryCount; + } + + public String getErrors() { + return errors; + } + + public void setErrors(String errors) { + this.errors = errors; + } + @Override public String toString() { StringBuilder builder = new StringBuilder(); @@ -87,10 +120,16 @@ public class Job { builder.append(completed); builder.append(", state="); builder.append(state); + builder.append(", type="); + builder.append(type); + builder.append(", retryCount="); + builder.append(retryCount); builder.append(", executor="); builder.append(executor); builder.append(", arguments="); builder.append(arguments); + builder.append(", errors="); + builder.append(errors); builder.append("]"); return builder.toString(); } @@ -107,4 +146,11 @@ public class Job { COMPLETED } + /** + * This enum controls the type of a job. 
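Because the new no-arg constructor already initializes created, state and retryCount, scheduling a follow-up job reduces to setting type, executor and arguments; a minimal sketch (class name and the task/classification ids are placeholders):

    package pro.taskana.example; // hypothetical

    import java.util.HashMap;
    import java.util.Map;

    import pro.taskana.impl.Job;
    import pro.taskana.impl.SingleJobExecutor;
    import pro.taskana.impl.TaskUpdateJobExecutor;

    public final class JobSketch {

        private JobSketch() {
        }

        static Job newUpdateTasksJob() {
            Map<String, String> args = new HashMap<>();
            args.put(SingleJobExecutor.TASKIDS, "TKI:001,TKI:002"); // placeholder ids
            args.put(SingleJobExecutor.CLASSIFICATION_ID, "CLI:001"); // placeholder id
            args.put(SingleJobExecutor.PRIORITY_CHANGED, Boolean.toString(true));
            args.put(SingleJobExecutor.SERVICE_LEVEL_CHANGED, Boolean.toString(false));

            Job job = new Job(); // created = now, state = READY, retryCount = 0 via the new constructor
            job.setType(Job.Type.UPDATETASKSJOB);
            job.setExecutor(TaskUpdateJobExecutor.class.getName());
            job.setArguments(args);
            return job;
        }
    }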
+ */ + public enum Type { + CLASSIFICATIONCHANGEDJOB, + UPDATETASKSJOB; + } } diff --git a/lib/taskana-core/src/main/java/pro/taskana/impl/JobRunner.java b/lib/taskana-core/src/main/java/pro/taskana/impl/JobRunner.java index 5069fceaa..ff2c197f5 100644 --- a/lib/taskana-core/src/main/java/pro/taskana/impl/JobRunner.java +++ b/lib/taskana-core/src/main/java/pro/taskana/impl/JobRunner.java @@ -1,13 +1,20 @@ package pro.taskana.impl; import java.time.Instant; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; import java.util.List; +import java.util.Map; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import pro.taskana.BulkOperationResults; import pro.taskana.TaskanaEngine; +import pro.taskana.TaskanaTransactionProvider; +import pro.taskana.exceptions.SystemException; +import pro.taskana.impl.util.LoggerUtils; import pro.taskana.mappings.JobMapper; /** @@ -17,26 +24,48 @@ import pro.taskana.mappings.JobMapper; */ public class JobRunner { - private static final Logger LOGGER = LoggerFactory.getLogger(TaskServiceImpl.class); + private static final Logger LOGGER = LoggerFactory.getLogger(JobRunner.class); private TaskanaEngineImpl taskanaEngine; private JobMapper jobMapper; + private int maxRetryCount; + private TaskanaTransactionProvider> txProvider; public JobRunner(TaskanaEngine taskanaEngine) { this.taskanaEngine = (TaskanaEngineImpl) taskanaEngine; jobMapper = this.taskanaEngine.getSqlSession().getMapper(JobMapper.class); + maxRetryCount = taskanaEngine.getConfiguration().getMaxNumberOfRetriesOfFailedTaskUpdates(); + txProvider = null; + } + + public void registerTransactionProvider( + TaskanaTransactionProvider> txProvider) { + this.txProvider = txProvider; } public BulkOperationResults runJobs() { LOGGER.info("entry to runJobs()"); BulkOperationResults bulkLog = new BulkOperationResults<>(); + Job currentlyProcessedJob = null; try { - taskanaEngine.openConnection(); - List jobs = jobMapper.findJobsToRun(); - for (Job job : jobs) { - BulkOperationResults log = runSingleJob(job); - bulkLog.addAllErrors(log); + List jobs = findJobsToRun(); + while (!jobs.isEmpty()) { // run as long as Jobs are available for processing + for (Job job : jobs) { + currentlyProcessedJob = job; + processAJob(bulkLog, job); + } + jobs = findJobsToRun(); } return bulkLog; + } catch (Exception e) { + if (currentlyProcessedJob != null) { + bulkLog.addError("JobId:" + currentlyProcessedJob.getJobId(), e); + setJobFailed(currentlyProcessedJob, bulkLog); + return bulkLog; + } else { + LOGGER.error("tried to run jobs and caught exception {} ", e); + bulkLog.addError("unknown", e); + return bulkLog; + } } finally { taskanaEngine.returnConnection(); LOGGER.info("exit from runJobs(). 
Returning result {} ", bulkLog); @@ -44,10 +73,228 @@ public class JobRunner { } + private List findJobsToRun() { + final List result = new ArrayList<>(); + if (txProvider != null) { + txProvider.executeInTransaction(() -> { // each job in its own transaction + try { + taskanaEngine.openConnection(); + doFindJobsToRun(result); + return null; + } finally { + taskanaEngine.returnConnection(); + } + }); + } else { + doFindJobsToRun(result); + } + return result; + } + + private BulkOperationResults doFindJobsToRun(List jobs) { + List found = taskanaEngine.getSqlSession().getMapper(JobMapper.class).findJobsToRun(); + jobs.addAll(found); + return null; + } + + private void processAJob(BulkOperationResults bulkLog, Job job) { + BulkOperationResults log; + try { + if (txProvider != null) { + log = txProvider.executeInTransaction(() -> { // each job in its own transaction + try { + taskanaEngine.openConnection(); + return runSingleJob(job); + } finally { + taskanaEngine.returnConnection(); + } + }); + + } else { + log = runSingleJob(job); + } + if (log != null && log.containsErrors() + && Job.Type.UPDATETASKSJOB.equals(job.getType())) { + handleRetryForFailuresFromBulkOperationResult(bulkLog, job, log); + } + } catch (Exception e) { + // transaction was rolled back -> split job into 2 half sized jobs + if (job.getRetryCount() < maxRetryCount) { + rescheduleBisectedJob(bulkLog, job); + } else { + List objectIds; + if (job.getType().equals(Job.Type.UPDATETASKSJOB)) { + String taskIdsAsString = job.getArguments().get(SingleJobExecutor.TASKIDS); + objectIds = Arrays.asList(taskIdsAsString.split(",")); + } else if (job.getType().equals(Job.Type.CLASSIFICATIONCHANGEDJOB)) { + String classificationId = job.getArguments().get(SingleJobExecutor.CLASSIFICATION_ID); + objectIds = Arrays.asList(classificationId); + } else { + throw new SystemException("Unknown Jobtype " + job.getType() + " encountered."); + } + for (String objectId : objectIds) { + bulkLog.addError(objectId, e); + } + setJobFailed(job, bulkLog); + } + } + } + + private void setJobFailed(Job job, BulkOperationResults bulkLog) { + try { + if (txProvider != null) { + txProvider.executeInTransaction(() -> { // each job in its own transaction + try { + taskanaEngine.openConnection(); + return doSetJobFailed(job, bulkLog); + } finally { + taskanaEngine.returnConnection(); + } + }); + } else { + doSetJobFailed(job, bulkLog); + } + } catch (Exception e) { + // transaction was rolled back -> log an Error + LOGGER.error("attempted to set job {} to failed, but caught Exception {}", job, e); + } + + } + + private BulkOperationResults doSetJobFailed(Job job, + BulkOperationResults bulkLog) { + job.setState(Job.State.FAILED); + if (job.getStarted() == null) { + job.setStarted(Instant.now()); + } + if (bulkLog.containsErrors()) { + Map errors = bulkLog.getErrorMap(); + job.setErrors(LoggerUtils.mapToString(errors)); + } + taskanaEngine.getSqlSession().getMapper(JobMapper.class).update(job); + return null; + } + + private void handleRetryForFailuresFromBulkOperationResult(BulkOperationResults bulkLog, Job job, + BulkOperationResults errorLogForThisJob) { + if (job.getRetryCount() < maxRetryCount) { + if (errorLogForThisJob.containsErrors()) { + List failedTasks = errorLogForThisJob.getFailedIds(); + if (!failedTasks.isEmpty()) { // some tasks failed to be processed + LOGGER.error("Errors occurred when running job {}. 
Processing will be retried", job); + scheduleRetryJob(job, failedTasks); + } + } + } else { + bulkLog.addAllErrors(errorLogForThisJob); + setJobFailed(job, errorLogForThisJob); + } + } + + private void rescheduleBisectedJob(BulkOperationResults bulkLog, Job job) { + // the transaction that processed the job was rolled back. + try { + if (txProvider != null) { + txProvider.executeInTransaction(() -> { // each job in its own transaction + try { + taskanaEngine.openConnection(); + return doRescheduleBisectedJob(job); + } finally { + taskanaEngine.returnConnection(); + } + }); + } else { + doRescheduleBisectedJob(job); + } + } catch (Exception e) { + // transaction was rolled back -> log an Error + LOGGER.error("attempted to reschedule bisected jobs for {}, but caught Exception {}", job, e); + } + + } + + private BulkOperationResults doRescheduleBisectedJob(Job job) { + if (job.getType().equals(Job.Type.UPDATETASKSJOB)) { // split the job in halves + Map args = job.getArguments(); + String taskIdsString = args.get(SingleJobExecutor.TASKIDS); + List taskIds = Arrays.asList(taskIdsString.split(",")); + int size = taskIds.size(); + if (size >= 2) { + int halfSize = size % 2 == 0 ? size / 2 : (size / 2 + 1); + List> taskIdListsForNewJobs = partition(taskIds, halfSize); + // now schedule new tasks + + for (List halfSizedTaskIds : taskIdListsForNewJobs) { + Job newJob = new Job(); + newJob.setCreated(Instant.now()); + if (halfSize > 1) { + newJob.setRetryCount(0); + } else { + newJob.setRetryCount(job.getRetryCount() + 1); + } + newJob.setState(Job.State.READY); + newJob.setType(job.getType()); + args.put(SingleJobExecutor.TASKIDS, String.join(",", halfSizedTaskIds)); + newJob.setArguments(args); + newJob.setCreated(Instant.now()); + newJob.setExecutor(job.getExecutor()); + taskanaEngine.getSqlSession().getMapper(JobMapper.class).insertJob(newJob); + } + LOGGER.debug("doRescheduleBisectedJob deleting job {} ", job); + taskanaEngine.getSqlSession().getMapper(JobMapper.class).delete(job); + } + } else { // take care that the job is re-executed + job.setState(Job.State.READY); + job.setRetryCount(job.getRetryCount() + 1); + taskanaEngine.getSqlSession().getMapper(JobMapper.class).update(job); + } + return null; + } + + private void scheduleRetryJob(Job job, List failedTasks) { + if (job.getType().equals(Job.Type.UPDATETASKSJOB)) { + try { + if (txProvider != null) { + txProvider.executeInTransaction(() -> { // each job in its own transaction + try { + taskanaEngine.openConnection(); + return doScheduleRetryJob(job, failedTasks); + } finally { + taskanaEngine.returnConnection(); + } + }); + } else { + doScheduleRetryJob(job, failedTasks); + } + } catch (Exception e) { + // transaction was rolled back -> log an Error + LOGGER.error("attempted to reschedule bisected jobs for {}, but caught Exception {}", job, e); + } + } + } + + private BulkOperationResults doScheduleRetryJob(Job job, List failedTasks) { + LOGGER.debug("entry to doScheduleRetryJob for job {} and failedTasks {}", job, + LoggerUtils.listToString(failedTasks)); + Map args = job.getArguments(); + Job newJob = new Job(); + newJob.setCreated(Instant.now()); + newJob.setRetryCount(job.getRetryCount() + 1); + newJob.setState(Job.State.READY); + newJob.setType(job.getType()); + args.put(SingleJobExecutor.TASKIDS, String.join(",", failedTasks)); + newJob.setArguments(args); + newJob.setExecutor(job.getExecutor()); + taskanaEngine.getSqlSession().getMapper(JobMapper.class).insertJob(newJob); + LOGGER.debug("doScheduleRetryJob deleting job {} and 
scheduling {} ", job, newJob); + taskanaEngine.getSqlSession().getMapper(JobMapper.class).delete(job); + return null; + } + private BulkOperationResults runSingleJob(Job job) { LOGGER.debug("entry to runSingleJob(job = {})", job); - BulkOperationResults bulkLog = new BulkOperationResults<>(); - if (Job.State.READY.equals(job.getState())) { + BulkOperationResults bulkLog; + if (job.getStarted() == null) { job.setStarted(Instant.now()); } job.setState(Job.State.RUNNING); @@ -55,26 +302,35 @@ public class JobRunner { SingleJobExecutor executor; try { executor = (SingleJobExecutor) Class.forName(job.getExecutor()).newInstance(); - bulkLog = executor.runSingleJob(job, taskanaEngine); - - } catch (Exception e) { - bulkLog.addError("JobId:" + job.getJobId(), e); - job.setCompleted(Instant.now()); - job.setState(Job.State.FAILED); - jobMapper.update(job); - return bulkLog; + } catch (InstantiationException | IllegalAccessException | ClassNotFoundException e) { + LOGGER.error("When attempting to load class {} caught Exception {} ", job.getExecutor(), e); + throw new SystemException("When attempting to load class " + job.getExecutor() + " caught Exception " + e); + } + bulkLog = executor.runSingleJob(job, taskanaEngine); + + if (!bulkLog.containsErrors()) { + LOGGER.debug("runSingleJob deletin job {} ", job); + jobMapper.delete(job); } - job.setCompleted(Instant.now()); - job.setState(Job.State.COMPLETED); - jobMapper.update(job); LOGGER.debug("exit from runSingleJob"); - if (bulkLog.containsErrors()) { - LOGGER.error("Errors occurred when running job {}.", job); - for (String id : bulkLog.getFailedIds()) { - LOGGER.error(id + bulkLog.getErrorForId(id)); - } - } return bulkLog; } + + static List> partition(Collection members, int maxSize) { + List> result = new ArrayList<>(); + List internal = new ArrayList<>(); + for (T member : members) { + internal.add(member); + if (internal.size() == maxSize) { + result.add(internal); + internal = new ArrayList<>(); + } + } + if (!internal.isEmpty()) { + result.add(internal); + } + return result; + } + } diff --git a/lib/taskana-core/src/main/java/pro/taskana/impl/SingleJobExecutor.java b/lib/taskana-core/src/main/java/pro/taskana/impl/SingleJobExecutor.java index 33ec9cc51..6d4a221f6 100644 --- a/lib/taskana-core/src/main/java/pro/taskana/impl/SingleJobExecutor.java +++ b/lib/taskana-core/src/main/java/pro/taskana/impl/SingleJobExecutor.java @@ -9,5 +9,10 @@ import pro.taskana.BulkOperationResults; */ public interface SingleJobExecutor { + String TASKIDS = "taskIds"; + String CLASSIFICATION_ID = "classificationId"; + String PRIORITY_CHANGED = "priorityChanged"; + String SERVICE_LEVEL_CHANGED = "serviceLevelChanged"; + BulkOperationResults runSingleJob(Job job, TaskanaEngineImpl taskanaEngine); } diff --git a/lib/taskana-core/src/main/java/pro/taskana/impl/TaskServiceImpl.java b/lib/taskana-core/src/main/java/pro/taskana/impl/TaskServiceImpl.java index 7772dc123..92ce5554f 100644 --- a/lib/taskana-core/src/main/java/pro/taskana/impl/TaskServiceImpl.java +++ b/lib/taskana-core/src/main/java/pro/taskana/impl/TaskServiceImpl.java @@ -301,7 +301,7 @@ public class TaskServiceImpl implements TaskService { TaskImpl task = (TaskImpl) taskToCreate; try { taskanaEngine.openConnection(); - if (task.getId() != null && !task.getId().equals("")) { + if (task.getId() != null && !"".equals(task.getId())) { throw new TaskAlreadyExistException(task.getId()); } else { LOGGER.debug("Task {} cannot be be found, so it can be created.", task.getId()); @@ -1416,7 +1416,6 @@ public 
class TaskServiceImpl implements TaskService { Classification classification = classificationService.getClassification(classificationId); task.setClassificationSummary(classification.asSummary()); - PrioDurationHolder prioDurationFromAttachments = handleAttachmentsOnClassificationUpdate(task); updateClassificationRelatedProperties(task, task, prioDurationFromAttachments); diff --git a/lib/taskana-core/src/main/java/pro/taskana/impl/TaskUpdateJobExecutor.java b/lib/taskana-core/src/main/java/pro/taskana/impl/TaskUpdateJobExecutor.java new file mode 100644 index 000000000..25f16a03d --- /dev/null +++ b/lib/taskana-core/src/main/java/pro/taskana/impl/TaskUpdateJobExecutor.java @@ -0,0 +1,60 @@ +package pro.taskana.impl; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import pro.taskana.BulkOperationResults; +import pro.taskana.impl.util.LoggerUtils; + +/** + * This class performs task updates if a classification is changed. + * + * @author bbr + */ +public class TaskUpdateJobExecutor implements SingleJobExecutor { + + private static final Logger LOGGER = LoggerFactory.getLogger(TaskUpdateJobExecutor.class); + + private TaskanaEngineImpl taskanaEngine; + private String classificationId; + private List affectedTaskIds; + + public TaskUpdateJobExecutor() { + } + + @Override + public BulkOperationResults runSingleJob(Job job, TaskanaEngineImpl taskanaEngine) { + this.taskanaEngine = taskanaEngine; + Map args = job.getArguments(); + String taskIdsString = args.get(TASKIDS); + affectedTaskIds = Arrays.asList(taskIdsString.split(",")); + + classificationId = args.get(CLASSIFICATION_ID); + BulkOperationResults bulkLog = new BulkOperationResults<>(); + bulkLog.addAllErrors(handleAffectedTasks(job)); + + return bulkLog; + } + + private BulkOperationResults handleAffectedTasks(Job job) { + if (LOGGER.isDebugEnabled()) { + LOGGER.debug("the following tasks will be updated by the current job {}", + LoggerUtils.listToString(affectedTaskIds)); + } + TaskServiceImpl taskService = (TaskServiceImpl) taskanaEngine.getTaskService(); + BulkOperationResults bulkLog = new BulkOperationResults<>(); + for (String taskId : affectedTaskIds) { + try { + bulkLog.addAllErrors(taskService.classificationChanged(taskId, classificationId)); + } catch (Exception e) { + bulkLog.addError(taskId, e); + } + } + return bulkLog; + } + +} diff --git a/lib/taskana-core/src/main/java/pro/taskana/impl/TaskUpdateOnClassificationChangeExecutor.java b/lib/taskana-core/src/main/java/pro/taskana/impl/TaskUpdateOnClassificationChangeExecutor.java deleted file mode 100644 index 08b72e99f..000000000 --- a/lib/taskana-core/src/main/java/pro/taskana/impl/TaskUpdateOnClassificationChangeExecutor.java +++ /dev/null @@ -1,82 +0,0 @@ -package pro.taskana.impl; - -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import pro.taskana.BulkOperationResults; -import pro.taskana.impl.util.LoggerUtils; -import pro.taskana.mappings.AttachmentMapper; -import pro.taskana.mappings.ClassificationMapper; -import pro.taskana.mappings.TaskMapper; - -/** - * This class performs task updates if a classification is changed. 
- * - * @author bbr - */ -public class TaskUpdateOnClassificationChangeExecutor implements SingleJobExecutor { - - private static final Logger LOGGER = LoggerFactory.getLogger(TaskServiceImpl.class); - public static final String CLASSIFICATION_ID = "classificationId"; - public static final String PRIORITY_CHANGED = "priorityChanged"; - public static final String SERVICE_LEVEL_CHANGED = "serviceLevelChanged"; - - private TaskanaEngineImpl taskanaEngine; - private Job job; - private String classificationId; - private boolean priorityChanged; - private boolean serviceLevelChanged; - private TaskMapper taskMapper; - private ClassificationMapper classificationMapper; - private AttachmentMapper attachmentMapper; - - public TaskUpdateOnClassificationChangeExecutor() { - } - - @Override - public BulkOperationResults runSingleJob(Job job, TaskanaEngineImpl taskanaEngine) { - this.job = job; - this.taskanaEngine = taskanaEngine; - this.taskMapper = taskanaEngine.getSqlSession().getMapper(TaskMapper.class); - this.classificationMapper = taskanaEngine.getSqlSession().getMapper(ClassificationMapper.class); - this.attachmentMapper = taskanaEngine.getSqlSession().getMapper(AttachmentMapper.class); - Map args = job.getArguments(); - classificationId = args.get(CLASSIFICATION_ID); - priorityChanged = Boolean.getBoolean(args.get(PRIORITY_CHANGED)); - serviceLevelChanged = Boolean.getBoolean(args.get(SERVICE_LEVEL_CHANGED)); - BulkOperationResults bulkLog = new BulkOperationResults<>(); - bulkLog.addAllErrors(handleAffectedTasks()); - - return bulkLog; - } - - private BulkOperationResults handleAffectedTasks() { - List tasks = taskMapper.findTasksAffectedByClassificationChange(classificationId); - List taskIdsFromAttachments = attachmentMapper - .findTaskIdsAffectedByClassificationChange(classificationId); - List filteredTaskIdsFromAttachments = taskMapper.filterTaskIdsForNotCompleted(taskIdsFromAttachments); - - Set affectedTaskIds = new HashSet<>(filteredTaskIdsFromAttachments); - for (TaskSummaryImpl task : tasks) { - affectedTaskIds.add(task.getTaskId()); - } - LOGGER.debug("the following tasks are affected by the update of classification {} : {}", classificationId, - LoggerUtils.setToString(affectedTaskIds)); - TaskServiceImpl taskService = (TaskServiceImpl) taskanaEngine.getTaskService(); - BulkOperationResults bulkLog = new BulkOperationResults<>(); - for (String taskId : affectedTaskIds) { - try { - bulkLog.addAllErrors(taskService.classificationChanged(taskId, classificationId)); - } catch (Exception e) { - bulkLog.addError(taskId, e); - } - } - return bulkLog; - } - -} diff --git a/lib/taskana-core/src/main/java/pro/taskana/mappings/JobMapper.java b/lib/taskana-core/src/main/java/pro/taskana/mappings/JobMapper.java index 175a06922..d3398c3fa 100644 --- a/lib/taskana-core/src/main/java/pro/taskana/mappings/JobMapper.java +++ b/lib/taskana-core/src/main/java/pro/taskana/mappings/JobMapper.java @@ -3,6 +3,7 @@ package pro.taskana.mappings; import java.util.List; import java.util.Map; +import org.apache.ibatis.annotations.Delete; import org.apache.ibatis.annotations.Insert; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Result; @@ -19,7 +20,7 @@ import pro.taskana.impl.persistence.MapTypeHandler; public interface JobMapper { @Insert("") void insertJob(@Param("job") Job job); - @Select("SELECT JOB_ID, CREATED, STARTED, COMPLETED, STATE, EXECUTOR, ARGUMENTS " + @Select("") @Results(value = { @Result(property = "jobId", column = "JOB_ID"), @Result(property = "created", 
column = "CREATED"), @Result(property = "started", column = "STARTED"), @Result(property = "completed", column = "COMPLETED"), @Result(property = "state", column = "STATE"), + @Result(property = "type", column = "TYPE"), + @Result(property = "retryCount", column = "RETRY_COUNT"), @Result(property = "executor", column = "EXECUTOR"), + @Result(property = "errors", column = "ERRORS"), @Result(property = "arguments", column = "ARGUMENTS", javaType = Map.class, typeHandler = MapTypeHandler.class) }) List findJobsToRun(); @Update( - value = "UPDATE TASKANA.JOB SET CREATED = #{created}, STARTED = #{started}, COMPLETED = #{completed}, STATE = #{state}, EXECUTOR = #{executor}, " + value = "UPDATE TASKANA.JOB SET CREATED = #{created}, STARTED = #{started}, COMPLETED = #{completed}, STATE = #{state}, " + + "TYPE = #{type}, RETRY_COUNT = #{retryCount}, EXECUTOR = #{executor}, " + + "ERRORS = #{errors}, " + "ARGUMENTS = #{arguments,jdbcType=CLOB ,javaType=java.util.Map,typeHandler=pro.taskana.impl.persistence.MapTypeHandler} " + "where JOB_ID = #{jobId}") void update(Job job); + + @Delete( + value = "DELETE FROM TASKANA.JOB WHERE JOB_ID = #{jobId}") + void delete(Job job); } diff --git a/lib/taskana-core/src/main/resources/sql/taskana-schema.sql b/lib/taskana-core/src/main/resources/sql/taskana-schema.sql index 78c82c9ba..15239cbe3 100644 --- a/lib/taskana-core/src/main/resources/sql/taskana-schema.sql +++ b/lib/taskana-core/src/main/resources/sql/taskana-schema.sql @@ -3,7 +3,7 @@ CREATE TABLE TASKANA.TASKANA_SCHEMA_VERSION( VERSION VARCHAR(255) NOT NULL, PRIMARY KEY (ID) ); -INSERT INTO TASKANA.TASKANA_SCHEMA_VERSION VALUES ('1', '0.1.5'); +INSERT INTO TASKANA.TASKANA_SCHEMA_VERSION VALUES ('1', '0.9.2'); CREATE TABLE TASKANA.CLASSIFICATION( ID CHAR(40) NOT NULL, @@ -173,7 +173,10 @@ CREATE TABLE TASKANA.JOB( STARTED TIMESTAMP NULL, COMPLETED TIMESTAMP NULL, STATE VARCHAR(32) NULL, + TYPE VARCHAR(32) NULL, + RETRY_COUNT INTEGER NOT NULL, EXECUTOR VARCHAR(128) NOT NULL, + ERRORS VARCHAR(4096) NULL, ARGUMENTS CLOB NULL, PRIMARY KEY (JOB_ID) ); diff --git a/lib/taskana-core/src/test/resources/taskana.properties b/lib/taskana-core/src/test/resources/taskana.properties index 656883d3d..c4fdfbb77 100644 --- a/lib/taskana-core/src/test/resources/taskana.properties +++ b/lib/taskana-core/src/test/resources/taskana.properties @@ -7,3 +7,6 @@ taskana.domains= Domain_A , DOMAIN_B taskana.classification.types= TASK , document taskana.classification.categories= EXTERNAL , manual, autoMAtic ,Process + +taskana.job.max.task.updates.per.transaction=5 +taskana.job.max.retries.for.failed.task.updates=3 diff --git a/rest/taskana-rest-spring-example/src/main/java/pro/taskana/rest/ExampleRestApplication.java b/rest/taskana-rest-spring-example/src/main/java/pro/taskana/rest/ExampleRestApplication.java index 16a23f943..31e868641 100644 --- a/rest/taskana-rest-spring-example/src/main/java/pro/taskana/rest/ExampleRestApplication.java +++ b/rest/taskana-rest-spring-example/src/main/java/pro/taskana/rest/ExampleRestApplication.java @@ -27,7 +27,7 @@ import pro.taskana.sampledata.SampleDataGenerator; */ @SpringBootApplication @EnableScheduling -@Import(RestConfiguration.class) +@Import(SampleConfiguration.class) public class ExampleRestApplication { @Autowired diff --git a/rest/taskana-rest-spring-example/src/main/java/pro/taskana/rest/JobScheduler.java b/rest/taskana-rest-spring-example/src/main/java/pro/taskana/rest/JobScheduler.java index 41d60e6a9..2d818fb1b 100644 --- 
a/rest/taskana-rest-spring-example/src/main/java/pro/taskana/rest/JobScheduler.java +++ b/rest/taskana-rest-spring-example/src/main/java/pro/taskana/rest/JobScheduler.java @@ -1,16 +1,17 @@ package pro.taskana.rest; import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.scheduling.annotation.Scheduled; import org.springframework.stereotype.Component; -import org.springframework.transaction.annotation.Transactional; import pro.taskana.BulkOperationResults; import pro.taskana.TaskanaEngine; +import pro.taskana.TaskanaTransactionProvider; import pro.taskana.impl.JobRunner; import pro.taskana.impl.util.LoggerUtils; @@ -23,17 +24,30 @@ import pro.taskana.impl.util.LoggerUtils; public class JobScheduler { private static final Logger LOGGER = LoggerFactory.getLogger(JobScheduler.class); + private static AtomicBoolean jobRunning = new AtomicBoolean(false); + @Autowired private TaskanaEngine taskanaEngine; - @Scheduled(fixedRate = 60000) - @Transactional(rollbackFor = Exception.class) - public void triggerJobs() { - JobRunner runner = new JobRunner(taskanaEngine); - LOGGER.info("Running Jobs"); - BulkOperationResults result = runner.runJobs(); - Map errors = result.getErrorMap(); - LOGGER.info("Job run completed. Result = {} ", LoggerUtils.mapToString(errors)); - } + @Autowired + TaskanaTransactionProvider> springTransactionProvider; + @Scheduled(fixedRate = 60000) + public void triggerJobs() { + boolean otherJobActive = jobRunning.getAndSet(true); + if (!otherJobActive) { // only one job should be active at any time + try { + JobRunner runner = new JobRunner(taskanaEngine); + runner.registerTransactionProvider(springTransactionProvider); + LOGGER.info("Running Jobs"); + BulkOperationResults result = runner.runJobs(); + Map errors = result.getErrorMap(); + LOGGER.info("Job run completed. Result = {} ", LoggerUtils.mapToString(errors)); + } finally { + jobRunning.set(false); + } + } else { + LOGGER.info("Don't run Jobs because already another JobRunner is running"); + } + } } diff --git a/rest/taskana-rest-spring-example/src/main/java/pro/taskana/rest/SampleConfiguration.java b/rest/taskana-rest-spring-example/src/main/java/pro/taskana/rest/SampleConfiguration.java new file mode 100644 index 000000000..943539927 --- /dev/null +++ b/rest/taskana-rest-spring-example/src/main/java/pro/taskana/rest/SampleConfiguration.java @@ -0,0 +1,20 @@ +package pro.taskana.rest; + +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Import; + +import pro.taskana.BulkOperationResults; +import pro.taskana.TaskanaTransactionProvider; + +/** + * Configuration class for Spring sample application. 
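The Spring sample below wires a SpringTransactionProvider into the scheduler; outside Spring the provider is optional, and JobRunner then runs the jobs without a surrounding container-managed transaction. A sketch of a plain-Java trigger (class and method names are illustrative; the String/Exception parameterization matches how JobRunner records errors in this patch):

    package pro.taskana.example; // hypothetical

    import pro.taskana.BulkOperationResults;
    import pro.taskana.TaskanaEngine;
    import pro.taskana.impl.JobRunner;

    public final class PlainJobTrigger {

        private PlainJobTrigger() {
        }

        /** Runs all pending jobs once; no transaction provider is registered here. */
        public static BulkOperationResults<String, Exception> runOnce(TaskanaEngine taskanaEngine) {
            JobRunner runner = new JobRunner(taskanaEngine);
            // registerTransactionProvider(...) is optional; without it, each job runs
            // outside a container-managed transaction
            return runner.runJobs();
        }
    }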
+ */ +@Import(RestConfiguration.class) +public class SampleConfiguration { + + @Bean + public TaskanaTransactionProvider> springTransactionProvider() { + return new SpringTransactionProvider(); + } + +} diff --git a/rest/taskana-rest-spring-example/src/main/java/pro/taskana/rest/SpringTransactionProvider.java b/rest/taskana-rest-spring-example/src/main/java/pro/taskana/rest/SpringTransactionProvider.java new file mode 100644 index 000000000..9765ea5b7 --- /dev/null +++ b/rest/taskana-rest-spring-example/src/main/java/pro/taskana/rest/SpringTransactionProvider.java @@ -0,0 +1,20 @@ +package pro.taskana.rest; + +import org.springframework.stereotype.Component; +import org.springframework.transaction.annotation.Transactional; + +import pro.taskana.BulkOperationResults; +import pro.taskana.TaskanaCallable; +import pro.taskana.TaskanaTransactionProvider; + +@Component +public class SpringTransactionProvider implements TaskanaTransactionProvider> { + + @Override + @Transactional(rollbackFor = Exception.class) + public BulkOperationResults executeInTransaction( + TaskanaCallable> action) { + return action.call(); + } + +} diff --git a/rest/taskana-rest-spring-example/src/main/resources/application.properties b/rest/taskana-rest-spring-example/src/main/resources/application.properties index 648f06abb..39904c200 100644 --- a/rest/taskana-rest-spring-example/src/main/resources/application.properties +++ b/rest/taskana-rest-spring-example/src/main/resources/application.properties @@ -1,5 +1,5 @@ logging.level.pro.taskana=DEBUG -l###logging.level.org.springframework=DEBUG +### logging.level.org.springframework=DEBUG ######## Taskana DB ####### datasource.url=jdbc:h2:mem:taskana;IGNORECASE=TRUE;LOCK_MODE=0;INIT=CREATE SCHEMA IF NOT EXISTS TASKANA datasource.driverClassName=org.h2.Driver diff --git a/rest/taskana-rest-spring-example/src/main/resources/taskana.properties b/rest/taskana-rest-spring-example/src/main/resources/taskana.properties index a823cd532..19f0a8b6b 100644 --- a/rest/taskana-rest-spring-example/src/main/resources/taskana.properties +++ b/rest/taskana-rest-spring-example/src/main/resources/taskana.properties @@ -5,3 +5,6 @@ taskana.roles.monitor=john|teamlead_2 | monitor taskana.domains=DOMAIN_A,DOMAIN_B,DOMAIN_C taskana.classification.types=TASK,DOCUMENT taskana.classification.categories= EXTERNAL , manual, autoMAtic ,Process + +taskana.job.max.task.updates.per.transaction=5 +taskana.job.max.retries.for.failed.task.updates=3 diff --git a/rest/taskana-rest-spring-example/src/test/java/pro/taskana/rest/ClassificationControllerIntTest.java b/rest/taskana-rest-spring-example/src/test/java/pro/taskana/rest/ClassificationControllerIntTest.java index 87add9a39..bd4d2fa26 100644 --- a/rest/taskana-rest-spring-example/src/test/java/pro/taskana/rest/ClassificationControllerIntTest.java +++ b/rest/taskana-rest-spring-example/src/test/java/pro/taskana/rest/ClassificationControllerIntTest.java @@ -9,11 +9,19 @@ import java.io.IOException; import java.io.OutputStreamWriter; import java.net.HttpURLConnection; import java.net.URL; +import java.time.Instant; +import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; +import java.util.List; +import org.json.JSONObject; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import 
org.springframework.boot.test.context.SpringBootTest.WebEnvironment; import org.springframework.boot.web.server.LocalServerPort; @@ -37,12 +45,27 @@ import org.springframework.web.client.RestTemplate; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; +import pro.taskana.Classification; +import pro.taskana.Task; +import pro.taskana.exceptions.InvalidArgumentException; +import pro.taskana.exceptions.NotAuthorizedException; +import pro.taskana.rest.resource.ClassificationResource; import pro.taskana.rest.resource.ClassificationSummaryResource; +import pro.taskana.rest.resource.TaskResource; +import pro.taskana.rest.resource.assembler.ClassificationResourceAssembler; +import pro.taskana.rest.resource.assembler.TaskResourceAssembler; @RunWith(SpringRunner.class) @SpringBootTest(classes = RestConfiguration.class, webEnvironment = WebEnvironment.RANDOM_PORT, properties = {"devMode=true"}) public class ClassificationControllerIntTest { + @Autowired + private ClassificationResourceAssembler classificationResourceAssembler; + + @Autowired + private TaskResourceAssembler taskResourceAssembler; + + private static final Logger LOGGER = LoggerFactory.getLogger(ClassificationControllerIntTest.class); String server = "http://127.0.0.1:"; RestTemplate template; HttpEntity request; @@ -212,6 +235,110 @@ public class ClassificationControllerIntTest { }); } + @Test + public void testUpdateClassificationPrioServiceLevel() + throws IOException, InterruptedException, NotAuthorizedException, InvalidArgumentException { + + // 1st step: get old classification : + Instant before = Instant.now(); + + ResponseEntity response = template.exchange( + "http://127.0.0.1:" + port + "/v1/classifications/CLI:100000000000000000000000000000000003", + HttpMethod.GET, + request, + new ParameterizedTypeReference() { + + }); + + assertNotNull(response.getBody().getLink(Link.REL_SELF)); + ClassificationResource classification = response.getBody(); + + // 2nd step: modify classification and trigger update + classification.removeLinks(); + classification.setServiceLevel("P5D"); + classification.setPriority(1000); + + String updatedClassification = new JSONObject(classification).toString(); + + URL url = new URL(server + port + "/v1/classifications/CLI:100000000000000000000000000000000003"); + HttpURLConnection con = (HttpURLConnection) url.openConnection(); + con.setRequestMethod("PUT"); + con.setRequestProperty("Authorization", "Basic dGVhbWxlYWRfMTp0ZWFtbGVhZF8x"); + con.setDoOutput(true); + con.setRequestProperty("Content-Type", "application/json"); + BufferedWriter out = new BufferedWriter(new OutputStreamWriter(con.getOutputStream())); + out.write(updatedClassification); + out.flush(); + out.close(); + assertEquals(200, con.getResponseCode()); + con.disconnect(); + + // wait a minute to give JobScheduler a chance to run + LOGGER.info("About to sleep for 70 seconds to give JobScheduler a chance to process the classification change"); + Thread.sleep(70000); + LOGGER.info("Sleeping ended. Continuing .... 
"); + + // verify the classification modified timestamp is after 'before' + ResponseEntity repeatedResponse = template.exchange( + "http://127.0.0.1:" + port + "/v1/classifications/CLI:100000000000000000000000000000000003", + HttpMethod.GET, + request, + new ParameterizedTypeReference() { + + }); + + ClassificationResource modifiedClassificationResource = repeatedResponse.getBody(); + Classification modifiedClassification = classificationResourceAssembler.toModel(modifiedClassificationResource); + + assertTrue(!before.isAfter(modifiedClassification.getModified())); + + List affectedTasks = new ArrayList<>( + Arrays.asList("TKI:000000000000000000000000000000000003", "TKI:000000000000000000000000000000000004", + "TKI:000000000000000000000000000000000005", "TKI:000000000000000000000000000000000006", + "TKI:000000000000000000000000000000000007", "TKI:000000000000000000000000000000000008", + "TKI:000000000000000000000000000000000009", "TKI:000000000000000000000000000000000010", + "TKI:000000000000000000000000000000000011", "TKI:000000000000000000000000000000000012", + "TKI:000000000000000000000000000000000013", "TKI:000000000000000000000000000000000014", + "TKI:000000000000000000000000000000000015", "TKI:000000000000000000000000000000000016", + "TKI:000000000000000000000000000000000017", "TKI:000000000000000000000000000000000018", + "TKI:000000000000000000000000000000000019", "TKI:000000000000000000000000000000000020", + "TKI:000000000000000000000000000000000021", "TKI:000000000000000000000000000000000022", + "TKI:000000000000000000000000000000000023", "TKI:000000000000000000000000000000000024", + "TKI:000000000000000000000000000000000025", "TKI:000000000000000000000000000000000026", + "TKI:000000000000000000000000000000000027", "TKI:000000000000000000000000000000000028", + "TKI:000000000000000000000000000000000029", "TKI:000000000000000000000000000000000030", + "TKI:000000000000000000000000000000000031", "TKI:000000000000000000000000000000000032", + "TKI:000000000000000000000000000000000033", "TKI:000000000000000000000000000000000034", + "TKI:000000000000000000000000000000000035", "TKI:000000000000000000000000000000000100", + "TKI:000000000000000000000000000000000101", "TKI:000000000000000000000000000000000102", + "TKI:000000000000000000000000000000000103")); + for (String taskId : affectedTasks) { + verifyTaskIsModifiedAfter(taskId, before); + } + + } + + private void verifyTaskIsModifiedAfter(String taskId, Instant before) throws InvalidArgumentException { + RestTemplate admTemplate = getRestTemplate(); + HttpHeaders admHeaders = new HttpHeaders(); + admHeaders.add("Authorization", "Basic YWRtaW46YWRtaW4="); // admin:admin + + HttpEntity admRequest = new HttpEntity(admHeaders); + + ResponseEntity taskResponse = admTemplate.exchange( + "http://127.0.0.1:" + port + "/v1/tasks/" + taskId, + HttpMethod.GET, + admRequest, + new ParameterizedTypeReference() { + + }); + + TaskResource taskResource = taskResponse.getBody(); + Task task = taskResourceAssembler.toModel(taskResource); + + assertTrue(!before.isAfter(task.getModified())); + } + /** * Return a REST template which is capable of dealing with responses in HAL format * @@ -226,7 +353,7 @@ public class ClassificationControllerIntTest { converter.setSupportedMediaTypes(MediaType.parseMediaTypes("application/hal+json")); converter.setObjectMapper(mapper); - RestTemplate template = new RestTemplate(Collections.>singletonList(converter)); + RestTemplate template = new RestTemplate(Collections.> singletonList(converter)); return template; } diff 
--git a/rest/taskana-rest-spring/src/main/java/pro/taskana/rest/resource/ClassificationResource.java b/rest/taskana-rest-spring/src/main/java/pro/taskana/rest/resource/ClassificationResource.java index 1b2f7050e..a481377b8 100644 --- a/rest/taskana-rest-spring/src/main/java/pro/taskana/rest/resource/ClassificationResource.java +++ b/rest/taskana-rest-spring/src/main/java/pro/taskana/rest/resource/ClassificationResource.java @@ -205,4 +205,55 @@ public class ClassificationResource extends ResourceSupport { public void setCustom8(String custom8) { this.custom8 = custom8; } + + @Override + public String toString() { + StringBuilder builder = new StringBuilder(); + builder.append("ClassificationResource [classificationId="); + builder.append(classificationId); + builder.append(", key="); + builder.append(key); + builder.append(", parentId="); + builder.append(parentId); + builder.append(", category="); + builder.append(category); + builder.append(", type="); + builder.append(type); + builder.append(", domain="); + builder.append(domain); + builder.append(", isValidInDomain="); + builder.append(isValidInDomain); + builder.append(", created="); + builder.append(created); + builder.append(", modified="); + builder.append(modified); + builder.append(", name="); + builder.append(name); + builder.append(", description="); + builder.append(description); + builder.append(", priority="); + builder.append(priority); + builder.append(", serviceLevel="); + builder.append(serviceLevel); + builder.append(", applicationEntryPoint="); + builder.append(applicationEntryPoint); + builder.append(", custom1="); + builder.append(custom1); + builder.append(", custom2="); + builder.append(custom2); + builder.append(", custom3="); + builder.append(custom3); + builder.append(", custom4="); + builder.append(custom4); + builder.append(", custom5="); + builder.append(custom5); + builder.append(", custom6="); + builder.append(custom6); + builder.append(", custom7="); + builder.append(custom7); + builder.append(", custom8="); + builder.append(custom8); + builder.append("]"); + return builder.toString(); + } }
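To illustrate the batching behaviour of the new JobRunner.partition helper, a sketch of a unit test (hypothetical test class; it assumes the method stays package-private in pro.taskana.impl and relies on List iteration order):

    package pro.taskana.impl; // same package, because partition is package-private

    import static org.junit.Assert.assertEquals;

    import java.util.Arrays;
    import java.util.List;

    import org.junit.Test;

    public class JobRunnerPartitionTest {

        @Test
        public void partitionFillsBatchesUpToMaxSize() {
            List<String> ids = Arrays.asList("a", "b", "c", "d", "e", "f", "g");

            List<List<String>> batches = JobRunner.partition(ids, 3);

            // seven elements with maxSize 3 yield batches of 3, 3 and 1
            assertEquals(3, batches.size());
            assertEquals(Arrays.asList("a", "b", "c"), batches.get(0));
            assertEquals(Arrays.asList("d", "e", "f"), batches.get(1));
            assertEquals(Arrays.asList("g"), batches.get(2));
        }
    }

The same helper drives doRescheduleBisectedJob: a failed UPDATETASKSJOB carrying five task ids, for example, is replaced by two new jobs of three and two ids, each starting with retry count 0.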