TSK-987: Check and fix problems identified by spotbugs

Benjamin Eckstein 2019-12-11 12:35:55 +01:00 committed by Mustapha Zorgati
parent 0d24a29f68
commit 7ba8def59a
17 changed files with 77 additions and 71 deletions

View File

@ -2,10 +2,12 @@ package pro.taskana.simplehistory.configuration;
import java.io.BufferedReader;
import java.io.IOException;
+ import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.io.StringReader;
import java.io.StringWriter;
+ import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.SQLException;
@ -32,7 +34,6 @@ public class DbSchemaCreator {
public DbSchemaCreator(DataSource dataSource, String schema) throws SQLException {
this.dataSource = dataSource;
this.schemaName = schema;
- run();
}
/**
@ -49,8 +50,9 @@ public class DbSchemaCreator {
runner.setLogWriter(logWriter);
runner.setErrorLogWriter(errorLogWriter);
try {
- BufferedReader reader = new BufferedReader(new InputStreamReader(this.getClass()
- .getResourceAsStream(DB_SCHEMA)));
+ InputStream resourceAsStream = this.getClass()
+ .getResourceAsStream(DB_SCHEMA);
+ BufferedReader reader = new BufferedReader(new InputStreamReader(resourceAsStream, StandardCharsets.UTF_8));
runner.runScript(getSqlWithSchemaNameParsed(reader));
} finally {
runner.closeConnection();
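The hunks above switch the schema-script reader to an explicit UTF-8 charset instead of the platform default (SpotBugs: reliance on default encoding) and move run() out of the constructor. A minimal, self-contained sketch of the charset-safe resource read; the class name and resource path are illustrative, not taken from the Taskana sources:

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.InputStream;
    import java.io.InputStreamReader;
    import java.nio.charset.StandardCharsets;

    public class ScriptLoader {

        // Reads a classpath resource with an explicit charset so the result does not
        // depend on the JVM's platform encoding (the issue fixed above).
        public String load(String resourcePath) throws IOException {
            InputStream in = getClass().getResourceAsStream(resourcePath);
            if (in == null) {
                throw new IOException("resource not found: " + resourcePath);
            }
            try (BufferedReader reader = new BufferedReader(
                    new InputStreamReader(in, StandardCharsets.UTF_8))) {
                StringBuilder script = new StringBuilder();
                String line;
                while ((line = reader.readLine()) != null) {
                    script.append(line).append('\n');
                }
                return script.toString();
            }
        }
    }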

View File

@ -34,11 +34,7 @@ public class TaskanaHistoryEngineImpl implements TaskanaHistoryEngine {
protected TransactionFactory transactionFactory;
protected java.sql.Connection connection = null;
protected DbSchemaCreator dbSchemaCreator;
- protected static ThreadLocal<Deque<SqlSessionManager>> sessionStack = new ThreadLocal<>();
+ protected static final ThreadLocal<Deque<SqlSessionManager>> SESSION_STACK = new ThreadLocal<>();
protected TaskanaHistory taskanaHistoryService;
@ -47,9 +43,9 @@ public class TaskanaHistoryEngineImpl implements TaskanaHistoryEngine {
createTransactionFactory(this.taskanaEngineConfiguration.getUseManagedTransactions());
this.sessionManager = createSqlSessionManager();
- dbSchemaCreator = new DbSchemaCreator(taskanaEngineConfiguration.getDatasource(),
- taskanaEngineConfiguration.getSchemaName());
+ new DbSchemaCreator(taskanaEngineConfiguration.getDatasource(),
+ taskanaEngineConfiguration.getSchemaName()).
+ run();
}
public static TaskanaHistoryEngineImpl createTaskanaEngine(
@ -161,10 +157,10 @@ public class TaskanaHistoryEngineImpl implements TaskanaHistoryEngine {
* @return Stack of SqlSessionManager
*/
protected static Deque<SqlSessionManager> getSessionStack() {
- Deque<SqlSessionManager> stack = sessionStack.get();
+ Deque<SqlSessionManager> stack = SESSION_STACK.get();
if (stack == null) {
stack = new ArrayDeque<>();
- sessionStack.set(stack);
+ SESSION_STACK.set(stack);
}
return stack;
}
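The per-thread session stack becomes a static final constant (SESSION_STACK). A minimal sketch of the same idea; ThreadLocal.withInitial is one way to drop the null check in getSessionStack(), and the String element type merely stands in for SqlSessionManager:

    import java.util.ArrayDeque;
    import java.util.Deque;

    public final class SessionStackHolder {

        // Final ThreadLocal constant; withInitial() supplies an empty deque per thread,
        // so callers never see null and no lazy-init check is needed.
        private static final ThreadLocal<Deque<String>> SESSION_STACK =
                ThreadLocal.withInitial(ArrayDeque::new);

        private SessionStackHolder() {
        }

        public static void push(String session) {
            SESSION_STACK.get().push(session);
        }

        public static String pop() {
            return SESSION_STACK.get().pop();
        }
    }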

View File

@ -6,6 +6,7 @@ import java.io.PrintWriter;
import java.io.StringReader;
import java.io.StringWriter;
import java.nio.charset.StandardCharsets;
+ import java.sql.Connection;
import java.sql.SQLException;
import javax.sql.DataSource;
@ -21,15 +22,20 @@ public class SampleDataGenerator {
private static final Logger LOGGER = LoggerFactory.getLogger(SampleDataGenerator.class);
private static final String TEST_DATA = "/sql.sample-data";
- private static final String CLEAR = TEST_DATA + "/clear-db.sql";
- private static final String HISTORY_EVENT = TEST_DATA + "/history-event.sql";
+ private static final String CLEAR = TEST_DATA + "/clear-db.sql";
+ private static final String HISTORY_EVENT = TEST_DATA + "/history-event.sql";
private ScriptRunner runner;
DataSource dataSource;
+ String dbProductName;
public SampleDataGenerator(DataSource dataSource) throws SQLException {
- if (LOGGER.isTraceEnabled()) {
- LOGGER.trace(dataSource.getConnection().getMetaData().toString());
+ try (Connection connection = dataSource.getConnection()) {
+ dbProductName = connection.getMetaData().getDatabaseProductName();
+ if (LOGGER.isTraceEnabled()) {
+ String msg = connection.getMetaData().toString();
+ LOGGER.trace(msg);
+ }
}
this.dataSource = dataSource;
@ -43,7 +49,7 @@ public class SampleDataGenerator {
StringWriter errorWriter = new StringWriter();
PrintWriter errorLogWriter = new PrintWriter(errorWriter);
try {
- runner.runScript(selectSchemaScript(dataSource.getConnection().getMetaData().getDatabaseProductName(), schemaName));
+ runner.runScript(selectSchemaScript(dbProductName, schemaName));
runner.setStopOnError(false);
runner.runScript(new BufferedReader(
new InputStreamReader(this.getClass().getResourceAsStream(CLEAR), StandardCharsets.UTF_8)));
@ -68,8 +74,8 @@ public class SampleDataGenerator {
private StringReader selectSchemaScript(String dbProductName, String schemaName) {
return new StringReader("PostgreSQL".equals(dbProductName)
- ? "SET search_path TO " + schemaName + ";"
- : "SET SCHEMA " + schemaName + ";");
+ ? "SET search_path TO " + schemaName + ";"
+ : "SET SCHEMA " + schemaName + ";");
}
}
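The constructor now opens the connection in a try-with-resources block and caches the database product name instead of calling dataSource.getConnection() ad hoc, which leaked the connection. A minimal sketch of that pattern with illustrative names:

    import java.sql.Connection;
    import java.sql.SQLException;
    import javax.sql.DataSource;

    public class ProductNameReader {

        // The connection is closed automatically when the block exits,
        // even if getMetaData() throws.
        public String readProductName(DataSource dataSource) throws SQLException {
            try (Connection connection = dataSource.getConnection()) {
                return connection.getMetaData().getDatabaseProductName();
            }
        }
    }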

View File

@ -2,6 +2,8 @@ package pro.taskana;
import java.io.IOException;
import java.io.InputStream;
+ import java.sql.Connection;
+ import java.sql.DatabaseMetaData;
import java.sql.SQLException;
import java.util.Properties;
@ -46,7 +48,10 @@ public class TaskanaProducers {
ctx = new InitialContext();
properties.load(propertyStream);
dataSource = (DataSource) ctx.lookup(properties.getProperty("datasource.jndi"));
- LOGGER.debug("---------------> " + dataSource.getConnection().getMetaData());
+ try (Connection connection = dataSource.getConnection()) {
+ DatabaseMetaData metaData = connection.getMetaData();
+ LOGGER.debug("---------------> " + metaData);
+ }
this.taskanaEngineConfiguration = new TaskanaEngineConfiguration(dataSource, true, false, "TASKANA");
} catch (NamingException | SQLException | IOException e) {
LOGGER.error("Could not start Taskana: ", e);

View File

@ -2,10 +2,12 @@ package pro.taskana.configuration;
import java.io.BufferedReader;
import java.io.IOException;
+ import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.io.StringReader;
import java.io.StringWriter;
+ import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Map;
@ -61,12 +63,16 @@ public class DbSchemaCreator {
*/
public void run() throws SQLException {
Connection connection = dataSource.getConnection();
- LOGGER.debug("Using database of type {} with url '{}'", connection.getMetaData().getDatabaseProductName(), connection.getMetaData().getURL());
+ LOGGER.debug("Using database of type {} with url '{}'", connection.getMetaData().getDatabaseProductName(),
+ connection.getMetaData().getURL());
ScriptRunner runner = getScriptRunnerInstance(connection);
try {
if (!isSchemaPreexisting(connection)) {
- BufferedReader reader = new BufferedReader(new InputStreamReader(this.getClass()
- .getResourceAsStream(selectDbScriptFileName(connection.getMetaData().getDatabaseProductName()))));
+ String scriptPath = selectDbScriptFileName(connection.getMetaData().getDatabaseProductName());
+ InputStream resourceAsStream = this.getClass()
+ .getResourceAsStream(scriptPath);
+ BufferedReader reader = new BufferedReader(
+ new InputStreamReader(resourceAsStream, StandardCharsets.UTF_8));
runner.runScript(getSqlSchemaNameParsed(reader));
}
} finally {
@ -84,15 +90,17 @@ public class DbSchemaCreator {
runner.setLogWriter(logWriter);
runner.setErrorLogWriter(errorLogWriter);
return runner;
}
}
private boolean isSchemaPreexisting(Connection connection) {
ScriptRunner runner = getScriptRunnerInstance(connection);
StringWriter errorWriter = new StringWriter();
runner.setErrorLogWriter(new PrintWriter(errorWriter));
try {
- BufferedReader reader = new BufferedReader(new InputStreamReader(this.getClass()
- .getResourceAsStream(selectDbSchemaDetectionScript(connection.getMetaData().getDatabaseProductName()))));
+ String scriptPath = selectDbSchemaDetectionScript(connection.getMetaData().getDatabaseProductName());
+ InputStream resourceAsStream = this.getClass()
+ .getResourceAsStream(scriptPath);
+ BufferedReader reader = new BufferedReader(new InputStreamReader(resourceAsStream, StandardCharsets.UTF_8));
runner.runScript(getSqlSchemaNameParsed(reader));
} catch (Exception e) {
LOGGER.debug("Schema does not exist.");

View File

@ -5,6 +5,7 @@ import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
+ import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.SQLException;
import java.time.Duration;
@ -332,7 +333,7 @@ public class TaskanaEngineConfiguration {
LOGGER.error("taskana properties file {} was not found on classpath.",
propertiesFile);
} else {
- props.load(new InputStreamReader(inputStream));
+ props.load(new InputStreamReader(inputStream, StandardCharsets.UTF_8));
LOGGER.debug("Role properties were loaded from file {} from classpath.", propertiesFile);
}
} else {

View File

@ -9,20 +9,6 @@ import pro.taskana.history.api.TaskanaHistoryEvent;
*/
public class TaskEvent extends TaskanaHistoryEvent {
- protected String taskId;
- protected String businessProcessId;
- protected String parentBusinessProcessId;
- protected String domain;
- protected String workbasketKey;
- protected String taskClassificationCategory;
- protected String taskClassificationKey;
- protected String attachmentClassificationKey;
- protected String porCompany;
- protected String porSystem;
- protected String porInstance;
- protected String porType;
- protected String porValue;
public TaskEvent(Task task) {
super();
taskId = task.getId();
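The removed fields duplicate fields already declared in TaskanaHistoryEvent, so they masked the superclass state (SpotBugs: MF_CLASS_MASKS_FIELD). A small self-contained sketch of why that masking is a bug; the class names here are made up:

    class BaseEvent {
        protected String domain;

        public String getDomain() {
            return domain;
        }
    }

    class MaskedEvent extends BaseEvent {
        // Redeclaring the field hides BaseEvent.domain.
        protected String domain;

        MaskedEvent(String domain) {
            this.domain = domain; // assigns MaskedEvent.domain, not BaseEvent.domain
        }
    }

    public class MaskingDemo {
        public static void main(String[] args) {
            // Prints "null": getDomain() reads BaseEvent.domain, which was never set.
            System.out.println(new MaskedEvent("DOMAIN_A").getDomain());
        }
    }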

View File

@ -196,7 +196,7 @@ public class ClassificationServiceImpl implements ClassificationService {
classificationImpl.setModified(Instant.now());
this.initDefaultClassificationValues(classificationImpl);
- if (oldClassification.getCategory() != classificationImpl.getCategory()) {
+ if (!Objects.equals(oldClassification.getCategory(), classificationImpl.getCategory())) {
this.updateCategoryOnAssociatedTasks(classificationImpl, oldClassification);
}
@ -277,8 +277,8 @@ public class ClassificationServiceImpl implements ClassificationService {
@Override
public Classification getClassification(String id) throws ClassificationNotFoundException {
if (id == null) {
- throw new ClassificationNotFoundException(id,
- "Classification for id " + id + " was not found.");
+ throw new ClassificationNotFoundException(null,
+ "Classification for null id is invalid.");
}
LOGGER.debug("entry to getClassification(id = {})", id);
Classification result = null;
@ -299,7 +299,7 @@ public class ClassificationServiceImpl implements ClassificationService {
public Classification getClassification(String key, String domain) throws ClassificationNotFoundException {
LOGGER.debug("entry to getClassification(key = {}, domain = {})", key, domain);
if (key == null) {
- throw new ClassificationNotFoundException(key, domain,
+ throw new ClassificationNotFoundException(null, domain,
"Classification for null key and domain " + domain + " was not found.");
}
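Comparing the category strings with != only checks reference identity; !Objects.equals(...) compares content and tolerates null on either side. A minimal sketch (method and argument names are illustrative):

    import java.util.Objects;

    public class CategoryComparison {

        // Null-safe content comparison, as used in the hunk above.
        public static boolean categoryChanged(String oldCategory, String newCategory) {
            return !Objects.equals(oldCategory, newCategory);
        }

        public static void main(String[] args) {
            System.out.println(categoryChanged("TASK", "TASK"));     // false
            System.out.println(categoryChanged("TASK", "EXTERNAL")); // true
            System.out.println(categoryChanged(null, "TASK"));       // true
            System.out.println(categoryChanged(null, null));         // false
        }
    }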

View File

@ -945,9 +945,8 @@ public class TaskQueryImpl implements TaskQuery {
taskanaEngine.openConnection();
checkOpenAndReadPermissionForSpecifiedWorkbaskets();
setupJoinAndOrderParameters();
- List<TaskSummaryImpl> tasks = new ArrayList<>();
setupAccessIds();
- tasks = taskanaEngine.getSqlSession().selectList(getLinkToMapperScript(), this);
+ List<TaskSummaryImpl> tasks = taskanaEngine.getSqlSession().selectList(getLinkToMapperScript(), this);
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("mapper returned {} resulting Objects: {} ", tasks.size(),
LoggerUtils.listToString(tasks));
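The initial new ArrayList<>() was a dead store: its value was overwritten before ever being read, so declaration and assignment are merged. A tiny before/after sketch; queryTasks() is a placeholder for the MyBatis selectList call:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class DeadStoreDemo {

        static List<String> loadBefore() {
            List<String> tasks = new ArrayList<>(); // dead store: never read
            tasks = queryTasks();
            return tasks;
        }

        static List<String> loadAfter() {
            List<String> tasks = queryTasks();      // single initialization
            return tasks;
        }

        static List<String> queryTasks() {
            return Arrays.asList("TKI:000001", "TKI:000002");
        }

        public static void main(String[] args) {
            System.out.println(loadBefore());
            System.out.println(loadAfter());
        }
    }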

View File

@ -65,8 +65,8 @@ public class ClassificationChangedJob extends AbstractTaskanaJob {
if (!taskIdBatch.isEmpty()) {
String taskIds = String.join(",", affectedTaskIds);
args.put(TASK_IDS, taskIds);
- args.put(PRIORITY_CHANGED, new Boolean(priorityChanged).toString());
- args.put(SERVICE_LEVEL_CHANGED, new Boolean(serviceLevelChanged).toString());
+ args.put(PRIORITY_CHANGED, Boolean.valueOf(priorityChanged).toString());
+ args.put(SERVICE_LEVEL_CHANGED, Boolean.valueOf(serviceLevelChanged).toString());
ScheduledJob job = new ScheduledJob();
job.setType(ScheduledJob.Type.UPDATETASKSJOB);
job.setArguments(args);
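new Boolean(...) always allocates a fresh object and is deprecated since Java 9; Boolean.valueOf(...) returns the cached TRUE/FALSE instances, and Boolean.toString(flag) skips boxing entirely when only the text is needed. A short illustration:

    public class BooleanBoxingDemo {

        public static void main(String[] args) {
            boolean priorityChanged = true;

            // Pattern applied above: reuse the cached wrapper instead of allocating one.
            String viaValueOf = Boolean.valueOf(priorityChanged).toString();

            // Even simpler when only the string is needed (no wrapper at all).
            String direct = Boolean.toString(priorityChanged);

            System.out.println(viaValueOf); // true
            System.out.println(direct);     // true
        }
    }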

View File

@ -24,13 +24,10 @@ public class JobRunner {
private TaskanaEngineImpl taskanaEngine;
private JobServiceImpl jobService;
private TaskanaTransactionProvider<Object> txProvider;
- private int maxRetryCount;
- private int attempt = 0;
public JobRunner(TaskanaEngine taskanaEngine) {
this.taskanaEngine = (TaskanaEngineImpl) taskanaEngine;
jobService = (JobServiceImpl) taskanaEngine.getJobService();
- maxRetryCount = taskanaEngine.getConfiguration().getMaxNumberOfJobRetries();
}
public void registerTransactionProvider(

View File

@ -124,16 +124,16 @@ public class TaskCleanupJob extends AbstractTaskanaJob {
int deletedTaskCount = 0;
if (txProvider != null) {
- Integer count = (Integer) txProvider.executeInTransaction(() -> {
+ int count = (Integer) txProvider.executeInTransaction(() -> {
try {
- return new Integer(deleteTasks(tasksToBeDeleted));
+ return deleteTasks(tasksToBeDeleted);
} catch (Exception e) {
LOGGER.warn("Could not delete tasks.", e);
- return new Integer(0);
+ return 0;
}
});
- LOGGER.debug("exit from deleteTasksTransactionally(), returning {}", count.intValue());
- return count.intValue();
+ LOGGER.debug("exit from deleteTasksTransactionally(), returning {}", count);
+ return count;
} else {
try {
deletedTaskCount = deleteTasks(tasksToBeDeleted);
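Returning an int from the transaction callback lets autoboxing produce the Integer the callback type needs, and new Integer(...) (which always allocates) disappears. A compact sketch; executeInTransaction and deleteTasks are stand-ins for the Taskana APIs:

    import java.util.function.Supplier;

    public class TransactionCountDemo {

        // Stand-in for TaskanaTransactionProvider.executeInTransaction(...).
        static <T> T executeInTransaction(Supplier<T> work) {
            return work.get();
        }

        static int deleteTasks() {
            return 42; // pretend 42 tasks were deleted
        }

        public static void main(String[] args) {
            int count = executeInTransaction(() -> {
                try {
                    return deleteTasks(); // autoboxed to Integer by the Supplier
                } catch (RuntimeException e) {
                    return 0;
                }
            });
            System.out.println("deleted " + count);
        }
    }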

View File

@ -73,15 +73,15 @@ public class WorkbasketCleanupJob extends AbstractTaskanaJob {
private int deleteWorkbasketsTransactionally(List<String> workbasketsToBeDeleted) {
int deletedWorkbasketsCount = 0;
if (txProvider != null) {
- Integer count = (Integer) txProvider.executeInTransaction(() -> {
+ int count = (Integer) txProvider.executeInTransaction(() -> {
try {
- return new Integer(deleteWorkbaskets(workbasketsToBeDeleted));
+ return deleteWorkbaskets(workbasketsToBeDeleted);
} catch (Exception e) {
LOGGER.warn("Could not delete workbaskets.", e);
- return new Integer(0);
+ return 0;
}
});
- return count.intValue();
+ return count;
} else {
try {
deletedWorkbasketsCount = deleteWorkbaskets(workbasketsToBeDeleted);

View File

@ -89,10 +89,10 @@ public final class CurrentUserContext {
try {
Class.forName(WSSUBJECT_CLASSNAME);
LOGGER.debug("WSSubject detected. Assuming that Taskana runs on IBM WebSphere.");
- runningOnWebSphere = new Boolean(true);
+ runningOnWebSphere = Boolean.TRUE;
} catch (ClassNotFoundException e) {
LOGGER.debug("No WSSubject detected. Using JAAS subject further on.");
- runningOnWebSphere = new Boolean(false);
+ runningOnWebSphere = Boolean.FALSE;
}
}
return runningOnWebSphere;

View File

@ -110,7 +110,7 @@ public abstract class AbstractPagingController {
String param = params.getFirst(PAGING_PAGE_SIZE);
params.remove(PAGING_PAGE_SIZE);
try {
- return param != null ? Long.valueOf(param) : Integer.MAX_VALUE;
+ return param != null ? Long.parseLong(param) : Integer.MAX_VALUE;
} catch (NumberFormatException e) {
throw new InvalidArgumentException("page-size must be a integer value.", e.getCause());
}
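Long.parseLong returns the primitive directly, whereas Long.valueOf builds a boxed Long that is immediately unboxed again (SpotBugs flags boxing just to parse a primitive). A small sketch with an illustrative exception type in place of InvalidArgumentException:

    public class PageSizeParsingDemo {

        static long parsePageSize(String param) {
            try {
                // parseLong yields a primitive; no wrapper is created.
                return param != null ? Long.parseLong(param) : Integer.MAX_VALUE;
            } catch (NumberFormatException e) {
                throw new IllegalArgumentException("page-size must be an integer value.", e);
            }
        }

        public static void main(String[] args) {
            System.out.println(parsePageSize("50"));  // 50
            System.out.println(parsePageSize(null));  // 2147483647
        }
    }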

View File

@ -199,14 +199,20 @@ public class ClassificationDefinitionController {
throws ClassificationNotFoundException, NotAuthorizedException, ConcurrencyException,
InvalidArgumentException {
LOGGER.debug("Entry to updateParentChildrenRelations()");
- for (Classification childRes : childrenInFile.keySet()) {
- Classification child = classificationService
- .getClassification(childRes.getKey(), childRes.getDomain());
- String parentKey = childrenInFile.get(childRes);
+ for (Map.Entry<Classification, String> entry : childrenInFile.entrySet()) {
+ Classification childRes = entry.getKey();
+ String parentKey = entry.getValue();
+ String classificationKey = childRes.getKey();
+ String classificationDomain = childRes.getDomain();
+ Classification child = classificationService.getClassification(classificationKey, classificationDomain);
String parentId = (parentKey == null) ? ""
- : classificationService.getClassification(parentKey, childRes.getDomain()).getId();
+ : classificationService.getClassification(parentKey, classificationDomain).getId();
child.setParentKey(parentKey);
child.setParentId(parentId);
classificationService.updateClassification(child);
}
LOGGER.debug("Exit from updateParentChildrenRelations()");
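Iterating keySet() and then calling get(key) does a second lookup per entry (SpotBugs: WMI_WRONG_MAP_ITERATOR); iterating entrySet() reads key and value in one pass, as the hunk above does. A minimal sketch with a plain String map standing in for the Classification map:

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class EntrySetIterationDemo {

        public static void main(String[] args) {
            Map<String, String> childToParentKey = new LinkedHashMap<>();
            childToParentKey.put("CHILD_A", "PARENT_1");
            childToParentKey.put("CHILD_B", null); // a root entry has no parent

            // One pass over the entries: no extra get() per key.
            for (Map.Entry<String, String> entry : childToParentKey.entrySet()) {
                String childKey = entry.getKey();
                String parentKey = entry.getValue();
                System.out.println(childKey + " -> " + (parentKey == null ? "<root>" : parentKey));
            }
        }
    }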

View File

@ -390,7 +390,7 @@ public class TaskController extends AbstractPagingController {
int[] priorities = new int[prioritiesInString.length];
for (int i = 0; i < prioritiesInString.length; i++) {
- priorities[i] = Integer.valueOf(prioritiesInString[i]);
+ priorities[i] = Integer.parseInt(prioritiesInString[i]);
}
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Exit from extractPriorities(), returning {}", priorities);