fixed findbugs

commit 8fee2b3ef9
parent e81a714d78
Author: yanhuqing666
Date:   2017-08-29 15:25:21 +08:00

17 changed files with 51 additions and 79 deletions

.gitignore (vendored) · 3 changes

@@ -121,4 +121,5 @@ src/main/resources/zkconf/rule.xml
 conf/dnindex.properties
 version.txt
 copyResources.bat
-dependency-reduced-pom.xml
+dependency-reduced-pom.xml
+checkstyle-result.out

FindBugs exclude filter

@@ -30,10 +30,6 @@
         <Class name="io.mycat.sqlengine.mpp.RowDataPacketGrouper"/>
     </Match>
-    <!-- need refactor -->
-    <Match>
-        <Package name="io.mycat.util.dataMigrator"/>
-    </Match>
     <Match>
         <Package name="io.mycat.util.dataMigrator.dataIOImpl"/>
     </Match>
@@ -84,30 +80,6 @@
         <Class name="io.mycat.plan.common.time.MyTime"/>
     </Match>
     <!-- need refactor -->
-    <Match>
-        <Bug category="STYLE"/>
-        <Bug pattern="URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD"/>
-        <Class name="io.mycat.route.sequence.handler.SequenceVal"/>
-    </Match>
-    <!-- need refactor -->
-    <Match>
-        <Bug category="STYLE"/>
-        <Bug pattern="URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD"/>
-        <Class name="io.mycat.plan.common.typelib.TypeLib"/>
-    </Match>
-    <!-- need refactor -->
-    <Match>
-        <Bug category="STYLE"/>
-        <Bug pattern="URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD"/>
-        <Class name="io.mycat.plan.common.locale.MyLocaleErrMsgs"/>
-    </Match>
-    <!-- need refactor -->
-    <Match>
-        <Bug category="STYLE"/>
-        <Bug pattern="URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD"/>
-        <Class name="io.mycat.plan.common.locale.MyLocale"/>
-    </Match>
-    <!-- need refactor -->
     <Match>
         <Bug category="STYLE"/>
         <Bug pattern="URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD"/>
@@ -156,7 +128,7 @@
     <Match>
         <Bug category="STYLE"/>
         <Class name="io.mycat.plan.common.time.MyTime"/>
-        <Method name="extract_date_time"/>
+        <Method name="extractDateTime"/>
         <Bug pattern="SF_SWITCH_FALLTHROUGH"/>
     </Match>
     <Match>
@@ -173,11 +145,36 @@
     </Match>
     <!-- STYLE end -->
-    <!-- PERFORMANCE system.gc() is needed? -->
+    <!-- PERFORMANCE -->
+    <!-- system.gc() is needed?-->
     <Match>
         <Bug category="PERFORMANCE"/>
         <Bug pattern="DM_GC"/>
     </Match>
+    <!-- need refactor -->
+    <Match>
+        <Bug category="PERFORMANCE"/>
+        <Class name="io.mycat.plan.common.locale.MyLocale"/>
+        <Bug pattern="URF_UNREAD_FIELD"/>
+    </Match>
+    <!-- need refactor -->
+    <Match>
+        <Bug category="PERFORMANCE"/>
+        <Class name="io.mycat.plan.common.locale.MyLocaleErrMsgs"/>
+        <Bug pattern="URF_UNREAD_FIELD"/>
+    </Match>
+    <!-- need refactor -->
+    <Match>
+        <Bug category="PERFORMANCE"/>
+        <Class name="io.mycat.plan.common.typelib.TypeLib"/>
+        <Bug pattern="URF_UNREAD_FIELD"/>
+    </Match>
+    <!-- need refactor -->
+    <Match>
+        <Bug category="PERFORMANCE"/>
+        <Class name="io.mycat.route.sequence.handler.SequenceVal"/>
+        <Bug pattern="URF_UNREAD_FIELD"/>
+    </Match>
     <!-- PERFORMANCE end -->
     <!-- CORRECTNESS start -->
@@ -187,13 +184,14 @@
         <Method name="setRightNode"/>
         <Bug pattern="NP_NULL_PARAM_DEREF_ALL_TARGETS_DANGEROUS"/>
     </Match>
     <!-- CORRECTNESS end -->
+    <!-- MT_CORRECTNESS start num:7 -->
     <Match>
         <Bug category="MT_CORRECTNESS"/>
         <Class name="io.mycat.MycatServer"/>
-        <Method name="genXATXID"/>
+        <Method name="genXaTxId"/>
         <Bug pattern="JLM_JSR166_UTILCONCURRENT_MONITORENTER"/>
     </Match>
     <Match>

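For orientation: URF_UNREAD_FIELD and its PUBLIC_OR_PROTECTED variant fire on a field that is written but never read, which is why the classes above stay matched until they are refactored. A minimal sketch of code that trips the detector (class and field names are hypothetical, not from this repo):

    public class UnreadFieldExample {
        private int unread; // assigned in the constructor, read nowhere: FindBugs reports URF_UNREAD_FIELD

        public UnreadFieldExample(int v) {
            this.unread = v;
        }
    }

The usual fix is to delete the field or actually read it; the entries above postpone that refactor instead.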
pom.xml

@@ -17,7 +17,7 @@
     <maven.build.timestamp.format>yyyy-MM-dd HH:mm:ss</maven.build.timestamp.format>
     <version.template.file>version.txt.template</version.template.file>
     <version.file>version.txt</version.file>
-    <sever.comment>MyCat Server (OpenCloundDB)</sever.comment>
+    <sever.comment>MyCat Server (OpenCloudDB)</sever.comment>
     <project.build.sourceEncoding>
         UTF-8
     </project.build.sourceEncoding>

MultiNodeQueryHandler.java

@@ -397,6 +397,7 @@ public class MultiNodeQueryHandler extends MultiNodeHandler implements LoadDataR
         if (rrs != null && rrs.getStatement() != null) {
             netInBytes += rrs.getStatement().getBytes().length;
         }
+        assert rrs != null;
         QueryResult queryResult = new QueryResult(session.getSource().getUser(), rrs.getSqlType(),
                 rrs.getStatement(), selectRows, netInBytes, netOutBytes, startTime, System.currentTimeMillis(), resultSize);
         QueryResultDispatcher.dispatchQuery(queryResult);
@@ -658,9 +659,7 @@ public class MultiNodeQueryHandler extends MultiNodeHandler implements LoadDataR
         eof[3] = ++packetId;
         buffer = source.writeToBuffer(eof, buffer);
         source.write(buffer);
-        if (dataMergeSvr != null) {
-            dataMergeSvr.onRowMetaData(columToIndx, fieldCount);
-        }
+        dataMergeSvr.onRowMetaData(columToIndx, fieldCount);
     }

     public void handleDataProcessException(Exception e) {

TwoTableComparator.java

@@ -30,26 +30,20 @@ public class TwoTableComparator implements Comparator<RowDataPacket> {
     public TwoTableComparator(List<FieldPacket> fps1, List<FieldPacket> fps2, List<Order> leftOrders,
                               List<Order> rightOrders, boolean isAllPushDown, HandlerType type, String charset) {
-        boolean isAllPushDown1 = isAllPushDown;
-        HandlerType type1 = type;
         this.leftFields = HandlerTool.createFields(fps1);
         this.rightFields = HandlerTool.createFields(fps2);
         ascs = new ArrayList<>();
         for (Order order : leftOrders) {
             ascs.add(order.getSortOrder() == SQLOrderingSpecification.ASC);
         }
-        List<Item> leftCmpItems = new ArrayList<>();
-        List<Item> rightCmpItems = new ArrayList<>();
         cmptors = new ArrayList<>();
         for (int index = 0; index < ascs.size(); index++) {
             Order leftOrder = leftOrders.get(index);
             Order rightOrder = rightOrders.get(index);
-            Item leftCmpItem = HandlerTool.createItem(leftOrder.getItem(), leftFields, 0, isAllPushDown1, type1,
+            Item leftCmpItem = HandlerTool.createItem(leftOrder.getItem(), leftFields, 0, isAllPushDown, type,
                     charset);
-            leftCmpItems.add(leftCmpItem);
-            Item rightCmpItem = HandlerTool.createItem(rightOrder.getItem(), rightFields, 0, isAllPushDown1,
-                    type1, charset);
-            rightCmpItems.add(rightCmpItem);
+            Item rightCmpItem = HandlerTool.createItem(rightOrder.getItem(), rightFields, 0, isAllPushDown,
+                    type, charset);
             ArgComparator cmptor = new ArgComparator(leftCmpItem, rightCmpItem);
             cmptor.setCmpFunc(null, leftCmpItem, rightCmpItem, false);
             cmptors.add(cmptor);

Versions.java

@@ -31,7 +31,7 @@ public abstract class Versions {
     public static final byte PROTOCOL_VERSION = 10;

     private static byte[] serverVersion = "5.6.29-mycat-2.17.08.0-dev-20170824134330".getBytes();
-    public static final byte[] VERSION_COMMENT = "MyCat Server (OpenCloundDB)".getBytes();
+    public static final byte[] VERSION_COMMENT = "MyCat Server (OpenCloudDB)".getBytes();
     public static final String ANNOTATION_NAME = "mycat:";
     public static final String ROOT_PREFIX = "mycat";

RollbackConfig.java

@@ -157,7 +157,7 @@ public final class RollbackConfig {
             }
         }
         // INIT FAILED
-        if (!rollbackStatus && dataHosts != null) {
+        if (!rollbackStatus) {
             for (PhysicalDBPool dn : dataHosts.values()) {
                 dn.clearDataSources("rollbackup config");
                 dn.stopHeartbeat();

CharArray.java

@@ -2,7 +2,6 @@ package io.mycat.memory.unsafe.array;
 import io.mycat.memory.unsafe.Platform;
 import io.mycat.memory.unsafe.memory.MemoryBlock;
-import io.mycat.memory.unsafe.memory.mm.MemoryConsumer;

 /**
  * @author Hash Zhang
@@ -18,13 +17,12 @@ public class CharArray {
     private final long length;

-    public CharArray(MemoryBlock memory, MemoryConsumer memoryConsumer) {
+    public CharArray(MemoryBlock memory) {
         assert memory.size() < (long) Integer.MAX_VALUE * 2 : "Array size > 4 billion elements";
         this.memory = memory;
         this.baseObj = memory.getBaseObject();
         this.baseOffset = memory.getBaseOffset();
         this.length = memory.size() / WIDTH;
-        MemoryConsumer memoryConsumer1 = memoryConsumer;
     }

MemoryConsumer.java

@@ -116,7 +116,7 @@ public abstract class MemoryConsumer {
             throw new OutOfMemoryError("Unable to acquire " + required + " bytes of memory, got " + got);
         }
         used += required;
-        return new CharArray(page, this);
+        return new CharArray(page);
     }

     /**

DataNodeDiskManager.java

@@ -4,7 +4,6 @@ package io.mycat.memory.unsafe.storage;
 import io.mycat.memory.unsafe.utils.MycatPropertyConf;

 import java.io.File;
-import java.io.FileOutputStream;
 import java.io.IOException;

 /**
@@ -31,12 +30,11 @@ public class DataNodeDiskManager {
      * cases.
      */
     public DiskRowWriter getDiskWriter(
-            ConnectionId blockId,
             File file,
             SerializerInstance serializerInstance,
             int bufferSize) throws IOException {
         boolean syncWrites = conf.getBoolean("server.merge.sync", false);
-        return new DiskRowWriter(file, serializerInstance, bufferSize, new FileOutputStream(file),
-                syncWrites, blockId);
+        return new DiskRowWriter(file, serializerInstance, bufferSize,
+                syncWrites);
     }
 }

DataNodeFileManager.java

@@ -18,6 +18,11 @@
 package io.mycat.memory.unsafe.storage;

+import io.mycat.memory.unsafe.utils.JavaUtils;
+import io.mycat.memory.unsafe.utils.MycatPropertyConf;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
@@ -26,12 +31,6 @@ import java.util.UUID;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import io.mycat.memory.unsafe.utils.JavaUtils;
-import io.mycat.memory.unsafe.utils.MycatPropertyConf;

 /**
  * Creates and maintains the logical mapping between logical blocks and physical on-disk
@@ -60,7 +59,6 @@ public class DataNodeFileManager {
     public DataNodeFileManager(MycatPropertyConf conf, boolean deleteFilesOnStop) throws IOException {
-        MycatPropertyConf conf1 = conf;
         this.deleteFilesOnStop = deleteFilesOnStop;

DiskRowWriter.java

@@ -80,19 +80,15 @@ public class DiskRowWriter extends OutputStream {
             File file,
             SerializerInstance serializerInstance,
             int bufferSize,
-            OutputStream compressStream,
-            boolean syncWrites,
-            ConnectionId blockId) throws IOException {
+            boolean syncWrites) throws IOException {
         this.file = file;
         this.serializerInstance = serializerInstance;
         this.bufferSize = bufferSize;
-        OutputStream compressStream1 = compressStream;
         this.syncWrites = syncWrites;
         /*
         ShuffleWriteMetrics writeMetrics,
         */
-        ConnectionId blockId1 = blockId;
         initialPosition = file.length();
         reportedPosition = initialPosition;
     }
@@ -148,20 +144,13 @@ public class DiskRowWriter extends OutputStream {
      * Flush the partial writes and commit them as a single atomic block.
      */
     public void commitAndClose() throws IOException {
-        long finalPosition = -1;
         if (initialized) {
             // NOTE: Because Kryo doesnt flush the underlying stream we explicitly flush both the
             // serializer stream and the lower level stream.
             objOut.flush();
             bs.flush();
             close();
-            finalPosition = file.length();
-            // In certain compression codecs, more bytes are written after close() is called
-            //writeMetrics.incBytesWritten(finalPosition - reportedPosition)
-        } else {
-            finalPosition = file.length();
         }
         boolean commitAndCloseHasBeenCalled = true;
     }

RowPrefixComputer.java

@@ -16,7 +16,6 @@ public class RowPrefixComputer extends UnsafeExternalRowSorter.PrefixComputer {
     private final ColMeta colMeta;

     public RowPrefixComputer(StructType schema) {
-        StructType schema1 = schema;
         /**
          * get the index of the first key word of order
          */

UnsafeExternalRowSorter.java

@@ -262,7 +262,6 @@ public final class UnsafeExternalRowSorter {
         row1.pointTo(baseObj1, baseOff1, -1);
         row2.pointTo(baseObj2, baseOff2, -1);
         int cmp = 0;
-        int len = orderCols.length;
         int type = OrderCol.COL_ORDER_TYPE_ASC;

UnsafeSorterSpillWriter.java

@@ -63,7 +63,7 @@ public final class UnsafeSorterSpillWriter {
         // Our write path doesn't actually use this serializer (since we end up calling the `write()`
         // OutputStream methods), but DiskRowWriter still calls some methods on it. To work
         // around this, we pass a dummy no-op serializer.
-        writer = blockManager.getDiskWriter(conId, file, DummySerializerInstance.INSTANCE, fileBufferSize/**,writeMetrics*/);
+        writer = blockManager.getDiskWriter(file, DummySerializerInstance.INSTANCE, fileBufferSize/**,writeMetrics*/);
         // Write the number of records
         writeIntToBuffer(numRecordsToWrite, 0);
         writer.write(writeBuffer, 0, 4);
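The comment above records the workaround this call relies on: getDiskWriter demands a serializer instance, but this spill path writes raw bytes and never serializes through it, so a do-nothing serializer is handed in. A minimal sketch of the pattern (hypothetical NoOpSerializer; not the repo's actual DummySerializerInstance, whose interface is not shown in this diff):

    import java.io.OutputStream;

    // A serializer stand-in whose only job is to exist: callers that require a
    // serializer receive this, and any stream handed to it passes through untouched.
    final class NoOpSerializer {
        static final NoOpSerializer INSTANCE = new NoOpSerializer();

        private NoOpSerializer() {
        }

        OutputStream wrap(OutputStream out) {
            return out; // no framing, no buffering, no state
        }
    }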

RowDataCmp.java

@@ -21,7 +21,6 @@ public class RowDataCmp implements Comparator<RowDataPacket> {
     public int compare(RowDataPacket o1, RowDataPacket o2) {
         OrderCol[] tmp = this.orderCols;
         int cmp = 0;
-        int len = tmp.length;
         //compare the columns of order by
         int type = OrderCol.COL_ORDER_TYPE_ASC;
         for (OrderCol aTmp : tmp) {

BlockManagerTest.java

@@ -61,7 +61,7 @@ public class BlockManagerTest {
         File file2 = diskBlockManager.getFile("mycat1");
         DiskRowWriter writer = blockManager.
-                getDiskWriter(null, file2, DummySerializerInstance.INSTANCE, 1024 * 1024);
+                getDiskWriter(file2, DummySerializerInstance.INSTANCE, 1024 * 1024);
         byte[] writeBuffer = new byte[4];
         int v = 4;
         writeBuffer[0] = (byte) (v >>> 24);