* Certificate: Fix null cert hash code

* Hash: Cleanup of cached hash
* NetDB: Randomize returned DSM timestamp
author zzz
date   2014-03-15 18:43:42 +00:00
parent af7ce8e18e
commit 24e6750529
5 changed files with 39 additions and 27 deletions


@@ -244,20 +244,20 @@ public class Certificate extends DataStructureImpl {
         StringBuilder buf = new StringBuilder(64);
         buf.append("[Certificate: type: ");
         if (getCertificateType() == CERTIFICATE_TYPE_NULL)
-            buf.append("Null certificate");
+            buf.append("Null");
         else if (getCertificateType() == CERTIFICATE_TYPE_KEY)
-            buf.append("Key certificate");
+            buf.append("Key");
         else if (getCertificateType() == CERTIFICATE_TYPE_HASHCASH)
-            buf.append("Hashcash certificate");
+            buf.append("HashCash");
         else if (getCertificateType() == CERTIFICATE_TYPE_HIDDEN)
-            buf.append("Hidden certificate");
+            buf.append("Hidden");
         else if (getCertificateType() == CERTIFICATE_TYPE_SIGNED)
-            buf.append("Signed certificate");
+            buf.append("Signed");
         else
-            buf.append("Unknown certificate type (").append(getCertificateType()).append(")");
+            buf.append("Unknown type (").append(getCertificateType()).append(')');
         if (_payload == null) {
-            buf.append(" null payload");
+            buf.append(" payload: null");
         } else {
             buf.append(" payload size: ").append(_payload.length);
             if (getCertificateType() == CERTIFICATE_TYPE_HASHCASH) {
@@ -334,7 +334,8 @@ public class Certificate extends DataStructureImpl {
         /** Overridden for efficiency */
         @Override
         public int hashCode() {
-            return 0;
+            return 99999; // must be the same as type + payload above
         }
     }
 }
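
What the one-line fix protects is the Java equals/hashCode contract: a cached certificate subclass that overrides hashCode() for speed has to return exactly the value the parent class computes from its type and payload, or equal objects land in different hash buckets and lookups in hash-based collections silently fail. A minimal sketch of that contract, using hypothetical stand-in classes rather than the real I2P types:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

// Hypothetical stand-ins, not the real I2P classes.
class Cert {
    final int type;
    final byte[] payload;

    Cert(int type, byte[] payload) { this.type = type; this.payload = payload; }

    @Override
    public boolean equals(Object o) {
        if (!(o instanceof Cert)) return false;
        Cert c = (Cert) o;
        return c.type == type && Arrays.equals(c.payload, payload);
    }

    // Hash derived from "type + payload", as the new comment in the diff says.
    @Override
    public int hashCode() {
        return type * 31 + Arrays.hashCode(payload);
    }
}

// A cached subclass that shortcuts hashCode() for speed. If the constant it
// returns differs from what the parent computes for an equal object, the
// equals/hashCode contract breaks.
class NullCert extends Cert {
    NullCert() { super(0, null); }

    /** Overridden for efficiency; must match super.hashCode() for a null cert. */
    @Override
    public int hashCode() {
        return 0 * 31 + Arrays.hashCode((byte[]) null); // same formula as the parent, type 0, no payload
    }
}

public class HashContractDemo {
    public static void main(String[] args) {
        Set<Cert> certs = new HashSet<>();
        certs.add(new NullCert());
        // Found only because NullCert.hashCode() matches Cert.hashCode() here.
        System.out.println(certs.contains(new Cert(0, null))); // true
    }
}

With a mismatched constant, the final println would report false even though the two objects compare equal.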


@@ -34,7 +34,7 @@ public class KeysAndCert extends DataStructureImpl {
     protected PublicKey _publicKey;
     protected SigningPublicKey _signingKey;
     protected Certificate _certificate;
-    protected Hash __calculatedHash;
+    private Hash __calculatedHash;
     protected byte[] _padding;

     public Certificate getCertificate() {
@@ -48,7 +48,6 @@ public class KeysAndCert extends DataStructureImpl {
         if (_certificate != null)
             throw new IllegalStateException();
         _certificate = cert;
-        __calculatedHash = null;
     }

     public PublicKey getPublicKey() {
@@ -62,7 +61,6 @@ public class KeysAndCert extends DataStructureImpl {
         if (_publicKey != null)
             throw new IllegalStateException();
         _publicKey = key;
-        __calculatedHash = null;
     }

     public SigningPublicKey getSigningPublicKey() {
@@ -76,7 +74,6 @@ public class KeysAndCert extends DataStructureImpl {
         if (_signingKey != null)
             throw new IllegalStateException();
         _signingKey = key;
-        __calculatedHash = null;
     }

     /**
@@ -87,7 +84,6 @@ public class KeysAndCert extends DataStructureImpl {
         if (_padding != null)
             throw new IllegalStateException();
         _padding = padding;
-        __calculatedHash = null;
     }

     /**
@@ -109,7 +105,6 @@ public class KeysAndCert extends DataStructureImpl {
             _signingKey = spk;
             _certificate = cert;
         }
-        __calculatedHash = null;
     }

     public void writeBytes(OutputStream out) throws DataFormatException, IOException {
@@ -169,8 +164,10 @@ public class KeysAndCert extends DataStructureImpl {
             ByteArrayOutputStream baos = new ByteArrayOutputStream(400);
             writeBytes(baos);
             identBytes = baos.toByteArray();
-        } catch (Throwable t) {
-            return null;
+        } catch (IOException ioe) {
+            throw new IllegalStateException("KAC hash error", ioe);
+        } catch (DataFormatException dfe) {
+            throw new IllegalStateException("KAC hash error", dfe);
         }
         __calculatedHash = SHA256Generator.getInstance().calculateHash(identBytes);
         return __calculatedHash;
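
The cleanup above leans on KeysAndCert's fields being write-once: each setter throws IllegalStateException if the field is already populated, so the lazily computed identity hash can never go stale and the removed __calculatedHash = null lines were dead code; by the same logic, a serialization failure while hashing is a programming error to surface, not a condition to swallow with return null. A rough sketch of that write-once, cache-once pattern, with hypothetical names rather than the real KeysAndCert API:

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

// Hypothetical sketch of the write-once / cache-once pattern, not the real KeysAndCert.
class Identity {
    private byte[] _publicKey;       // write-once
    private byte[] _signingKey;      // write-once
    private byte[] _calculatedHash;  // computed lazily, never invalidated

    public void setPublicKey(byte[] key) {
        if (_publicKey != null)
            throw new IllegalStateException(); // write-once, so no cache invalidation needed
        _publicKey = key;
    }

    public void setSigningKey(byte[] key) {
        if (_signingKey != null)
            throw new IllegalStateException();
        _signingKey = key;
    }

    /** Compute the SHA-256 identity hash once and reuse it; a failure here is a bug. */
    public byte[] calculateHash() {
        if (_calculatedHash != null)
            return _calculatedHash;
        try {
            ByteArrayOutputStream baos = new ByteArrayOutputStream(400);
            baos.write(_publicKey);
            baos.write(_signingKey);
            _calculatedHash = MessageDigest.getInstance("SHA-256").digest(baos.toByteArray());
        } catch (IOException | NoSuchAlgorithmException e) {
            throw new IllegalStateException("identity hash error", e);
        }
        return _calculatedHash;
    }
}

public class IdentityHashDemo {
    public static void main(String[] args) {
        Identity id = new Identity();
        id.setPublicKey(new byte[256]);
        id.setSigningKey(new byte[128]);
        System.out.println("hash length: " + id.calculateHash().length); // 32
    }
}

Because nothing can mutate the fields after they are set, the cached value is safe to hand out forever without recomputation or invalidation.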


@@ -1,5 +1,14 @@
+2014-03-15 zzz
+ * Certificate: Fix null cert hash code
+ * Hash: Cleanup of cached hash
+ * NetDB: Randomize returned DSM timestamp
+
+2014-03-13 zzz
+ * SAM: Class, field, findbugs cleanups
+
 2014-03-12 zzz
  * Console: Handle ISO-639-2 language codes (ticket #1229)
  * I2PTunnel: Send message to client on HTTP Server OOM or out of threads
  * Streaming:
    - Track recently closed connections (ticket #1161)
    - Workaround for jwebcache and i2phex (ticket #1231)


@@ -18,10 +18,10 @@ public class RouterVersion {
     /** deprecated */
     public final static String ID = "Monotone";
     public final static String VERSION = CoreVersion.VERSION;
-    public final static long BUILD = 19;
+    public final static long BUILD = 20;

     /** for example "-test" */
-    public final static String EXTRA = "";
+    public final static String EXTRA = "-rc";
     public final static String FULL_VERSION = VERSION + "-" + BUILD + EXTRA;
     public static void main(String args[]) {
         System.out.println("I2P Router version: " + FULL_VERSION);


@@ -56,7 +56,7 @@ public class HandleFloodfillDatabaseStoreMessageJob extends JobImpl {
         Hash key = _message.getKey();
         DatabaseEntry entry = _message.getEntry();
         if (entry.getType() == DatabaseEntry.KEY_TYPE_LEASESET) {
-            getContext().statManager().addRateData("netDb.storeLeaseSetHandled", 1, 0);
+            getContext().statManager().addRateData("netDb.storeLeaseSetHandled", 1);
             if (_log.shouldLog(Log.INFO))
                 _log.info("Handling dbStore of leaseset " + _message);
             //_log.info("Handling dbStore of leasset " + key + " with expiration of "
@@ -119,7 +119,7 @@ public class HandleFloodfillDatabaseStoreMessageJob extends JobImpl {
             }
         } else if (entry.getType() == DatabaseEntry.KEY_TYPE_ROUTERINFO) {
             RouterInfo ri = (RouterInfo) entry;
-            getContext().statManager().addRateData("netDb.storeRouterInfoHandled", 1, 0);
+            getContext().statManager().addRateData("netDb.storeRouterInfoHandled", 1);
             if (_log.shouldLog(Log.INFO))
                 _log.info("Handling dbStore of router " + key + " with publishDate of "
                           + new Date(ri.getPublished()));
@@ -163,7 +163,7 @@ public class HandleFloodfillDatabaseStoreMessageJob extends JobImpl {
         }
         long recvEnd = System.currentTimeMillis();
-        getContext().statManager().addRateData("netDb.storeRecvTime", recvEnd-recvBegin, 0);
+        getContext().statManager().addRateData("netDb.storeRecvTime", recvEnd-recvBegin);
         if (_message.getReplyToken() > 0)
             sendAck();
@@ -174,7 +174,7 @@ public class HandleFloodfillDatabaseStoreMessageJob extends JobImpl {
         if (_fromHash != null) {
             if (invalidMessage == null) {
                 getContext().profileManager().dbStoreReceived(_fromHash, wasNew);
-                getContext().statManager().addRateData("netDb.storeHandled", ackEnd-recvEnd, 0);
+                getContext().statManager().addRateData("netDb.storeHandled", ackEnd-recvEnd);
             } else {
                 // Should we record in the profile?
                 if (_log.shouldLog(Log.WARN))
@@ -195,7 +195,7 @@ public class HandleFloodfillDatabaseStoreMessageJob extends JobImpl {
             if (_facade.shouldThrottleFlood(key)) {
                 if (_log.shouldLog(Log.WARN))
                     _log.warn("Too many recent stores, not flooding key: " + key);
-                getContext().statManager().addRateData("netDb.floodThrottled", 1, 0);
+                getContext().statManager().addRateData("netDb.floodThrottled", 1);
                 return;
             }
             long floodBegin = System.currentTimeMillis();
@@ -203,10 +203,10 @@ public class HandleFloodfillDatabaseStoreMessageJob extends JobImpl {
             // ERR: see comment in HandleDatabaseLookupMessageJob regarding hidden mode
             //else if (!_message.getRouterInfo().isHidden())
                 long floodEnd = System.currentTimeMillis();
-                getContext().statManager().addRateData("netDb.storeFloodNew", floodEnd-floodBegin, 0);
+                getContext().statManager().addRateData("netDb.storeFloodNew", floodEnd-floodBegin);
             } else {
                 // don't flood it *again*
-                getContext().statManager().addRateData("netDb.storeFloodOld", 1, 0);
+                getContext().statManager().addRateData("netDb.storeFloodOld", 1);
             }
         }
     }
@@ -214,7 +214,11 @@ public class HandleFloodfillDatabaseStoreMessageJob extends JobImpl {
     private void sendAck() {
         DeliveryStatusMessage msg = new DeliveryStatusMessage(getContext());
         msg.setMessageId(_message.getReplyToken());
-        msg.setArrival(getContext().clock().now());
+        // Randomize for a little protection against clock-skew fingerprinting.
+        // But the "arrival" isn't used for anything, right?
+        // TODO just set to 0?
+        // TODO we have no session to garlic wrap this with, needs new message
+        msg.setArrival(getContext().clock().now() - getContext().random().nextInt(3*1000));
         /*
         if (FloodfillNetworkDatabaseFacade.floodfillEnabled(getContext())) {
             // no need to do anything but send it where they ask
@@ -232,7 +236,8 @@ public class HandleFloodfillDatabaseStoreMessageJob extends JobImpl {
                 _log.warn("No outbound tunnel could be found");
                 return;
             } else {
-                getContext().tunnelDispatcher().dispatchOutbound(msg, outTunnel.getSendTunnelId(0), _message.getReplyTunnel(), _message.getReplyGateway());
+                getContext().tunnelDispatcher().dispatchOutbound(msg, outTunnel.getSendTunnelId(0),
+                                                                 _message.getReplyTunnel(), _message.getReplyGateway());
             }
             //}
         }
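
The sendAck() change is a small fingerprinting defense: rather than stamping the DeliveryStatusMessage with the router's exact local time, it subtracts a random offset of up to three seconds, so a peer comparing the reported arrival against its own clock learns correspondingly less about this router's clock skew. A tiny illustration of the same idea in plain Java, with Random and System.currentTimeMillis() standing in for the I2P context's random() and clock().now():

import java.util.Random;

// Hypothetical demo of the randomized-timestamp idea, not the I2P context APIs.
public class AckTimestampDemo {
    private static final Random RANDOM = new Random();

    /** Report a time up to 3 seconds in the past so the exact local clock is not exposed. */
    static long randomizedArrival(long now) {
        return now - RANDOM.nextInt(3 * 1000);
    }

    public static void main(String[] args) {
        long now = System.currentTimeMillis();
        System.out.println("actual clock:   " + now);
        System.out.println("reported value: " + randomizedArrival(now));
    }
}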