/*
 * Lazily allocates the command-channel usage checker (2^9 = 512 slots) used by
 * assert statements; always returns true so it can be invoked as "assert(setup...)"
 * and compiled out when assertions are disabled.
 * NOTE(review): the trailing "getUsageChecker()" header is truncated in this view —
 * its body lies outside this chunk and is left untouched.
 */
private static boolean setupForChannelAssertCheck() { cmdChannelUsageChecker = new IntHashTable(9); return true; } private static IntHashTable getUsageChecker() {
/**
 * Creates a table sized with one extra bit of headroom beyond the power of two
 * needed to hold {@code count} items (i.e. capacity of at least 2*count slots).
 *
 * @param count expected number of entries; values below 1 are treated as 1
 * @return a new, empty IntHashTable large enough for {@code count} entries
 */
public static IntHashTable newTableExpectingCount(int count) {
    // Exact integer bit math replaces the old Math.log form, which produced
    // Integer.MIN_VALUE bits for count<=0 (log of a non-positive number) and
    // could mis-round near exact powers of two due to floating-point error.
    int c = Math.max(count, 1);
    int bits = 1 + (c <= 1 ? 0 : 32 - Integer.numberOfLeadingZeros(c - 1)); // 1 + ceil(log2(c))
    return new IntHashTable(bits);
}
/**
 * Creates a table sized with one extra bit of headroom beyond the power of two
 * needed to hold {@code count} items (i.e. capacity of at least 2*count slots).
 *
 * @param count expected number of entries; values below 1 are treated as 1
 * @return a new, empty IntHashTable large enough for {@code count} entries
 */
public static IntHashTable newTableExpectingCount(int count) {
    // Exact integer bit math replaces the old Math.log form, which produced
    // Integer.MIN_VALUE bits for count<=0 (log of a non-positive number) and
    // could mis-round near exact powers of two due to floating-point error.
    int c = Math.max(count, 1);
    int bits = 1 + (c <= 1 ? 0 : 32 - Integer.numberOfLeadingZeros(c - 1)); // 1 + ceil(log2(c))
    return new IntHashTable(bits);
}
/**
 * Creates a table sized with one extra bit of headroom beyond the power of two
 * needed to hold {@code count} items (i.e. capacity of at least 2*count slots).
 *
 * @param count expected number of entries; values below 1 are treated as 1
 * @return a new, empty IntHashTable large enough for {@code count} entries
 */
public static IntHashTable newTableExpectingCount(int count) {
    // Exact integer bit math replaces the old Math.log form, which produced
    // Integer.MIN_VALUE bits for count<=0 (log of a non-positive number) and
    // could mis-round near exact powers of two due to floating-point error.
    int c = Math.max(count, 1);
    int bits = 1 + (c <= 1 ? 0 : 32 - Integer.numberOfLeadingZeros(c - 1)); // 1 + ceil(log2(c))
    return new IntHashTable(bits);
}
/**
 * Returns a new table with double the capacity of {@code ht} (one more bit),
 * re-hashing every existing key/value pair into it. The source table is not modified.
 *
 * @param ht table that has run out of space
 * @return a new table containing all entries of {@code ht} with free space remaining
 */
public static IntHashTable doubleSize(IntHashTable ht) {
    IntHashTable newHT = new IntHashTable(ht.bits + 1);
    int j = ht.mask + 1;
    while (--j >= 0) {
        long block = ht.data[j];
        if (0 != block) {
            // Each occupied slot packs the key in the low 32 bits and the value in the high 32.
            int key = (int) block;
            int value = (int) (block >> 32);
            setItem(newHT, key, value);
        }
    }
    // Consistency fix: the other copies of this method assert the doubled table
    // has free space; this copy was missing the post-condition.
    assert (newHT.space > 0);
    return newHT;
}
/**
 * Builds a replacement table with twice the slot count of {@code ht} and
 * copies every occupied entry across. {@code ht} itself is left untouched.
 *
 * @param ht the table to grow
 * @return the grown table holding all of {@code ht}'s entries
 */
public static IntHashTable doubleSize(IntHashTable ht) {
    // One extra bit doubles the number of addressable slots.
    final IntHashTable grown = new IntHashTable(ht.bits + 1);
    for (int slot = ht.mask; slot >= 0; slot--) {
        final long packed = ht.data[slot];
        if (packed != 0) {
            // Key lives in the low word of the packed slot, value in the high word.
            setItem(grown, (int) packed, (int) (packed >> 32));
        }
    }
    assert (grown.space > 0);
    return grown;
}
/*
 * Returns a new table with one more bit (double the slots) than ht, re-inserting
 * every non-zero slot. Each slot packs key in the low 32 bits, value in the high 32.
 * Asserts the grown table has free space before returning. (Duplicate copy of doubleSize.)
 */
public static IntHashTable doubleSize(IntHashTable ht) { IntHashTable newHT = new IntHashTable(ht.bits+1); int j = ht.mask+1; while (--j >= 0) { long block = ht.data[j]; if (0!=block) { int key = (int)block; int value = (int)(block>>32); setItem(newHT, key, value); } } assert(newHT.space>0); return newHT; }
/**
 * Claims the serial-store release-ack pipes for this behavior, mapping each
 * pipe id (offset by NON_ZERO_BASE so zero never collides with the empty slot)
 * to its position. Each pipe may be consumed by exactly one behavior; the
 * builder slot is nulled once claimed.
 *
 * @param id ids of the release-ack pipes to claim
 * @return this, for fluent chaining
 * @throws UnsupportedOperationException if a requested pipe was already claimed
 */
public final ListenerFilter includeSerialStoreReleaseAck(int ... id) {
    // *3 gives hash headroom beyond the entry count.
    serialStoreRelAckPipeMap = new IntHashTable(IntHashTable.computeBits(id.length*3));
    int i = id.length;
    while (--i>=0) {
        // NOTE(review): pipes are looked up by loop position i, not by id[i] — confirm intended.
        Pipe<PersistedBlobLoadReleaseSchema> pipe = builder.serialStoreReleaseAck[i];
        if (pipe==null) {
            // Bug fix: message previously concatenated the int[] itself, printing an
            // array reference (e.g. "[I@1a2b3c") instead of the offending id; also
            // corrected "assined" -> "assigned".
            throw new UnsupportedOperationException("The id "+id[i]+" release ack has already been assigned to another behavior.\n Only 1 behavior may consume this message");
        } else {
            IntHashTable.setItem(serialStoreRelAckPipeMap, NON_ZERO_BASE+pipe.id, NON_ZERO_BASE+i);
            builder.serialStoreReleaseAck[i] = null; // mark as claimed
        }
    }
    return this;
}
/**
 * Claims the serial-store replay pipes for this behavior, mapping each pipe id
 * (offset by NON_ZERO_BASE so zero never collides with the empty slot) to its
 * position. Each pipe may be consumed by exactly one behavior; the builder slot
 * is nulled once claimed.
 *
 * @param id ids of the replay pipes to claim
 * @return this, for fluent chaining
 * @throws UnsupportedOperationException if a requested pipe was already claimed
 */
public final ListenerFilter includeSerialStoreReplay(int ... id) {
    // *3 gives hash headroom beyond the entry count.
    serialStoreReplayPipeMap = new IntHashTable(IntHashTable.computeBits(id.length*3));
    int i = id.length;
    while (--i>=0) {
        // NOTE(review): pipes are looked up by loop position i, not by id[i] — confirm intended.
        Pipe<PersistedBlobLoadConsumerSchema> pipe = builder.serialStoreReplay[i];
        if (pipe==null) {
            // Bug fix: message previously concatenated the int[] itself, printing an
            // array reference instead of the offending id; also corrected "assined".
            throw new UnsupportedOperationException("The id "+id[i]+" replay has already been assigned to another behavior.\n Only 1 behavior may consume this message");
        } else {
            IntHashTable.setItem(serialStoreReplayPipeMap, NON_ZERO_BASE+pipe.id, NON_ZERO_BASE+i);
            builder.serialStoreReplay[i] = null; // mark as claimed
        }
    }
    return this;
}
/**
 * Claims the serial-store write-ack pipes for this behavior, mapping each pipe
 * id (offset by NON_ZERO_BASE so zero never collides with the empty slot) to
 * its position. Each pipe may be consumed by exactly one behavior; the builder
 * slot is nulled once claimed.
 *
 * @param id ids of the write-ack pipes to claim
 * @return this, for fluent chaining
 * @throws UnsupportedOperationException if a requested pipe was already claimed
 */
public final ListenerFilter includeSerialStoreWriteAck(int ... id) {
    // *3 gives hash headroom beyond the entry count.
    serialStoreProdAckPipeMap = new IntHashTable(IntHashTable.computeBits(id.length*3));
    int i = id.length;
    while (--i>=0) {
        // NOTE(review): pipes are looked up by loop position i, not by id[i] — confirm intended.
        Pipe<PersistedBlobLoadProducerSchema> pipe = builder.serialStoreWriteAck[i];
        if (pipe==null) {
            // Bug fix: message previously concatenated the int[] itself, printing an
            // array reference instead of the offending id; also corrected "assined".
            throw new UnsupportedOperationException("The id "+id[i]+" write ack has already been assigned to another behavior.\n Only 1 behavior may consume this message");
        } else {
            IntHashTable.setItem(serialStoreProdAckPipeMap, NON_ZERO_BASE+pipe.id, NON_ZERO_BASE+i);
            builder.serialStoreWriteAck[i] = null; // mark as claimed
        }
    }
    return this;
}
// Table mapping roots; sized by 'bits' — NOTE(review): 'bits' is computed outside
// this chunk, so the intended capacity cannot be confirmed from here.
IntHashTable rootsTable = new IntHashTable(bits);
// Re-allocation of rootsTable elsewhere in the code; discards any prior contents.
rootsTable = new IntHashTable(bits);
private static < T extends Enum<T> & HTTPContentType> IntHashTable buildFileExtHashTable(Class<T> supportedHTTPContentTypes) { int hashBits = 13; //8K IntHashTable localExtTable = new IntHashTable(hashBits); T[] conentTypes = supportedHTTPContentTypes.getEnumConstants(); int c = conentTypes.length; while (--c >= 0) { if (!conentTypes[c].isAlias()) {//never use an alias for the file Ext lookup. int hash = HTTPSpecification.extHash(conentTypes[c].fileExtension()); if ( IntHashTable.hasItem(localExtTable, hash) ) { final int ord = IntHashTable.getItem(localExtTable, hash); throw new UnsupportedOperationException("Hash error, check for new values and algo. "+conentTypes[c].fileExtension()+" colides with existing "+conentTypes[ord].fileExtension()); } else { IntHashTable.setItem(localExtTable, hash, conentTypes[c].ordinal()); } } } return localExtTable; }
/*
 * Associates localObject (by its hashCode) with fieldIdx in the per-struct index
 * table, lazily allocating the table sized at 2x the field count. If the hash is
 * already present the association is rejected (returns false) rather than
 * overwritten; if the table is full it is doubled once and the insert retried.
 * Returns true on success. The assert duplicates the runtime hasItem check so
 * dev builds fail loudly while production degrades to a warning.
 * NOTE(review): a zero hashCode is asserted against because 0 marks an empty slot.
 */
private boolean addAssocHashToTable(Object localObject, int structIdx, int fieldIdx) { if (null==this.fieldAttachedIndex[structIdx]) { this.fieldAttachedIndex[structIdx] = new IntHashTable( IntHashTable.computeBits(this.fieldLocals[structIdx].length*2) ); } int hashCode = localObject.hashCode(); assert(0!=hashCode) : "can not insert null"; assert(!IntHashTable.hasItem(this.fieldAttachedIndex[structIdx], hashCode)) : "These objects are too similar or was attached twice, Hash must be unique. Choose different objects"; if (IntHashTable.hasItem(this.fieldAttachedIndex[structIdx], hashCode)) { logger.warn("Unable to add object {} as an association, Another object with an identical Hash is already held. Try a different object.", localObject); return false; } else { if (!IntHashTable.setItem(this.fieldAttachedIndex[structIdx], hashCode, fieldIdx)) { //we are out of space this.fieldAttachedIndex[structIdx] = IntHashTable.doubleSize(this.fieldAttachedIndex[structIdx]); if (!IntHashTable.setItem(this.fieldAttachedIndex[structIdx], hashCode, fieldIdx)) { throw new RuntimeException("internal error"); } } else { //logger.info("{} set object {} to index {}",structIdx, localObject, fieldIdx); } assert(fieldIdx == IntHashTable.getItem(this.fieldAttachedIndex[structIdx], hashCode)); return true; } }
/* Duplicate copy of addAssocHashToTable above (identical bytes); see the comment there. */
private boolean addAssocHashToTable(Object localObject, int structIdx, int fieldIdx) { if (null==this.fieldAttachedIndex[structIdx]) { this.fieldAttachedIndex[structIdx] = new IntHashTable( IntHashTable.computeBits(this.fieldLocals[structIdx].length*2) ); } int hashCode = localObject.hashCode(); assert(0!=hashCode) : "can not insert null"; assert(!IntHashTable.hasItem(this.fieldAttachedIndex[structIdx], hashCode)) : "These objects are too similar or was attached twice, Hash must be unique. Choose different objects"; if (IntHashTable.hasItem(this.fieldAttachedIndex[structIdx], hashCode)) { logger.warn("Unable to add object {} as an association, Another object with an identical Hash is already held. Try a different object.", localObject); return false; } else { if (!IntHashTable.setItem(this.fieldAttachedIndex[structIdx], hashCode, fieldIdx)) { //we are out of space this.fieldAttachedIndex[structIdx] = IntHashTable.doubleSize(this.fieldAttachedIndex[structIdx]); if (!IntHashTable.setItem(this.fieldAttachedIndex[structIdx], hashCode, fieldIdx)) { throw new RuntimeException("internal error"); } } else { //logger.info("{} set object {} to index {}",structIdx, localObject, fieldIdx); } assert(fieldIdx == IntHashTable.getItem(this.fieldAttachedIndex[structIdx], hashCode)); return true; } }
/*
 * One-time pub/sub startup: sizes per-pipe bookkeeping (consumed marks, pending
 * acks, required-consume counts) from the incoming/outgoing pipe arrays, builds
 * the de-dupe table with 2x headroom over the outgoing pipe count, initializes
 * the subscriber list (filled with -1 = empty), the topic TrieParser (with
 * extraction enabled for wildcards) and its reader (complete-text mode), then
 * replays any subscriptions registered before startup.
 * NOTE(review): Arrays.fill(subscriberLists, (short)-1) on an int[] — the short
 * cast is redundant (widened back to int); value is still -1, so behavior is fine.
 */
void startupPubSub(BuilderImpl hardware) { tempSubject.initBuffers(); int incomingPipeCount = incomingSubsAndPubsPipe.length; //for each pipe we must keep track of the consumed marks before sending the ack back int outgoingPipeCount = outgoingMessagePipes.length; consumedMarks = new long[incomingPipeCount][outgoingPipeCount]; pendingAck = new boolean[incomingPipeCount]; requiredConsumes = new int[incomingPipeCount]; //maximum count of outgoing pipes * 2 for extra hash room deDupeTable = new IntHashTable(IntHashTable.computeBits(outgoingPipeCount*2)); subscriberLists = new int[MessagePubSubImpl.initialSubscriptions*subscriberListSize]; Arrays.fill(subscriberLists, (short)-1); localSubscriptionTrie = new TrieParser(MessagePubSubImpl.initialSubscriptions * MessagePubSubImpl.estimatedAvgTopicLength,1,false,true);//must support extraction for wild cards. //this reader is set up for complete text only, all topics are sent in complete. localSubscriptionTrieReader = new TrieParserReader(true); pendingPublish = new int[subscriberListSize]; processStartupSubscriptions(hardware.consumeStartupSubscriptions()); }
// Lazily sized per-struct association table: 2x the field count for hash headroom.
this.fieldAttachedIndex[structIdx] = new IntHashTable( IntHashTable.computeBits(this.fieldLocals[structIdx].length*2) );
// Field declarations plus an allocation snippet: struct registry starts at 2^3 slots,
// alias table at 2^5; the per-struct index gets 4 extra bits (16x headroom) over the
// field count, floored at 8 fields. NOTE(review): these fragments come from different
// scopes in the original file(s); kept byte-identical here.
private Object[] structLocals = new Object[4]; private IntHashTable structTable = new IntHashTable(3); this.fieldAttachedIndex[structIdx] = new IntHashTable(4 + IntHashTable.computeBits(Math.max(fieldNames.length,8)) ); private IntHashTable aliasStructIdTable = new IntHashTable(5);//alias values....
// Duplicate of the declarations/allocation snippet above; see comment there.
private Object[] structLocals = new Object[4]; private IntHashTable structTable = new IntHashTable(3); this.fieldAttachedIndex[structIdx] = new IntHashTable(4 + IntHashTable.computeBits(Math.max(fieldNames.length,8)) ); private IntHashTable aliasStructIdTable = new IntHashTable(5);//alias values....
// Allocates per-struct locals array and the index table with 5 extra bits (32x
// headroom) over the field count, floored at 8 fields.
this.fieldLocals[structIdx] = new Object[fieldNames.length]; this.fieldAttachedIndex[structIdx] = new IntHashTable(5+IntHashTable.computeBits(Math.max(fieldNames.length,8)));