private static TObjectIntMap<String> buildAccessors(int accessFlags) {
    TObjectIntMap<String> map = new TObjectIntHashMap<>();
    map.put("public", Modifier.isPublic(accessFlags) ? 1 : 0);
    map.put("protected", Modifier.isProtected(accessFlags) ? 1 : 0);
    map.put("private", Modifier.isPrivate(accessFlags) ? 1 : 0);
    map.put("final", Modifier.isFinal(accessFlags) ? 1 : 0);
    map.put("interface", Modifier.isInterface(accessFlags) ? 1 : 0);
    map.put("native", Modifier.isNative(accessFlags) ? 1 : 0);
    map.put("static", Modifier.isStatic(accessFlags) ? 1 : 0);
    map.put("strict", Modifier.isStrict(accessFlags) ? 1 : 0);
    map.put("synchronized", Modifier.isSynchronized(accessFlags) ? 1 : 0);
    map.put("transient", Modifier.isTransient(accessFlags) ? 1 : 0);
    map.put("volatile", Modifier.isVolatile(accessFlags) ? 1 : 0);
    map.put("abstract", Modifier.isAbstract(accessFlags) ? 1 : 0);
    return map;
}
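A minimal usage sketch, assuming it sits in the same class as buildAccessors; the demoAccessors name and the printed values are illustrative, and the imports for java.lang.reflect.Modifier and the Trove map type are assumed to match those of the surrounding class.

// Hypothetical helper in the same class as buildAccessors: shows which modifier
// names are flagged for a given java.lang.reflect.Modifier bit mask.
private static void demoAccessors() {
    int flags = Modifier.PUBLIC | Modifier.FINAL;   // e.g. String.class.getModifiers() also carries these bits
    TObjectIntMap<String> accessors = buildAccessors(flags);
    System.out.println(accessors.get("public"));    // 1: the PUBLIC bit is set
    System.out.println(accessors.get("final"));     // 1: the FINAL bit is set
    System.out.println(accessors.get("abstract"));  // 0: the ABSTRACT bit is not set
}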
/**
 * Gathers and enumerates all the itinerary classes.
 * Returns the itinerary class count.
 * @param os             the output stream to emit to
 * @param itinClassesMap filled with a mapping from itinerary class name to its index
 * @return the number of itinerary classes
 */
private int collectAllItinClasses(PrintStream os, TObjectIntHashMap<String> itinClassesMap) throws Exception {
    ArrayList<Record> itinClassList = records.getAllDerivedDefinition("InstrItinClass");
    itinClassList.sort(LessRecord);

    // Assign each itinerary class a dense index in sorted order.
    int n = itinClassList.size();
    for (int i = 0; i < n; i++) {
        Record itinClass = itinClassList.get(i);
        itinClassesMap.put(itinClass.getName(), i);
    }
    return n;
}
/**
 * Emits all stages and itineraries, folding common patterns.
 * @param os the output stream to emit to
 */
private void emitData(PrintStream os) throws Exception {
    TObjectIntHashMap<String> itinClassesMap = new TObjectIntHashMap<>();
    ArrayList<ArrayList<InstrItinerary>> procList = new ArrayList<>();

    // Enumerate all the itinerary classes.
    int nItinClasses = collectAllItinClasses(os, itinClassesMap);

    // Make sure the rest is worth the effort.
    hasItrineraries = nItinClasses != 1;

    if (hasItrineraries) {
        // Emit the stage data.
        emitStageAndOperandCycleData(os, nItinClasses, itinClassesMap, procList);
        // Emit the processor itinerary data.
        emitProcessorData(os, procList);
        // Emit the processor lookup data.
        emitProcessorLookup(os);
    }
}
private void findDepVarsOf(TreePatternNode node, TObjectIntHashMap<String> depMap) {
    if (node.isLeaf()) {
        if (node.getLeafValue() instanceof DefInit) {
            if (depMap.containsKey(node.getName()))
                depMap.put(node.getName(), depMap.get(node.getName()) + 1);
            else
                depMap.put(node.getName(), 1);
        }
    } else {
        for (int i = 0, e = node.getNumChildren(); i != e; i++)
            findDepVarsOf(node.getChild(i), depMap);
    }
}
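The contains/get/put increment above can be collapsed into a single Trove call; a minimal sketch of that idiom follows (the demo method name and keys are illustrative, and the Trove 3 class is assumed).

// Illustrative only: adjustOrPutValue(key, adjustAmount, putAmount) adds adjustAmount
// to an existing entry, or inserts putAmount when the key is absent.
private static void demoAdjustOrPutValue() {
    TObjectIntHashMap<String> depMap = new TObjectIntHashMap<>();
    depMap.adjustOrPutValue("x", 1, 1);   // key absent: puts 1
    depMap.adjustOrPutValue("x", 1, 1);   // key present: 1 + 1 = 2
    System.out.println(depMap.get("x"));  // prints 2
}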
public PromoteMemToReg(ArrayList<AllocaInst> allocas, DomTree dt, DominanceFrontier df, AliasSetTracker ast) {
    this.allocas = allocas;
    this.dt = dt;
    this.df = df;
    this.ast = ast;
    allocaLookup = new TObjectIntHashMap<>();
    newPhiNodes = new HashMap<>();
    visitedBlocks = new HashSet<>();
    bbNumbers = new TObjectIntHashMap<>();
    phiToAllocaMap = new TObjectIntHashMap<>();
    pointerAllocaValues = new ArrayList<>();
}
/**
 * This method must be overridden by a concrete subclass to perform
 * the desired machine code transformation or analysis.
 *
 * @param mf the machine function to operate on
 * @return true if the machine function was modified
 */
@Override
public boolean runOnMachineFunction(MachineFunction mf) {
    this.mf = mf;
    tm = mf.getTarget();
    regInfo = tm.getRegisterInfo();
    instrInfo = tm.getInstrInfo();
    stackSlotForVirReg = new TIntIntHashMap();
    regUsed = new BitMap();
    regClassIdx = new TObjectIntHashMap<>();

    for (MachineBasicBlock mbb : mf.getBasicBlocks())
        allocateBasicBlock(mbb);

    stackSlotForVirReg.clear();
    return true;
}
public void testGetMap() {
    int element_count = 20;
    String[] keys = new String[element_count];
    int[] vals = new int[element_count];

    TObjectIntMap<String> raw_map = new TObjectIntHashMap<String>();
    for ( int i = 0; i < element_count; i++ ) {
        keys[i] = Integer.toString( i + 1 );
        vals[i] = i + 1;
        raw_map.put( keys[i], vals[i] );
    }

    //noinspection MismatchedQueryAndUpdateOfCollection
    TObjectIntMapDecorator<String> map = new TObjectIntMapDecorator<String>( raw_map );
    assertEquals( raw_map, map.getMap() );
}
public void testContainsKey() {
    int element_count = 20;
    String[] keys = new String[element_count];
    int[] vals = new int[element_count];

    TObjectIntMap<String> map = new TObjectIntHashMap<String>();
    for ( int i = 0; i < element_count; i++ ) {
        keys[i] = Integer.toString( i + 1 );
        vals[i] = i + 1;
        map.put( keys[i], vals[i] );
    }

    for ( int i = 0; i < element_count; i++ ) {
        assertTrue( "Key should be present: " + keys[i] + ", map: " + map,
            map.containsKey( keys[i] ) );
    }

    String key = "1138";
    assertFalse( "Key should not be present: " + key + ", map: " + map,
        map.containsKey( key ) );

    assertFalse( "Random object should not be present in map: " + map,
        map.containsKey( new Object() ) );
}
public void testContainsValue() {
    int element_count = 20;
    String[] keys = new String[element_count];
    int[] vals = new int[element_count];

    TObjectIntMap<String> map = new TObjectIntHashMap<String>();
    for ( int i = 0; i < element_count; i++ ) {
        keys[i] = Integer.toString( i + 1 );
        vals[i] = i + 1;
        map.put( keys[i], vals[i] );
    }

    for ( int i = 0; i < element_count; i++ ) {
        assertTrue( "Value should be present: " + vals[i] + ", map: " + map,
            map.containsValue( vals[i] ) );
    }

    int val = 1138;
    assertFalse( "Key should not be present: " + val + ", map: " + map,
        map.containsValue( val ) );
}
public void testPutAllMap() {
    int element_count = 20;
    String[] keys = new String[element_count];
    int[] vals = new int[element_count];

    TObjectIntMap<String> control = new TObjectIntHashMap<String>();
    for ( int i = 0; i < element_count; i++ ) {
        keys[i] = Integer.toString( i + 1 );
        vals[i] = i + 1;
        control.put( keys[i], vals[i] );
    }

    TObjectIntMap<String> raw_map = new TObjectIntHashMap<String>();
    Map<String,Integer> map = TDecorators.wrap( raw_map );

    Map<String,Integer> source = new HashMap<String,Integer>();
    for ( int i = 0; i < element_count; i++ ) {
        source.put( keys[i], vals[i] );
    }

    map.putAll( source );
    assertEquals( source, map );
    assertEquals( control, raw_map );
}
public void testClear() {
    int element_count = 20;
    String[] keys = new String[element_count];
    int[] vals = new int[element_count];

    TObjectIntMap<String> raw_map = new TObjectIntHashMap<String>();
    Map<String,Integer> map = TDecorators.wrap( raw_map );
    for ( int i = 0; i < element_count; i++ ) {
        keys[i] = Integer.toString( i + 1 );
        vals[i] = i + 1;
        map.put( keys[i], vals[i] );
    }
    assertEquals( element_count, raw_map.size() );

    map.clear();
    assertTrue( map.isEmpty() );
    assertEquals( 0, map.size() );
    assertNull( map.get( keys[5] ) );
}
public void testValues() {
    int element_count = 20;
    String[] keys = new String[element_count];
    Integer[] vals = new Integer[element_count];

    TObjectIntMap<String> raw_map =
        new TObjectIntHashMap<String>( element_count, 0.5f, Integer.MIN_VALUE );
    Map<String,Integer> map = TDecorators.wrap( raw_map );
    for ( int i = 0; i < element_count; i++ ) {
        keys[i] = Integer.toString( i + 1 );
        vals[i] = Integer.valueOf( i + 1 );
        map.put( keys[i], vals[i] );
    }
    assertEquals( element_count, map.size() );

    // No argument
    Collection<Integer> values_collection = map.values();
    assertEquals( element_count, values_collection.size() );
    List<Integer> values_list = new ArrayList<Integer>( values_collection );
    for ( int i = 0; i < element_count; i++ ) {
        assertTrue( values_list.contains( vals[i] ) );
    }
}
@SuppressWarnings({"unchecked"}) public void testSerialize() throws Exception { Integer[] vals = {1138, 42, 86, 99, 101, 727, 117}; String[] keys = new String[vals.length]; TObjectIntMap<String> raw_map = new TObjectIntHashMap<String>(); Map<String,Integer> map = TDecorators.wrap( raw_map ); for ( int i = 0; i < keys.length; i++ ) { keys[i] = Integer.toString( vals[i] * 2 ); map.put( keys[i], vals[i] ); } ByteArrayOutputStream baos = new ByteArrayOutputStream(); ObjectOutputStream oos = new ObjectOutputStream( baos ); oos.writeObject( map ); ByteArrayInputStream bias = new ByteArrayInputStream( baos.toByteArray() ); ObjectInputStream ois = new ObjectInputStream( bias ); Map<String,Integer> deserialized = ( Map<String,Integer> ) ois.readObject(); assertEquals( map, deserialized ); }
@SubscribeEvent(priority = EventPriority.LOW)
public void diagnostics(final DiagnosticEvent.Gather event) {
    final TObjectIntHashMap<String> counts = new TObjectIntHashMap<String>();

    final Iterator<Entry<String, ISound>> iterator = this.playingSounds.entrySet().iterator();
    while (iterator.hasNext()) {
        Entry<String, ISound> entry = iterator.next();
        ISound isound = entry.getValue();
        counts.adjustOrPutValue(isound.getSound().getSoundLocation().toString(), 1, 1);
    }

    final ArrayList<String> results = new ArrayList<String>();
    final TObjectIntIterator<String> itr = counts.iterator();
    while (itr.hasNext()) {
        itr.advance();
        results.add(String.format(TextFormatting.GOLD + "%s: %d", itr.key(), itr.value()));
    }

    Collections.sort(results);
    event.output.addAll(results);
}
/**
 * This method represents the last step that's executed when processing a query. A list of partial results
 * (DistanceElements) returned by the lookup stage is processed based on some internal method and finally
 * converted to a list of ScoreElements. The filtered list of ScoreElements is returned by the feature module
 * during retrieval.
 *
 * @param partialResults List of partial results returned by the lookup stage.
 * @param qc             A ReadableQueryConfig object that contains query-related configuration parameters.
 * @return List of final results. Is supposed to be de-duplicated and the number of items should not exceed
 *         the number of items per module.
 */
@Override
protected List<ScoreElement> postprocessQuery(List<SegmentDistanceElement> partialResults, ReadableQueryConfig qc) {
    /* Prepare helper data-structures. */
    final List<ScoreElement> results = new ArrayList<>();
    final TObjectIntHashMap<String> scoreMap = new TObjectIntHashMap<>();

    /* Set QueryConfig and extract correspondence function. */
    qc = this.setQueryConfig(qc);
    final CorrespondenceFunction correspondence = qc.getCorrespondenceFunction().orElse(this.linearCorrespondence);

    for (DistanceElement hit : partialResults) {
        if (hit.getDistance() < this.distanceThreshold) {
            scoreMap.adjustOrPutValue(hit.getId(), 1, scoreMap.get(hit.getId()) / 2);
        }
    }

    /* Prepare final result-set. */
    scoreMap.forEachEntry((key, value) -> results.add(new SegmentScoreElement(key, 1.0 - 1.0 / value)));
    ScoreElement.filterMaximumScores(results.stream());
    return results;
}
@Override
public void addDocument(Document doc) throws IOException {
    // add the document
    lengths.get(document).add(doc.identifier, doc.terms.size());

    // now deal with fields:
    TObjectIntHashMap<Bytes> currentFieldLengths = new TObjectIntHashMap<>(doc.tags.size());
    for (Tag tag : doc.tags) {
        int len = tag.end - tag.begin;
        currentFieldLengths.adjustOrPutValue(new Bytes(ByteUtil.fromString(tag.name)), len, len);
    }

    for (Bytes field : currentFieldLengths.keySet()) {
        if (!lengths.containsKey(field)) {
            lengths.put(field, new FieldLengthList(field));
        }
        lengths.get(field).add(doc.identifier, currentFieldLengths.get(field));
    }
}
@Override
public TObjectIntHashMap<ItemVO<Integer, Integer>> extractData(ResultSet rs) {
    TObjectIntHashMap<ItemVO<Integer, Integer>> map = new TObjectIntHashMap<>();
    int itemId, itemTypeId, tenantId, cnt = 0;
    try {
        while (rs.next()) {
            itemId = rs.getInt(BaseActionDAO.DEFAULT_ITEM_COLUMN_NAME);
            itemTypeId = rs.getInt(BaseActionDAO.DEFAULT_ITEM_TYPE_COLUMN_NAME);
            tenantId = rs.getInt(BaseActionDAO.DEFAULT_TENANT_COLUMN_NAME);
            cnt = rs.getInt("cnt");
            map.put(new ItemVO<>(tenantId, itemId, itemTypeId), cnt);
        }
        // optimization: replaces former adjustSupport method
        minSupp = cnt;
    } catch (SQLException e) {
        logger.error("An error occurred during ResultSet extraction", e);
        throw new RuntimeException(e);
    }
    return map;
}
@Override
public ArrayList<TupleVO> getTuples(final int support) throws Exception {
    final ArrayList<TupleVO> ret = new ArrayList<>();

    // Set<ItemVO<Integer, Integer>> mainKeys = map.keySet();
    Set<Entry<ItemVO<Integer, Integer>, TObjectIntHashMap<ItemVO<Integer, Integer>>>> entries = map.entrySet();

    for (final Entry<ItemVO<Integer, Integer>, TObjectIntHashMap<ItemVO<Integer, Integer>>> entry : entries) {
        entry.getValue().forEachEntry(new TObjectIntProcedure<ItemVO<Integer, Integer>>() {
            public boolean execute(ItemVO<Integer, Integer> a, int b) {
                if (b >= support) {
                    ret.add(new TupleVO(entry.getKey(), a, b));
                }
                return true;
            }
        });
    }
    return ret;
}
public TObjectIntHashMap<ItemVO<Integer, Integer>> extractData(ResultSet rs) {
    TObjectIntHashMap<ItemVO<Integer, Integer>> map = new TObjectIntHashMap<ItemVO<Integer, Integer>>();
    int itemId, itemTypeId, tenantId, cnt = 0;
    try {
        while (rs.next()) {
            itemId = rs.getInt(BaseActionDAO.DEFAULT_ITEM_COLUMN_NAME);
            itemTypeId = rs.getInt(BaseActionDAO.DEFAULT_ITEM_TYPE_COLUMN_NAME);
            tenantId = rs.getInt(BaseActionDAO.DEFAULT_TENANT_COLUMN_NAME);
            cnt = rs.getInt("cnt");
            map.put(new ItemVO<Integer, Integer>(tenantId, itemId, itemTypeId), cnt);
        }
        // optimization: replaces former adjustSupport method
        minSupp = cnt;
    } catch (SQLException e) {
        logger.error("An error occurred during ResultSet extraction", e);
        throw new RuntimeException(e);
    }
    return map;
}
public void testTPrimitiveHashMap() {
    TObjectIntMap<String> map = new TObjectIntHashMap<String>();

    // Add 5, remove the first four, repeat
    String[] to_remove = new String[ 4 ];
    int batch_index = 0;
    for( String s : Constants.STRING_OBJECTS ) {
        if ( batch_index < 4 ) {
            to_remove[ batch_index ] = s;
        }
        map.put( s, s.length() );

        batch_index++;
        if ( batch_index == 5 ) {
            for( String s_remove : to_remove ) {
                map.remove( s_remove );
            }
            batch_index = 0;
        }
    }
}
public void testTrove2PrimitiveHashMap() {
    gnu.trove.TObjectIntHashMap<String> map = new gnu.trove.TObjectIntHashMap<String>();

    // Add 5, remove the first four, repeat
    String[] to_remove = new String[ 4 ];
    int batch_index = 0;
    for( String s : Constants.STRING_OBJECTS ) {
        if ( batch_index < 4 ) {
            to_remove[ batch_index ] = s;
        }
        map.put( s, s.length() );

        batch_index++;
        if ( batch_index == 5 ) {
            for( String s_remove : to_remove ) {
                map.remove( s_remove );
            }
            batch_index = 0;
        }
    }
}
private void readObject (ObjectInputStream in) throws IOException, ClassNotFoundException {
    lock = new ReentrantReadWriteLock();
    lock.writeLock().lock();
    try {
        int version = in.readInt();
        int size = in.readInt();
        entries = new ArrayList(size);
        map = new TObjectIntHashMap(size);
        for (int i = 0; i < size; i++) {
            Object o = in.readObject();
            map.put(o, i);
            entries.add(o);
        }
        growthStopped = in.readBoolean();
        entryClass = (Class) in.readObject();
        if (version > 0) {
            // instance id added in version 1
            instanceId = (VMID) in.readObject();
        }
    } finally {
        lock.writeLock().unlock();
    }
}
@Test
public void testCollectPubDates() throws Exception {
    final List<ScoredDocument> fakeData = mkRankedList(
        SD("doc0", 1.0),
        SD("doc-missing", 0.5),
        SD("doc1", 0.3)
    );
    Assert.assertEquals(3, fakeData.size());

    TObjectIntHashMap<String> pubdates = new TObjectIntHashMap<String>();
    pubdates.put("doc17", 1777);
    pubdates.put("doc0", 1888);
    pubdates.put("doc1", 1999);

    List<ScoredDate> dates = ExtractDates.toDates(ExtractDates.collectPubDates(fakeData, pubdates));
    Assert.assertEquals(2, dates.size());

    Assert.assertEquals(1.0, dates.get(0).score, 0.001);
    Assert.assertEquals(0.3, dates.get(1).score, 0.001);
    Assert.assertEquals(1888, dates.get(0).year);
    Assert.assertEquals(1999, dates.get(1).year);
}
private void readObject (ObjectInputStream in) throws IOException, ClassNotFoundException {
    // in.defaultReadObject ();
    int version = in.readInt ();  // version
    int numVariables = in.readInt ();
    var2idx = new TObjectIntHashMap (numVariables);
    for (int vi = 0; vi < numVariables; vi++) {
        Variable var = (Variable) in.readObject ();
        var2idx.put (var, vi);
    }
    int numRows = in.readInt ();
    values = new ArrayList (numRows);
    for (int ri = 0; ri < numRows; ri++) {
        Object[] row = (Object[]) in.readObject ();
        values.add (row);
    }
    scale = (version >= 2) ? in.readDouble () : 1.0;
}
@Test
public void testAddRemoveSearchMultiResultMap3() {
    DigramStringSearchHistogram digramHistogram = new DigramStringSearchHistogram();
    String desiredResultId = "result1";
    String desiredResultId2 = "result2";

    digramHistogram.add("word1", "word2", desiredResultId);
    digramHistogram.add("word1", "word2", desiredResultId);
    digramHistogram.add("word1", "word3", desiredResultId2);

    digramHistogram.remove("word1", "word2", desiredResultId);

    TObjectIntHashMap<String> results = digramHistogram.getSearchResults(toSet("word1 word2"), 1);
    assertEquals(0, results.size());

    results = digramHistogram.getSearchResults(toSet("word1 word3"), 1);
    assertEquals(1, results.size());                 // only 1 result returned
    assertTrue(results.contains(desiredResultId2));  // desired result key contained
    assertEquals(1, results.get(desiredResultId2));  // desired result has correct weight
}
public static void writeData(String file, TObjectIntHashMap<String> data) {
    BufferedWriter writer;
    try {
        writer = new BufferedWriter(new FileWriter(file));
        // one "id<TAB>key" line per entry
        for (String s : data.keySet()) {
            writer.append(data.get(s) + "\t" + s);
            writer.newLine();
        }
        writer.flush();
        writer.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
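writeData emits one id<TAB>key line per entry. A hypothetical counterpart that parses such a file back into a map could look like the sketch below; the readData name and its error handling are illustrative and not part of the original code, and the required imports (BufferedReader, FileReader, and the Trove map class) depend on the library version used here.

// Hypothetical reader for files produced by writeData: each line is "id<TAB>key".
public static TObjectIntHashMap<String> readData(String file) {
    TObjectIntHashMap<String> data = new TObjectIntHashMap<String>();
    try {
        BufferedReader br = new BufferedReader(new FileReader(file));
        String line = null;
        while ((line = br.readLine()) != null) {
            String[] vals = line.split("\t");              // vals[0] = id, vals[1] = key
            data.put(vals[1], Integer.parseInt(vals[0]));
        }
        br.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
    return data;
}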
public static void write(TObjectIntHashMap<String> uri_id, boolean append, String outFile) {
    BufferedWriter writer;
    try {
        writer = new BufferedWriter(new FileWriter(outFile, append));
        for (String s : uri_id.keySet()) {
            writer.append(uri_id.get(s) + "\t" + s);
            writer.newLine();
        }
        writer.flush();
        writer.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
public static void computeIndex(String file, TObjectIntHashMap<String> value_id, HashSet<String> labels) {
    if (new File(file).exists()) {
        try {
            BufferedReader br = new BufferedReader(new FileReader(file));
            String line = null;
            int index = 1;
            while ((line = br.readLine()) != null) {
                String[] vals = line.split("\t");
                for (String s : labels) {
                    value_id.put(s + "-" + vals[0], index++);
                    value_id.put(s + "-inv_" + vals[0], index++);
                }
            }
            br.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
public static void loadInputMetadataID(String metadata_file_index, String input_uri, TIntIntHashMap input_metadata_id) {
    TObjectIntHashMap<String> metadata_index = new TObjectIntHashMap<String>();
    loadIndex(metadata_file_index, metadata_index);

    try {
        BufferedReader br = new BufferedReader(new FileReader(input_uri));
        String line = null;
        while ((line = br.readLine()) != null) {
            String[] vals = line.split("\t");
            // only map ids whose URI actually exists in the metadata index
            if (metadata_index.containsKey(vals[1]))
                input_metadata_id.put(Integer.parseInt(vals[0]), metadata_index.get(vals[1]));
        }
        br.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
/**
 * Load properties from XML file
 */
private void loadProps() {
    props = new NTree();
    props_index = new TObjectIntHashMap<String>();

    try {
        // load properties map from XML file
        XMLUtils.parseXMLFile(propsFile, props_index, props, inverseProps);
        logger.debug("Properties tree loading.");
        // write properties index file
        TextFileUtils.writeData(propsIndexFile, props_index);
    } catch (Exception e1) {
        e1.printStackTrace();
    }
}
/**
 * Constructor
 */
public MultiPropQueryExecutor(String uri, int uri_id, NTree props, TObjectIntHashMap<String> props_index,
        String graphURI, String endpoint, SynchronizedCounter counter, TObjectIntHashMap<String> metadata_index,
        TextFileManager textWriter, ItemFileManager fileManager, boolean inverseProps, boolean caching) {
    this.uri = uri;
    this.props = props;
    this.props_index = props_index;
    this.graphURI = graphURI;
    this.endpoint = endpoint;
    this.counter = counter;
    this.textWriter = textWriter;
    this.metadata_index = metadata_index;
    this.model = null;
    this.fileManager = fileManager;
    this.inverseProps = inverseProps;
    this.itemTree = new PropertyIndexedItemTree(uri_id);
    this.caching = caching;
}