private static void loadSparseDoublePartition(SparseDoubleModel model, FSDataInputStream input,
    ModelPartitionMeta partMeta) throws IOException {
  int rowNum = input.readInt();
  int rowId = 0;
  int nnz = 0;
  int totalNNZ = 0;
  Int2DoubleOpenHashMap row = null;

  for (int i = 0; i < rowNum; i++) {
    rowId = input.readInt();
    nnz = input.readInt();
    // Estimate the full row's non-zero count from this partition's density,
    // so the row map can be pre-sized once.
    totalNNZ = (int) (nnz * (model.col) / (partMeta.getEndCol() - partMeta.getStartCol()));
    row = model.getRow(rowId, partMeta.getPartId(), totalNNZ);
    for (int j = 0; j < nnz; j++) {
      row.put(input.readInt(), input.readDouble());
    }
  }
}
/**
 * Load a sparse double model into int->double maps.
 *
 * @param modelDir model save directory path
 * @return model data
 */
public static Int2DoubleOpenHashMap[] loadToDoubleMaps(String modelDir, Configuration conf)
    throws IOException {
  // Load model meta
  ModelFilesMeta meta = getMeta(modelDir, conf);

  // Check row type
  if (meta.getRowType() != SPARSE_DOUBLE) {
    throw new IOException("model row type is not sparse double, you should check it");
  }

  // Load model
  SparseDoubleModel model = new SparseDoubleModel(meta.getRow(), meta.getCol());
  loadModel(modelDir, model, meta, conf);

  return model.getModel();
}
private static void convertSparseDoubleModel(Configuration conf, FSDataOutputStream output,
    String modelInputDir, ModelLineConvert lineConvert) throws IOException {
  Int2DoubleOpenHashMap[] data = ModelLoader.loadToDoubleMaps(modelInputDir, conf);
  for(int i = 0; i < data.length; i++) {
    Int2DoubleOpenHashMap row = data[i];
    data[i] = null; // Drop the array reference early to reduce peak memory use.
    if(row == null) {
      continue;
    }
    lineConvert.convertRowIndex(output, i);
    int[] indexes = row.keySet().toIntArray();
    double[] values = row.values().toDoubleArray();
    row = null;
    // Sort both arrays by index so the output is written in index order.
    Sort.quickSort(indexes, values, 0, indexes.length - 1);
    for(int j = 0; j < indexes.length; j++) {
      lineConvert.convertDouble(output, indexes[j], values[j]);
    }
  }
}
private double dot(SparseDoubleVector other) {
  double ret = 0.0;

  // Iterate over the smaller map and probe the larger one: O(min(|a|, |b|)).
  Int2DoubleOpenHashMap smallMap = this.hashMap;
  Int2DoubleOpenHashMap largeMap = other.hashMap;
  if (smallMap.size() > largeMap.size()) {
    smallMap = other.hashMap;
    largeMap = this.hashMap;
  }

  ObjectIterator<Int2DoubleMap.Entry> iter = smallMap.int2DoubleEntrySet().fastIterator();
  while (iter.hasNext()) {
    Int2DoubleMap.Entry entry = iter.next();
    if (largeMap.containsKey(entry.getIntKey())) {
      ret += entry.getDoubleValue() * largeMap.get(entry.getIntKey());
    }
  }
  return ret;
}
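// A minimal, self-contained sketch of the same "iterate the smaller map" dot-product idiom
// using plain fastutil maps, for readers without the SparseDoubleVector class above. The
// class and method names here are hypothetical; only fastutil is assumed on the classpath.
import it.unimi.dsi.fastutil.ints.Int2DoubleMap;
import it.unimi.dsi.fastutil.ints.Int2DoubleOpenHashMap;

public final class SparseDotExample {
  static double sparseDot(Int2DoubleOpenHashMap a, Int2DoubleOpenHashMap b) {
    Int2DoubleOpenHashMap small = a.size() <= b.size() ? a : b;
    Int2DoubleOpenHashMap large = small == a ? b : a;
    double sum = 0.0;
    for (Int2DoubleMap.Entry e : small.int2DoubleEntrySet()) {
      // get() returns the default value (0.0) for absent keys, so missing keys add nothing.
      sum += e.getDoubleValue() * large.get(e.getIntKey());
    }
    return sum;
  }

  public static void main(String[] args) {
    Int2DoubleOpenHashMap a = new Int2DoubleOpenHashMap();
    a.put(1, 2.0);
    a.put(7, 3.0);
    Int2DoubleOpenHashMap b = new Int2DoubleOpenHashMap();
    b.put(7, 4.0);
    System.out.println(sparseDot(a, b)); // only key 7 overlaps: prints 12.0
  }
}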
@Override
public TIntDoubleVector filter(double x) {
  Int2DoubleOpenHashMap newMap = new Int2DoubleOpenHashMap();

  ObjectIterator<Int2DoubleMap.Entry> iter = hashMap.int2DoubleEntrySet().fastIterator();
  while (iter.hasNext()) {
    Int2DoubleMap.Entry entry = iter.next();
    double value = entry.getDoubleValue();
    // Keep only entries whose absolute value exceeds the threshold.
    if (Math.abs(value) > x) {
      newMap.put(entry.getIntKey(), value);
    }
  }

  SparseDoubleVector vector = new SparseDoubleVector(dim, newMap);
  vector.setRowId(rowId).setMatrixId(matrixId).setClock(clock);
  return vector;
}
@Test
public void testWriteTo() throws Exception {
  ByteBuf buf = Unpooled.buffer(16);
  buf.writeDouble(0.00);
  buf.writeDouble(1.00);
  buf.writeDouble(2.00);
  serverSparseDoubleRow.update(RowType.T_DOUBLE_DENSE, buf, 3);

  DataOutputStream out = new DataOutputStream(new FileOutputStream("data"));
  serverSparseDoubleRow.writeTo(out);
  out.close();

  DataInputStream in = new DataInputStream(new FileInputStream("data"));
  assertEquals(in.readInt(), 3);
  in.close();

  Int2DoubleOpenHashMap hashMap = new Int2DoubleOpenHashMap();
  hashMap.addTo(0, 0.00);
  hashMap.addTo(1, 1.00);
  hashMap.addTo(2, 2.00);
  assertEquals(serverSparseDoubleRow.getData(), hashMap);
}
@Test
public void testUpdateDoubleSparseToDoubleSparse() throws Exception {
  ServerSparseDoubleRow serverSparseDoubleRow = new ServerSparseDoubleRow(rowId, startCol, endCol);
  ByteBuf buf = Unpooled.buffer(16);
  buf.writeInt(0);
  buf.writeDouble(0.00);
  buf.writeInt(1);
  buf.writeDouble(1.00);
  buf.writeInt(2);
  buf.writeDouble(2.00);
  rowUpdater.updateDoubleSparseToDoubleSparse(3, buf, serverSparseDoubleRow);

  Int2DoubleOpenHashMap hashMap = new Int2DoubleOpenHashMap();
  hashMap.addTo(0, 0.00);
  hashMap.addTo(1, 1.00);
  hashMap.addTo(2, 2.00);
  assertEquals(serverSparseDoubleRow.getData(), hashMap);
}
/**
 * Distributed matrix set.
 *
 * @param a Row or column index.
 * @param b Row or column index.
 * @param v New value to set.
 */
private void matrixSet(int a, int b, double v) {
  // Remote set on the primary node (where the given row or column is stored locally).
  ignite().compute(getClusterGroupForGivenKey(CACHE_NAME, a)).run(() -> {
    IgniteCache<RowColMatrixKey, Map<Integer, Double>> cache =
        Ignition.localIgnite().getOrCreateCache(CACHE_NAME);

    // Local get.
    Map<Integer, Double> map = cache.localPeek(getCacheKey(a), CachePeekMode.PRIMARY);

    if (map == null) {
      map = cache.get(getCacheKey(a)); // Remote entry get.
      if (map == null)
        map = acsMode == SEQUENTIAL_ACCESS_MODE ? new Int2DoubleRBTreeMap() : new Int2DoubleOpenHashMap();
    }

    if (v != 0.0)
      map.put(b, v);
    else if (map.containsKey(b))
      map.remove(b); // Setting zero removes the entry to keep the map sparse.

    // Local put.
    cache.put(getCacheKey(a), map);
  });
}
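// The access-mode switch above trades ordering for speed: Int2DoubleRBTreeMap keeps keys
// sorted, which suits sequential scans, while Int2DoubleOpenHashMap offers constant-time
// random access with no ordering guarantee. A minimal illustration (fastutil only; the
// class name is hypothetical):
import it.unimi.dsi.fastutil.ints.Int2DoubleOpenHashMap;
import it.unimi.dsi.fastutil.ints.Int2DoubleRBTreeMap;

public final class AccessModeExample {
  public static void main(String[] args) {
    Int2DoubleRBTreeMap sorted = new Int2DoubleRBTreeMap();
    Int2DoubleOpenHashMap hashed = new Int2DoubleOpenHashMap();
    for (int k : new int[] {5, 1, 3}) {
      sorted.put(k, k * 1.0);
      hashed.put(k, k * 1.0);
    }
    System.out.println(sorted.keySet()); // {1, 3, 5}: iteration follows key order
    System.out.println(hashed.keySet()); // hash order: no ordering guarantee
  }
}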
private Int2DoubleMap getProductMap(int uidx) {
  Int2DoubleOpenHashMap productMap = new Int2DoubleOpenHashMap();
  productMap.defaultReturnValue(0.0);

  if (data.useIteratorsPreferentially()) {
    IntIterator iidxs = data.getUidxIidxs(uidx);
    DoubleIterator ivs = data.getUidxVs(uidx);
    while (iidxs.hasNext()) {
      int iidx = iidxs.nextInt();
      double iv = ivs.nextDouble();
      IntIterator vidxs = data.getIidxUidxs(iidx);
      DoubleIterator vvs = data.getIidxVs(iidx);
      while (vidxs.hasNext()) {
        productMap.addTo(vidxs.nextInt(), iv * vvs.nextDouble());
      }
    }
  } else {
    data.getUidxPreferences(uidx)
        .forEach(ip -> data.getIidxPreferences(ip.v1)
            .forEach(up -> productMap.addTo(up.v1, ip.v2 * up.v2)));
  }

  // A user is not its own neighbor.
  productMap.remove(uidx);

  return productMap;
}
@Override
public SparseFloatVector newNumberVector(Int2DoubleOpenHashMap dvalues, int maxdim) {
  int[] indexes = new int[dvalues.size()];
  float[] values = new float[dvalues.size()];
  // Import and sort the indexes
  ObjectIterator<Int2DoubleMap.Entry> iter = dvalues.int2DoubleEntrySet().fastIterator();
  for(int i = 0; iter.hasNext(); i++) {
    indexes[i] = iter.next().getIntKey();
  }
  Arrays.sort(indexes);
  // Import the values accordingly
  for(int i = 0; i < dvalues.size(); i++) {
    values[i] = (float) dvalues.get(indexes[i]);
  }
  return new SparseFloatVector(indexes, values, maxdim);
}
@Test
public void sparseAngleDegenerate() {
  NumberVector o1 = new SparseDoubleVector(new double[] {});

  Int2DoubleOpenHashMap v2 = new Int2DoubleOpenHashMap();
  v2.put(3, 0.);
  v2.put(4, 0.);
  v2.put(42, 0.);
  NumberVector o2 = new SparseDoubleVector(v2, 100);

  Int2DoubleOpenHashMap v3 = new Int2DoubleOpenHashMap();
  v3.put(15, 0.);
  v3.put(5, 1.);
  NumberVector v1 = new SparseDoubleVector(v3, 100);

  assertEquals("Angle not exact.", 0., VectorUtil.cosAngle(o1, o1), 0.);
  assertEquals("Angle not exact.", 0., VectorUtil.cosAngle(o1, o2), 0.);
  assertEquals("Angle not exact.", 0., VectorUtil.cosAngle(o2, o2), 0.);
  assertEquals("Angle not exact.", 0., VectorUtil.cosAngle(o1, v1), 0.);
  assertEquals("Angle not exact.", 0., VectorUtil.cosAngle(o2, v1), 0.);
  assertEquals("Angle not exact.", 1., VectorUtil.cosAngle(v1, v1), 0.);
}
/**
 * Get a model row by its row index.
 *
 * @param rowId row index
 * @param partId partition index
 * @return a model row
 */
public Int2DoubleOpenHashMap getRow(int rowId, int partId) {
  synchronized (this) {
    if (tempModel.get(rowId) == null) {
      tempModel.put(rowId, new HashMap<>());
      tempModel.get(rowId).put(partId, new Int2DoubleOpenHashMap());
    } else if (tempModel.get(rowId).get(partId) == null) {
      tempModel.get(rowId).put(partId, new Int2DoubleOpenHashMap());
    }
    return tempModel.get(rowId).get(partId);
  }
}
/**
 * Get a model row, pre-sizing it for the estimated number of non-zero elements.
 *
 * @param rowId row index
 * @param partId partition index
 * @param nnz estimated number of non-zero elements
 * @return a model row
 */
public Int2DoubleOpenHashMap getRow(int rowId, int partId, int nnz) {
  synchronized (this) {
    if (tempModel.get(rowId) == null) {
      tempModel.put(rowId, new HashMap<>());
      tempModel.get(rowId).put(partId, new Int2DoubleOpenHashMap(nnz));
    } else if (tempModel.get(rowId).get(partId) == null) {
      tempModel.get(rowId).put(partId, new Int2DoubleOpenHashMap(nnz));
    }
    return tempModel.get(rowId).get(partId);
  }
}
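// Why pre-size: Int2DoubleOpenHashMap(expected) reserves capacity for `expected` entries up
// front, so a bulk load avoids repeated rehashing. A minimal sketch, assuming only fastutil;
// the class name and the NNZ constant are illustrative:
import it.unimi.dsi.fastutil.ints.Int2DoubleOpenHashMap;

public final class PresizeExample {
  public static void main(String[] args) {
    final int NNZ = 1_000_000;
    Int2DoubleOpenHashMap presized = new Int2DoubleOpenHashMap(NNZ); // capacity reserved once
    for (int i = 0; i < NNZ; i++) {
      presized.put(i, i * 0.5); // no rehashing: the table already fits NNZ entries
    }
    System.out.println(presized.size());
  }
}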
public static Int2DoubleOpenHashMap loadSparseDoubleRowFromPartition(FSDataInputStream input,
    ModelPartitionMeta partMeta, int rowId) throws IOException {
  RowOffset rowOffset = partMeta.getRowMetas().get(rowId);
  input.seek(rowOffset.getOffset());
  Preconditions.checkState(input.readInt() == rowId);

  int num = input.readInt();
  Int2DoubleOpenHashMap row = new Int2DoubleOpenHashMap();
  for (int i = 0; i < num; i++) {
    row.put(input.readInt(), input.readDouble());
  }
  return row;
}
/**
 * Initialize the vector with the given dimension and capacity.
 *
 * @param dim the dimension of the vector
 * @param capacity the initial capacity of the underlying map; if non-positive, INIT_SIZE is used
 */
public SparseDoubleVector(int dim, int capacity) {
  super();
  if (capacity > 0) {
    this.hashMap = new Int2DoubleOpenHashMap(capacity);
  } else {
    this.hashMap = new Int2DoubleOpenHashMap(INIT_SIZE);
  }
  this.dim = dim;
}
@Override
public void deserialize(ByteBuf buf) {
  int dim = buf.readInt();
  int length = buf.readInt();
  Int2DoubleOpenHashMap data = new Int2DoubleOpenHashMap(length);
  // Read `length` (index, value) pairs from the buffer.
  IntStream.range(0, length).forEach(i -> data.put(buf.readInt(), buf.readDouble()));
  this.dim = dim;
  this.hashMap = data;
}
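// For reference, a serialize counterpart consistent with the deserialize above would write
// the dimension, the entry count, and then (index, value) pairs. This is a hypothetical
// sketch, not the project's actual method:
@Override
public void serialize(ByteBuf buf) {
  buf.writeInt(dim);
  buf.writeInt(hashMap.size());
  ObjectIterator<Int2DoubleMap.Entry> iter = hashMap.int2DoubleEntrySet().fastIterator();
  while (iter.hasNext()) {
    Int2DoubleMap.Entry entry = iter.next();
    buf.writeInt(entry.getIntKey());
    buf.writeDouble(entry.getDoubleValue());
  }
}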
private double[] getValuesOfServerSparseDoubleRow(ServerRow row, int[] index) {
  Int2DoubleOpenHashMap data = ((ServerSparseDoubleRow) row).getData();
  double[] values = new double[index.length];
  for (int i = 0; i < index.length; i++) {
    values[i] = data.get(index[i]);
  }
  return values;
}
/**
 * Merge this sparse double vector split into a map.
 *
 * @param indexToValueMap an index->value map
 */
public void mergeTo(Int2DoubleOpenHashMap indexToValueMap) {
  lock.readLock().lock(); // Acquire before try so finally never unlocks a lock we do not hold.
  try {
    indexToValueMap.putAll(hashMap);
  } finally {
    lock.readLock().unlock();
  }
}
@Test
public void testUpdateDoubleDenseToDoubleSparse() throws Exception {
  ServerSparseDoubleRow serverSparseDoubleRow = new ServerSparseDoubleRow(rowId, startCol, endCol);
  ByteBuf buf = Unpooled.buffer(16);
  buf.writeDouble(0.00);
  buf.writeDouble(1.00);
  buf.writeDouble(2.00);
  rowUpdater.updateDoubleDenseToDoubleSparse(3, buf, serverSparseDoubleRow);

  Int2DoubleOpenHashMap hashMap = new Int2DoubleOpenHashMap();
  hashMap.addTo(0, 0.00);
  hashMap.addTo(1, 1.00);
  hashMap.addTo(2, 2.00);
  assertEquals(serverSparseDoubleRow.getData(), hashMap);
}
public LearningInstance featurize(JsonNode entity, boolean update) {
  Map<String, List<Feature>> feaMap = new HashMap<>();
  for (FeatureExtractor extractor : featureExtractors) {
    feaMap.putAll(extractor.extract(entity, update, indexSpace));
  }

  Int2DoubleMap feas = new Int2DoubleOpenHashMap();
  for (String fea : features) {
    if (feaMap.containsKey(fea)) {
      for (Feature feature : feaMap.get(fea)) {
        feas.put(feature.getIndex(), feature.getValue());
      }
    }
  }

  // Label and weight come from the entity itself if present, otherwise from the extractors.
  double label = StandardLearningInstance.defaultLabel;
  double weight = StandardLearningInstance.defaultWeight;
  if (entity.has(labelName)) {
    label = entity.get(labelName).asDouble();
  } else if (feaMap.containsKey(labelName)) {
    label = feaMap.get(labelName).get(0).getValue();
  }
  if (entity.has(weightName)) {
    weight = entity.get(weightName).asDouble();
  } else if (feaMap.containsKey(weightName)) {
    weight = feaMap.get(weightName).get(0).getValue();
  }

  String group = null;
  if (groupKeys != null && groupKeys.size() > 0) {
    group = FeatureExtractorUtilities.composeConcatenatedKey(entity, groupKeys);
  }
  return new StandardLearningInstance(feas, label, weight, group);
}
@Override
public Int2DoubleMap scores(int node) {
  int[] succs = super.successors(node).toIntArray();
  // Assign a uniformly random score to each successor.
  return new Int2DoubleOpenHashMap(succs, rnd.doubles(succs.length).toArray());
}
public Int2DoubleMap getRow(int i) {
  int size = computeSize();
  Int2DoubleOpenHashMap row = new Int2DoubleOpenHashMap(size);
  for (int j = 0; j < size; j++)
    row.put(j, get(i, j));
  return row;
}
@Override
public Int2DoubleMap scores(int docI) {
  IntSet successors = successors(docI);
  Int2DoubleMap results = new Int2DoubleOpenHashMap(successors.size());
  IntSet catI = page2cat.get(docI);
  for (int docJ : successors) {
    IntSet catJ = page2cat.get(docJ);
    // Score each successor by the negated expectedness of its category overlap.
    results.put(docJ, -expectedness(catI, catJ));
  }
  return results;
}
@Override
public Int2DoubleMap scores(int docI) {
  Int2DoubleMap originalScores = scorer.scores(docI);
  Int2DoubleMap newScores = new Int2DoubleOpenHashMap(originalScores.size());
  ObjectIterator<Entry> iterator = originalScores.int2DoubleEntrySet().iterator();
  while (iterator.hasNext()) {
    Entry entry = iterator.next();
    // Standardize each score: subtract the mean, divide by the standard deviation.
    newScores.put(entry.getIntKey(), (entry.getDoubleValue() - mean) / stddev);
  }
  return newScores;
}
/**
 * Tests the global probability method (BinomialModel::p).
 */
@Test
public void testGlobalModel() {
  Int2DoubleMap expectedGlobalP = new Int2DoubleOpenHashMap();
  expectedGlobalP.put(1, 0.8);
  expectedGlobalP.put(2, 0.2);

  BinomialModel<Integer, Integer, Integer> bm =
      new BinomialModel<>(false, Stream.empty(), preferences, featureData, 0.0);

  assertEquals(expectedGlobalP.keySet(), bm.getFeatures());
  expectedGlobalP.forEach((f, p) -> assertEquals(p, bm.p(f), 0.0001));
}
/**
 * Tests the local (user) probability method with alpha = 0.0.
 */
@Test
public void testLocalModelAlpha00() {
  Int2DoubleMap expectedLocalP = new Int2DoubleOpenHashMap();
  expectedLocalP.put(1, 0.8);
  expectedLocalP.put(2, 0.2);

  checkLocalModel(0.0, expectedLocalP);
}
/**
 * Tests the local (user) probability method with alpha = 0.5.
 */
@Test
public void testLocalModelAlpha05() {
  Int2DoubleMap expectedLocalP = new Int2DoubleOpenHashMap();
  expectedLocalP.put(1, 0.9);
  expectedLocalP.put(2, 0.1);

  checkLocalModel(0.5, expectedLocalP);
}
/**
 * Tests the local (user) probability method with alpha = 1.0.
 */
@Test
public void testLocalModelAlpha10() {
  Int2DoubleMap expectedLocalP = new Int2DoubleOpenHashMap();
  expectedLocalP.put(1, 1.0);

  checkLocalModel(1.0, expectedLocalP);
}
/**
 * Constructor. Chooses between array-based and map-based storage for the norms.
 *
 * @param data preference data
 * @param dense true for array-based calculations, false for map-based
 */
public VectorSimilarity(FastPreferenceData<?, ?> data, boolean dense) {
  this.data = data;
  this.dense = dense;
  if (dense) {
    this.norm2Map = null;
    this.norm2Array = new double[data.numUsers()];
    data.getUidxWithPreferences().forEach(idx -> norm2Array[idx] = getNorm2(idx));
  } else {
    this.norm2Map = new Int2DoubleOpenHashMap();
    this.norm2Array = null;
    norm2Map.defaultReturnValue(0.0);
    data.getUidxWithPreferences().forEach(idx -> norm2Map.put(idx, getNorm2(idx)));
  }
}
@Override
public IntToDoubleFunction similarity(int idx1) {
  // Materialize user idx1's preferences once, so each later comparison is a hash probe.
  Int2DoubleOpenHashMap map = new Int2DoubleOpenHashMap();
  data.getUidxPreferences(idx1).forEach(iv -> map.put(iv.v1, iv.v2));

  double norm2A = norm2Map.get(idx1);

  return idx2 -> {
    double product = data.getUidxPreferences(idx2)
        .mapToDouble(iv -> iv.v2 * map.get(iv.v1))
        .sum();
    return sim(product, norm2A, norm2Map.get(idx2));
  };
}
/**
 * Returns a map of item-score pairs.
 *
 * @param uidx index of the user whose scores are predicted
 * @return a map of item-score pairs
 */
@Override
public Int2DoubleMap getScoresMap(int uidx) {
  Int2DoubleOpenHashMap scoresMap = new Int2DoubleOpenHashMap();
  scoresMap.defaultReturnValue(0.0);
  data.getUidxPreferences(uidx)
      .forEach(jp -> neighborhood.getNeighbors(jp.v1)
          .forEach(is -> {
            double w = pow(is.v2, q);
            scoresMap.addTo(is.v1, w * jp.v2);
          }));

  return scoresMap;
}
/**
 * Returns a map of item-score pairs.
 *
 * @param uidx index of the user whose scores are predicted
 * @return a map of item-score pairs
 */
@Override
public Int2DoubleMap getScoresMap(int uidx) {
  Int2DoubleOpenHashMap scoresMap = new Int2DoubleOpenHashMap();
  scoresMap.defaultReturnValue(0.0);
  neighborhood.getNeighbors(uidx).forEach(vs -> {
    double w = pow(vs.v2, q);
    data.getUidxPreferences(vs.v1).forEach(iv -> {
      double p = w * iv.v2;
      scoresMap.addTo(iv.v1, p);
    });
  });

  return scoresMap;
}
/**
 * Returns a map of item-score pairs.
 *
 * @param uidx index of the user whose scores are predicted
 * @return a map of item-score pairs
 */
@Override
public Int2DoubleMap getScoresMap(int uidx) {
  Int2DoubleOpenHashMap scoresMap = new Int2DoubleOpenHashMap();
  for (Entry<FastRankingRecommender<U, I>, Double> rw : recommenders) {
    double w = rw.getValue();
    rw.getKey().getScoresMap(uidx).int2DoubleEntrySet()
        .forEach(e -> scoresMap.addTo(e.getIntKey(), w * e.getDoubleValue()));
  }
  return scoresMap;
}
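// The getScoresMap variants above all rely on the same accumulation idiom:
// Int2DoubleOpenHashMap.addTo(key, delta) adds delta to the current value (starting from the
// default return value when the key is absent), avoiding a get-then-put pair per update.
// A minimal, self-contained sketch, assuming only fastutil; the keys and scores are made up:
import it.unimi.dsi.fastutil.ints.Int2DoubleOpenHashMap;

public final class AddToExample {
  public static void main(String[] args) {
    Int2DoubleOpenHashMap scores = new Int2DoubleOpenHashMap();
    scores.defaultReturnValue(0.0); // absent keys accumulate from 0.0
    scores.addTo(7, 0.5);  // 7 -> 0.5
    scores.addTo(7, 0.25); // 7 -> 0.75, one hash lookup instead of get() + put()
    scores.addTo(3, 1.0);  // 3 -> 1.0
    System.out.println(scores); // e.g. {3=>1.0, 7=>0.75}
  }
}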
/**
 * Create a SparseByteVector consisting of double values according to the
 * specified mapping of indices and values.
 *
 * @param values the values to be set as values of the real vector
 * @param dimensionality the dimensionality of this feature vector
 * @throws IllegalArgumentException if the given dimensionality is too small
 *         to cover the given values (i.e., the maximum index of any value not
 *         zero is bigger than the given dimensionality)
 */
public SparseByteVector(Int2DoubleOpenHashMap values, int dimensionality) throws IllegalArgumentException {
  if(values.size() > dimensionality) {
    throw new IllegalArgumentException("values.size() > dimensionality!");
  }
  this.indexes = new int[values.size()];
  this.values = new byte[values.size()];
  // Import and sort the indexes
  {
    ObjectIterator<Int2DoubleMap.Entry> iter = values.int2DoubleEntrySet().fastIterator();
    for(int i = 0; iter.hasNext(); i++) {
      this.indexes[i] = iter.next().getIntKey();
    }
    Arrays.sort(this.indexes);
  }
  // Import the values accordingly
  {
    for(int i = 0; i < values.size(); i++) {
      this.values[i] = (byte) values.get(this.indexes[i]);
    }
  }
  this.dimensionality = dimensionality;
  final int maxdim = getMaxDim();
  if(maxdim > dimensionality) {
    throw new IllegalArgumentException("Given dimensionality " + dimensionality
        + " is too small w.r.t. the given values (occurring maximum: " + maxdim + ").");
  }
}
/**
 * Create a SparseShortVector consisting of double values according to the
 * specified mapping of indices and values.
 *
 * @param values the values to be set as values of the real vector
 * @param dimensionality the dimensionality of this feature vector
 * @throws IllegalArgumentException if the given dimensionality is too small
 *         to cover the given values (i.e., the maximum index of any value not
 *         zero is bigger than the given dimensionality)
 */
public SparseShortVector(Int2DoubleOpenHashMap values, int dimensionality) throws IllegalArgumentException {
  if(values.size() > dimensionality) {
    throw new IllegalArgumentException("values.size() > dimensionality!");
  }
  this.indexes = new int[values.size()];
  this.values = new short[values.size()];
  // Import and sort the indexes
  {
    ObjectIterator<Int2DoubleMap.Entry> iter = values.int2DoubleEntrySet().fastIterator();
    for(int i = 0; iter.hasNext(); i++) {
      this.indexes[i] = iter.next().getIntKey();
    }
    Arrays.sort(this.indexes);
  }
  // Import the values accordingly
  {
    for(int i = 0; i < values.size(); i++) {
      this.values[i] = (short) values.get(this.indexes[i]);
    }
  }
  this.dimensionality = dimensionality;
  final int maxdim = getMaxDim();
  if(maxdim > dimensionality) {
    throw new IllegalArgumentException("Given dimensionality " + dimensionality
        + " is too small w.r.t. the given values (occurring maximum: " + maxdim + ").");
  }
}
@Override
public BitVector newNumberVector(Int2DoubleOpenHashMap values, int maxdim) {
  long[] bits = BitsUtil.zero(maxdim);
  // Set a bit for every non-zero entry.
  for(ObjectIterator<Int2DoubleMap.Entry> iter = values.int2DoubleEntrySet().iterator(); iter.hasNext();) {
    Int2DoubleMap.Entry entry = iter.next();
    if(entry.getDoubleValue() != 0.) {
      BitsUtil.setI(bits, entry.getIntKey());
    }
  }
  return new BitVector(bits, maxdim);
}
/**
 * Create a SparseIntegerVector consisting of double values according to the
 * specified mapping of indices and values.
 *
 * @param values the values to be set as values of the real vector
 * @param dimensionality the dimensionality of this feature vector
 * @throws IllegalArgumentException if the given dimensionality is too small
 *         to cover the given values (i.e., the maximum index of any value not
 *         zero is bigger than the given dimensionality)
 */
public SparseIntegerVector(Int2DoubleOpenHashMap values, int dimensionality) throws IllegalArgumentException {
  if(values.size() > dimensionality) {
    throw new IllegalArgumentException("values.size() > dimensionality!");
  }
  this.indexes = new int[values.size()];
  this.values = new int[values.size()];
  // Import and sort the indexes
  {
    ObjectIterator<Int2DoubleMap.Entry> iter = values.int2DoubleEntrySet().fastIterator();
    for(int i = 0; iter.hasNext(); i++) {
      this.indexes[i] = iter.next().getIntKey();
    }
    Arrays.sort(this.indexes);
  }
  // Import the values accordingly
  {
    for(int i = 0; i < values.size(); i++) {
      this.values[i] = (int) values.get(this.indexes[i]);
    }
  }
  this.dimensionality = dimensionality;
  final int maxdim = getMaxDim();
  if(maxdim > dimensionality) {
    throw new IllegalArgumentException("Given dimensionality " + dimensionality
        + " is too small w.r.t. the given values (occurring maximum: " + maxdim + ").");
  }
}
/**
 * Create a SparseDoubleVector consisting of double values according to the
 * specified mapping of indices and values.
 *
 * @param values the values to be set as values of the real vector
 * @param dimensionality the dimensionality of this feature vector
 * @throws IllegalArgumentException if the given dimensionality is too small
 *         to cover the given values (i.e., the maximum index of any value not
 *         zero is bigger than the given dimensionality)
 */
public SparseDoubleVector(Int2DoubleOpenHashMap values, int dimensionality) throws IllegalArgumentException {
  if(values.size() > dimensionality) {
    throw new IllegalArgumentException("values.size() > dimensionality!");
  }
  this.indexes = new int[values.size()];
  this.values = new double[values.size()];
  // Import and sort the indexes
  {
    ObjectIterator<Int2DoubleMap.Entry> iter = values.int2DoubleEntrySet().fastIterator();
    for(int i = 0; iter.hasNext(); i++) {
      this.indexes[i] = iter.next().getIntKey();
    }
    Arrays.sort(this.indexes);
  }
  // Import the values accordingly
  {
    for(int i = 0; i < values.size(); i++) {
      this.values[i] = values.get(this.indexes[i]);
    }
  }
  this.dimensionality = dimensionality;
  final int maxdim = getMaxDim();
  if(maxdim > dimensionality) {
    throw new IllegalArgumentException("Given dimensionality " + dimensionality
        + " is too small w.r.t. the given values (occurring maximum: " + maxdim + ").");
  }
}
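// Usage sketch for the constructor above (assumes ELKI's SparseDoubleVector and fastutil on
// the classpath; the indices and values are illustrative only). Indices must fit within the
// given dimensionality, otherwise the constructor throws IllegalArgumentException.
public static SparseDoubleVector example() {
  Int2DoubleOpenHashMap values = new Int2DoubleOpenHashMap();
  values.put(2, 0.5);    // dimension 2 -> 0.5
  values.put(40, -1.25); // dimension 40 -> -1.25
  // A 100-dimensional vector with two explicit non-zero entries; all others are 0.
  return new SparseDoubleVector(values, 100);
}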
/**
 * Project a number vector to the specified attributes.
 *
 * @param v a NumberVector to project
 * @param selectedAttributes the attributes selected for projection
 * @param factory Vector factory
 * @param <V> Vector type
 * @return a new NumberVector as a projection on the specified attributes
 */
public static <V extends NumberVector> V project(V v, long[] selectedAttributes, NumberVector.Factory<V> factory) {
  int card = BitsUtil.cardinality(selectedAttributes);
  if(factory instanceof SparseNumberVector.Factory) {
    final SparseNumberVector.Factory<?> sfactory = (SparseNumberVector.Factory<?>) factory;
    Int2DoubleOpenHashMap values = new Int2DoubleOpenHashMap(card, .8f);
    for(int d = BitsUtil.nextSetBit(selectedAttributes, 0); d >= 0; d = BitsUtil.nextSetBit(selectedAttributes, d + 1)) {
      if(v.doubleValue(d) != 0.0) {
        values.put(d, v.doubleValue(d));
      }
    }
    // We can't avoid this cast, because Java doesn't know that V is a
    // SparseNumberVector:
    @SuppressWarnings("unchecked")
    V projectedVector = (V) sfactory.newNumberVector(values, card);
    return projectedVector;
  }
  else {
    double[] newAttributes = new double[card];
    int i = 0;
    for(int d = BitsUtil.nextSetBit(selectedAttributes, 0); d >= 0; d = BitsUtil.nextSetBit(selectedAttributes, d + 1)) {
      newAttributes[i] = v.doubleValue(d);
      i++;
    }
    return factory.newNumberVector(newAttributes);
  }
}