Python numpy module, nans() example source code

We extracted the following 3 code examples from open-source Python projects to illustrate how NaN values are used with numpy. Note that numpy provides no nans() function; these snippets work with numpy.nan, which their comments and docstrings refer to informally as "nans".
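
The usual idioms for creating and detecting NaN values with numpy look like the sketch below (shapes and data are arbitrary examples):

import numpy as np

# Build an array filled with NaN; np.full is the idiomatic stand-in for a
# hypothetical np.nans() constructor.
nan_block = np.full((3, 5), np.nan)

# Equivalent two-step form often seen in older code.
nan_block2 = np.empty((3, 5))
nan_block2[:] = np.nan

# Detect NaNs element-wise and drop rows that contain any.
data = np.array([[1.0, np.nan], [2.0, 3.0]])
clean_rows = data[~np.isnan(data).any(axis=1)]  # keeps only the row [2.0, 3.0]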

Project: orange3-recommendation    Author: biolab    | Project source | File source
# Imports required by this excerpt
import math

import numpy as np


def _compute_extra_terms(Y, W, items_u, trustees_u):
    # Implicit information
    norm_Iu = math.sqrt(len(items_u))

    # TODO: Clean this. Hint: np.nans
    y_term = 0
    if norm_Iu > 0:
        y_sum = np.sum(Y[items_u, :], axis=0)
        y_term = y_sum / norm_Iu

    # Trust information
    w_term = 0
    norm_Tu = math.sqrt(len(trustees_u))
    if norm_Tu > 0:
        w_sum = np.sum(W[trustees_u, :], axis=0)
        w_term = w_sum / norm_Tu

    return y_term, w_term, norm_Iu, norm_Tu
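
A minimal sketch of how this helper might be exercised with toy factor matrices; the shapes, index lists, and values below are made up purely for illustration:

import numpy as np

# Hypothetical latent-factor matrices: 4 items / 4 trustees, 2 latent factors.
Y = np.random.rand(4, 2)
W = np.random.rand(4, 2)

items_u = [0, 2]     # items rated by user u (illustrative)
trustees_u = [1, 3]  # users trusted by user u (illustrative)

y_term, w_term, norm_Iu, norm_Tu = _compute_extra_terms(Y, W, items_u, trustees_u)
# y_term and w_term are length-2 vectors; norm_Iu == norm_Tu == sqrt(2).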
Project: cmapPy    Author: cmap    | Project source | File source
# Imports required by this excerpt
import numpy

# Note: logger, col_meta_group_node, and row_meta_group_node are module-level
# names defined elsewhere in cmapPy and are assumed to be in scope here.
def write_metadata(hdf5_out, dim, metadata_df, convert_back_to_neg_666):
    """
    Writes either column or row metadata to proper node of gctx out (hdf5) file.

    Input:
        - hdf5_out (h5py): open hdf5 file to write to
        - dim (str; must be "row" or "col"): dimension of metadata to write to 
        - metadata_df (pandas DataFrame): metadata DataFrame to write to file 
        - convert_back_to_neg_666 (bool): Whether to convert numpy.nans back to "-666",
                as per CMap metadata null convention 
    """
    if dim == "col":
        hdf5_out.create_group(col_meta_group_node)
        metadata_node_name = col_meta_group_node
    elif dim == "row":
        hdf5_out.create_group(row_meta_group_node)
        metadata_node_name = row_meta_group_node
    else:
        # Invalid 'dim' is only logged here; execution falls through, so the
        # later use of metadata_node_name would raise a NameError.
        logger.error("'dim' argument must be either 'row' or 'col'!")

    # write id field to expected node
    hdf5_out.create_dataset(metadata_node_name + "/id", data=[str(x) for x in metadata_df.index])

    metadata_fields = list(metadata_df.columns.copy())

    # if specified, convert numpy.nans in metadata back to -666
    if convert_back_to_neg_666:
        for c in metadata_fields:
            metadata_df[[c]] = metadata_df[[c]].replace([numpy.nan], ["-666"])

    # write metadata columns to their own arrays
    for field in [entry for entry in metadata_fields if entry != "ind"]:
        hdf5_out.create_dataset(metadata_node_name + "/" + field,
                                data=numpy.array(list(metadata_df.loc[:, field])))
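
The NaN-to-"-666" conversion in the middle of this function can be reproduced in isolation with plain pandas; below is a minimal sketch using a made-up metadata frame (the pert_dose and pert_time columns are illustrative, not taken from cmapPy):

import numpy
import pandas as pd

metadata_df = pd.DataFrame(
    {"pert_dose": [10.0, numpy.nan], "pert_time": [6, 24]},
    index=["A01", "A02"],
)

# Same idiom as in write_metadata: numpy.nan becomes the "-666" placeholder
# used by the CMap metadata null convention.
for c in metadata_df.columns:
    metadata_df[[c]] = metadata_df[[c]].replace([numpy.nan], ["-666"])

print(metadata_df["pert_dose"].tolist())  # [10.0, '-666']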
Project: Theano-Deep-learning    Author: GeekLiB    | Project source | File source
# Imports required by this excerpt
import logging

import numpy
import theano
import theano.tensor as T
from numpy.testing import assert_raises
from theano.compile.nanguardmode import NanGuardMode


def test_NanGuardMode():
    # Tests whether NanGuardMode is working by intentionally feeding in
    # numpy.inf and numpy.nan values. A working implementation should be
    # able to capture all of these abnormalities.
    x = T.matrix()
    w = theano.shared(numpy.random.randn(5, 7).astype(theano.config.floatX))
    y = T.dot(x, w)

    fun = theano.function(
        [x], y,
        mode=NanGuardMode(nan_is_error=True, inf_is_error=True)
    )
    a = numpy.random.randn(3, 5).astype(theano.config.floatX)
    infa = numpy.tile(
        (numpy.asarray(100.) ** 1000000).astype(theano.config.floatX), (3, 5))
    nana = numpy.tile(
        numpy.asarray(numpy.nan).astype(theano.config.floatX), (3, 5))
    biga = numpy.tile(
        numpy.asarray(1e20).astype(theano.config.floatX), (3, 5))

    fun(a)  # normal values

    # Temporarily silence logger
    _logger = logging.getLogger("theano.compile.nanguardmode")
    try:
        _logger.propagate = False
        assert_raises(AssertionError, fun, infa)  # INFs
        assert_raises(AssertionError, fun, nana)  # NANs
        assert_raises(AssertionError, fun, biga)  # big values
    finally:
        _logger.propagate = True

    # slices
    a = numpy.random.randn(3, 4, 5).astype(theano.config.floatX)
    infa = numpy.tile(
        (numpy.asarray(100.) ** 1000000).astype(theano.config.floatX),
        (3, 4, 5))
    nana = numpy.tile(
        numpy.asarray(numpy.nan).astype(theano.config.floatX), (3, 4, 5))
    biga = numpy.tile(
        numpy.asarray(1e20).astype(theano.config.floatX), (3, 4, 5))

    x = T.tensor3()
    y = x[:, T.arange(2), T.arange(2)]
    fun = theano.function(
        [x], y,
        mode=NanGuardMode(nan_is_error=True, inf_is_error=True)
    )
    fun(a)  # normal values
    try:
        _logger.propagate = False
        assert_raises(AssertionError, fun, infa)  # INFs
        assert_raises(AssertionError, fun, nana)  # NANs
        assert_raises(AssertionError, fun, biga)  # big values
    finally:
        _logger.propagate = True
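
For readers without Theano installed, the kind of check NanGuardMode performs can be approximated with plain numpy. The helper below is a hypothetical sketch, not the actual NanGuardMode implementation, and the 1e10 "big value" threshold is an arbitrary choice:

import numpy as np

def contains_abnormal(arr, big_threshold=1e10):
    """Flag NaNs, infinities, and suspiciously large magnitudes,
    roughly mirroring what NanGuardMode checks for."""
    arr = np.asarray(arr)
    return bool(np.isnan(arr).any()
                or np.isinf(arr).any()
                or (np.abs(arr) > big_threshold).any())

print(contains_abnormal(np.random.randn(3, 5)))    # False: normal values
print(contains_abnormal(np.tile(np.nan, (3, 5))))  # True: NaNs
print(contains_abnormal(np.tile(np.inf, (3, 5))))  # True: INFs
print(contains_abnormal(np.tile(1e20, (3, 5))))    # True: big values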