Python numpy.ma module: masked_all() code examples

The following 12 code examples, extracted from open-source Python projects, illustrate how to use numpy.ma.masked_all().
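
Before the project code, here is a minimal sketch (not taken from any of the projects below) of what masked_all() does: it allocates an array of the requested shape in which every element starts out masked, so valid values can be filled in as they become available.

import numpy as np
import numpy.ma as ma

# Allocate a 2x3 array in which every element starts out masked.
a = ma.masked_all((2, 3), dtype=np.float64)
print(a.mask.all())       # True: no element holds valid data yet

# Assigning to an element unmasks it; everything else stays masked.
a[0, 1] = 3.14
print(a)
# [[-- 3.14 --]
#  [-- -- --]]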

Project: keras-toolbox | Author: hadim
def make_mosaic(im, nrows, ncols, border=1):
    """From http://nbviewer.jupyter.org/github/julienr/ipynb_playground/blob/master/keras/convmnist/keras_cnn_mnist.ipynb
    """
    import numpy as np
    import numpy.ma as ma

    nimgs = len(im)
    imshape = im[0].shape

    mosaic = ma.masked_all((nrows * imshape[0] + (nrows - 1) * border,
                            ncols * imshape[1] + (ncols - 1) * border),
                            dtype=np.float32)

    paddedh = imshape[0] + border
    paddedw = imshape[1] + border
    for i in range(nimgs):

        row = int(np.floor(i / ncols))
        col = i % ncols

        mosaic[row * paddedh:row * paddedh + imshape[0],
               col * paddedw:col * paddedw + imshape[1]] = im[i]

    return mosaic
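
A hedged usage sketch for this helper (the random image data, grid size, and matplotlib calls are illustrative assumptions, not part of keras-toolbox):

import numpy as np
import matplotlib.pyplot as plt

# Six hypothetical 8x8 grayscale images, tiled into a 2x3 mosaic.
imgs = np.random.rand(6, 8, 8).astype(np.float32)
mosaic = make_mosaic(imgs, nrows=2, ncols=3, border=1)
print(mosaic.shape)               # (17, 26): 2*8 + 1 border row, 3*8 + 2 border columns
plt.imshow(mosaic, cmap='gray')   # masked border pixels are drawn with the colormap's "bad" color
plt.show()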
Project: oceansdb | Author: castelao
def nearest(self, doy, depth, lat, lon, var):
        output = {}
        dims, idx = cropIndices(self.dims, lat, lon, depth, doy)
        for v in var:
            output[v] = ma.masked_all((doy.size, depth.size, lat.size,
                lon.size), dtype='f')
            for tn_out, t in enumerate(doy):
                tn_in = np.absolute(dims['time']-t).argmin()
                subset = self.ncs[tn_in][v][0, idx['zn'], idx['yn'], idx['xn']]
                for yn_out, y in enumerate(lat):
                    yn_in = np.absolute(dims['lat']-y).argmin()
                    for xn_out, x in enumerate(lon):
                        xn_in = np.absolute(dims['lon']-x).argmin()
                        for zn_out, z in enumerate(depth):
                            zn_in = np.absolute(dims['depth']-z).argmin()
                            output[v][tn_out, zn_out, yn_out, xn_out] = \
                                    subset[zn_in, yn_in, xn_in]
        return output
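
Stripped of the oceansdb internals, the pattern here is to pre-allocate a fully masked output and copy in the nearest grid value for each requested coordinate, so anything that is never assigned stays masked. A simplified sketch with invented grid data and assumed names:

import numpy as np
import numpy.ma as ma

# Hypothetical regular grid and field (not oceansdb data).
grid_lat = np.arange(-90.0, 91.0, 1.0)
grid_lon = np.arange(-180.0, 181.0, 1.0)
field = np.random.rand(grid_lat.size, grid_lon.size)

lat = np.array([10.2, -33.7])     # requested coordinates
lon = np.array([5.9, 151.3])

output = ma.masked_all((lat.size, lon.size), dtype='f')
for yn_out, y in enumerate(lat):
    yn_in = np.absolute(grid_lat - y).argmin()
    for xn_out, x in enumerate(lon):
        xn_in = np.absolute(grid_lon - x).argmin()
        output[yn_out, xn_out] = field[yn_in, xn_in]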
Project: oceansdb | Author: castelao
def nearest(self, doy, depth, lat, lon, var):
        output = {}
        for v in var:
            output[v] = ma.masked_all((doy.size, depth.size, lat.size,
                lon.size), dtype='f')
            for tn_out, t in enumerate(doy):
                subset, dims = self.crop(np.array([t]), depth, lat, lon, [v])
                for yn_out, y in enumerate(lat):
                    yn_in = np.absolute(dims['lat']-y).argmin()
                    for xn_out, x in enumerate(lon):
                        xn_in = np.absolute(dims['lon']-x).argmin()
                        for zn_out, z in enumerate(depth):
                            zn_in = np.absolute(dims['depth']-z).argmin()
                            output[v][tn_out, zn_out, yn_out, xn_out] = \
                                    subset[v][0,zn_in, yn_in, xn_in]
        return output
Project: dlcv05 | Author: telecombcn-dl
def make_mosaic(imgs, nrows, ncols, border=1):
    """
    Given a set of images with all the same shape, makes a
    mosaic with nrows and ncols
    """
    nimgs = imgs.shape[0]
    imshape = imgs.shape[1:]

    mosaic = ma.masked_all((nrows * imshape[0] + (nrows - 1) * border,
                            ncols * imshape[1] + (ncols - 1) * border),
                            dtype=np.float32)

    paddedh = imshape[0] + border
    paddedw = imshape[1] + border
    for i in xrange(nimgs):
        row = int(np.floor(i / ncols))
        col = i % ncols

        mosaic[row * paddedh:row * paddedh + imshape[0],
               col * paddedw:col * paddedw + imshape[1]] = imgs[i]
    return mosaic

Project: SkinLesionNeuralNetwork | Author: Neurality
def make_mosaic(imgs, nrows, ncols, border=1):
    """
    Given a set of images with all the same shape, makes a
    mosaic with nrows and ncols
    """
    import numpy as np
    import numpy.ma as ma
    nimgs = imgs.shape[0]
    imshape = imgs.shape[1:]

    mosaic = ma.masked_all((nrows * imshape[0] + (nrows - 1) * border,
                            ncols * imshape[1] + (ncols - 1) * border),
                            dtype=np.float32)

    paddedh = imshape[0] + border
    paddedw = imshape[1] + border
    for i in xrange(nimgs):
        row = int(np.floor(i / ncols))
        col = i % ncols

        mosaic[row * paddedh:row * paddedh + imshape[0],
               col * paddedw:col * paddedw + imshape[1]] = imgs[i]
    return mosaic
Project: SkinLesionNeuralNetwork | Author: Neurality
def make_mosaic(im, nrows, ncols, border=1):
    """From http://nbviewer.jupyter.org/github/julienr/ipynb_playground/blob/master/keras/convmnist/keras_cnn_mnist.ipynb
    """
    import numpy as np
    import numpy.ma as ma

    nimgs = len(im)
    imshape = (im.shape[-1],im.shape[-2])
    #imshape = im[0].shape

    mosaic = ma.masked_all((nrows * imshape[0] + (nrows - 1) * border,
                            ncols * imshape[1] + (ncols - 1) * border),
                            dtype=np.float32)

    paddedh = imshape[0] + border
    paddedw = imshape[1] + border
    for i in range(nimgs):

        row = int(np.floor(i / ncols))
        col = i % ncols

        mosaic[row * paddedh:row * paddedh + imshape[0],
               col * paddedw:col * paddedw + imshape[1]] = im[i]

    return mosaic
Project: deep-learning-experiments | Author: raghakot
def make_mosaic(imgs, nrows, ncols, border=1):
    """
    Given a set of images with all the same shape, makes a
    mosaic with nrows and ncols
    """
    nimgs = imgs.shape[0]
    imshape = imgs.shape[1:]

    mosaic = ma.masked_all((nrows * imshape[0] + (nrows - 1) * border,
                            ncols * imshape[1] + (ncols - 1) * border),
                           dtype=np.float32)

    paddedh = imshape[0] + border
    paddedw = imshape[1] + border
    for i in xrange(nimgs):
        row = int(np.floor(i / ncols))
        col = i % ncols

        mosaic[row * paddedh:row * paddedh + imshape[0],
               col * paddedw:col * paddedw + imshape[1]] = imgs[i]
    return mosaic
Project: PyDataLondon29-EmbarrassinglyParallelDAWithAWSLambda | Author: SignalMedia
def test_constructor_maskedarray(self):
        self._check_basic_constructor(ma.masked_all)

        # Check non-masked values
        mat = ma.masked_all((2, 3), dtype=float)
        mat[0, 0] = 1.0
        mat[1, 2] = 2.0
        frame = DataFrame(mat, columns=['A', 'B', 'C'], index=[1, 2])
        self.assertEqual(1.0, frame['A'][1])
        self.assertEqual(2.0, frame['C'][2])

        # what is this even checking??
        mat = ma.masked_all((2, 3), dtype=float)
        frame = DataFrame(mat, columns=['A', 'B', 'C'], index=[1, 2])
        self.assertTrue(np.all(~np.asarray(frame == frame)))
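For reference, a brief hedged illustration (not pandas test code) of the behavior this test exercises: when a masked array is passed to the DataFrame constructor, masked cells become NaN.

import numpy.ma as ma
import pandas as pd

mat = ma.masked_all((2, 3), dtype=float)
mat[0, 0] = 1.0
print(pd.DataFrame(mat, columns=['A', 'B', 'C']))
#      A   B   C
# 0  1.0 NaN NaN
# 1  NaN NaN NaN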
Project: oceansdb | Author: castelao
def nearest(self, lat, lon, var):
        output = {}
        dims, idx = cropIndices(self.dims, lat, lon)
        for v in var:
            if v == 'height':
                v = 'z'
            subset = self.ncs[0].variables[v][idx['yn'], idx['xn']]
            output[v] = ma.masked_all((lat.size, lon.size), dtype='f')
            for yn_out, y in enumerate(lat):
                yn_in = np.absolute(dims['lat']-y).argmin()
                for xn_out, x in enumerate(lon):
                    xn_in = np.absolute(dims['lon']-x).argmin()
                    output[v][yn_out, xn_out] = subset[yn_in, xn_in]
        return output
Project: PyDataLondon29-EmbarrassinglyParallelDAWithAWSLambda | Author: SignalMedia
def test_constructor_mrecarray(self):
        # Ensure mrecarray produces frame identical to dict of masked arrays
        # from GH3479

        assert_fr_equal = functools.partial(assert_frame_equal,
                                            check_index_type=True,
                                            check_column_type=True,
                                            check_frame_type=True)
        arrays = [
            ('float', np.array([1.5, 2.0])),
            ('int', np.array([1, 2])),
            ('str', np.array(['abc', 'def'])),
        ]
        for name, arr in arrays[:]:
            arrays.append(('masked1_' + name,
                           np.ma.masked_array(arr, mask=[False, True])))
        arrays.append(('masked_all', np.ma.masked_all((2,))))
        arrays.append(('masked_none',
                       np.ma.masked_array([1.0, 2.5], mask=False)))

        # call assert_frame_equal for all selections of 3 arrays
        for comb in itertools.combinations(arrays, 3):
            names, data = zip(*comb)
            mrecs = mrecords.fromarrays(data, names=names)

            # fill the comb
            comb = dict([(k, v.filled()) if hasattr(
                v, 'filled') else (k, v) for k, v in comb])

            expected = DataFrame(comb, columns=names)
            result = DataFrame(mrecs)
            assert_fr_equal(result, expected)

            # specify columns
            expected = DataFrame(comb, columns=names[::-1])
            result = DataFrame(mrecs, columns=names[::-1])
            assert_fr_equal(result, expected)

            # specify index
            expected = DataFrame(comb, columns=names, index=[1, 2])
            result = DataFrame(mrecs, index=[1, 2])
            assert_fr_equal(result, expected)
Project: oceansdb | Author: castelao
def woa_profile_from_dap(var, d, lat, lon, depth, cfg):
    """
    Monthly Climatologic Mean and Standard Deviation from WOA,
    used either for temperature or salinity.

    INPUTS
        time: [day of the year]
        lat: [-90<lat<90]
        lon: [-180<lon<180]
        depth: [meters]

    Reads the WOA Monthly Climatology NetCDF file and
    returns the corresponding WOA values of salinity or temperature mean and
    standard deviation for the given time, lat, lon, depth.
    """
    if lon < 0:
        lon = lon+360

    url = cfg['url']

    doy = int(d.strftime('%j'))
    dataset = open_url(url)

    dn = (np.abs(doy-dataset['time'][:])).argmin()
    xn = (np.abs(lon-dataset['lon'][:])).argmin()
    yn = (np.abs(lat-dataset['lat'][:])).argmin()

    if re.match(r"temperature\d?$", var):
        mn = ma.masked_values(dataset.t_mn.t_mn[dn, :, yn, xn].reshape(
            dataset['depth'].shape[0]), dataset.t_mn.attributes['_FillValue'])
        sd = ma.masked_values(dataset.t_sd.t_sd[dn, :, yn, xn].reshape(
            dataset['depth'].shape[0]), dataset.t_sd.attributes['_FillValue'])
        # se = ma.masked_values(dataset.t_se.t_se[dn, :, yn, xn].reshape(
        #    dataset['depth'].shape[0]), dataset.t_se.attributes['_FillValue'])
        # Use this in the future. A minimum # of samples
        # dd = ma.masked_values(dataset.t_dd.t_dd[dn, :, yn, xn].reshape(
        #    dataset['depth'].shape[0]), dataset.t_dd.attributes['_FillValue'])
    elif re.match(r"salinity\d?$", var):
        mn = ma.masked_values(dataset.s_mn.s_mn[dn, :, yn, xn].reshape(
            dataset['depth'].shape[0]), dataset.s_mn.attributes['_FillValue'])
        sd = ma.masked_values(dataset.s_sd.s_sd[dn, :, yn, xn].reshape(
            dataset['depth'].shape[0]), dataset.s_sd.attributes['_FillValue'])
        # dd = ma.masked_values(dataset.s_dd.s_dd[dn, :, yn, xn].reshape(
        #    dataset['depth'].shape[0]), dataset.s_dd.attributes['_FillValue'])
    zwoa = ma.array(dataset.depth[:])

    ind = (depth <= zwoa.max()) & (depth >= zwoa.min())
    # Mean value profile
    f = interp1d(zwoa[~ma.getmaskarray(mn)].compressed(), mn.compressed())
    mn_interp = ma.masked_all(depth.shape)
    mn_interp[ind] = f(depth[ind])
    # The stdev profile
    f = interp1d(zwoa[~ma.getmaskarray(sd)].compressed(), sd.compressed())
    sd_interp = ma.masked_all(depth.shape)
    sd_interp[ind] = f(depth[ind])

    output = {'woa_an': mn_interp, 'woa_sd': sd_interp}

    return output
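
The closing lines above follow a common pattern: interpolate the climatological profile onto the requested depths and leave anything outside the source depth range masked. A self-contained sketch of that pattern with invented numbers (not WOA data):

import numpy as np
import numpy.ma as ma
from scipy.interpolate import interp1d

zwoa = np.array([0., 10., 20., 50., 100.])      # source depth levels
mn = np.array([20.0, 19.5, 18.9, 17.2, 15.0])   # hypothetical mean profile
depth = np.array([5., 30., 150.])               # requested depths

ind = (depth <= zwoa.max()) & (depth >= zwoa.min())
f = interp1d(zwoa, mn)
mn_interp = ma.masked_all(depth.shape)
mn_interp[ind] = f(depth[ind])
print(mn_interp)    # roughly [19.75 18.33 --]; the 150 m entry stays masked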
Project: oceansdb | Author: castelao
def interpolate(self, lat, lon, var):
        """ Interpolate each var on the coordinates requested

        """

        subset, dims = self.crop(lat, lon, var)

        if np.all([y in dims['lat'] for y in lat]) & \
                np.all([x in dims['lon'] for x in lon]):
                    yn = np.nonzero([y in lat for y in dims['lat']])[0]
                    xn = np.nonzero([x in lon for x in dims['lon']])[0]
                    output = {}
                    for v in subset:
                        # output[v] = subset[v][dn, zn, yn, xn]
                        # Seriously that this is the way to do it?!!??
                        output[v] = subset[v][:, xn][yn]
                    return output

        # The output coordinates shall be created only once.
        points_out = []
        for latn in lat:
            for lonn in lon:
                points_out.append([latn, lonn])
        points_out = np.array(points_out)

        output = {}
        for v in var:
            output[v] = ma.masked_all(
                    (lat.size, lon.size),
                    dtype=subset[v].dtype)

            # The valid data
            idx = np.nonzero(~ma.getmaskarray(subset[v]))

            if idx[0].size > 0:
                points = np.array([
                    dims['lat'][idx[0]], dims['lon'][idx[1]]]).T
                values = subset[v][idx]

                # Interpolate along the dimensions that have more than one
                #   position, otherwise it means that the output is exactly
                #   on that coordinate.
                ind = np.array(
                        [np.unique(points[:, i]).size > 1 for i in
                            range(points.shape[1])])
                assert ind.any()

                values_out = griddata(
                        np.atleast_1d(np.squeeze(points[:, ind])),
                        values,
                        np.atleast_1d(np.squeeze(points_out[:, ind]))
                        )

                # Remap the interpolated value back into a 4D array
                idx = np.isfinite(values_out)
                for [y, x], out in zip(points_out[idx], values_out[idx]):
                    output[v][y==lat, x==lon] = out

        return output
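
A condensed sketch of the same "pre-allocate a masked output, fill only the valid interpolated points" idea used in interpolate() above, with synthetic coordinates, a partly masked field, and assumed names (not oceansdb code):

import numpy as np
import numpy.ma as ma
from scipy.interpolate import griddata

grid_lat = np.array([0., 1., 2.])
grid_lon = np.array([10., 11., 12.])
field = ma.masked_all((3, 3))
field[:2, :] = np.arange(6).reshape(2, 3)        # the last latitude row stays masked

lat = np.array([0.25, 0.75])                     # requested coordinates
lon = np.array([10.5, 11.5])
points_out = np.array([[y, x] for y in lat for x in lon])

output = ma.masked_all((lat.size, lon.size), dtype=field.dtype)
idx = np.nonzero(~ma.getmaskarray(field))        # positions of the valid data
points = np.array([grid_lat[idx[0]], grid_lon[idx[1]]]).T
values = np.asarray(field[idx])
values_out = griddata(points, values, points_out)

ok = np.isfinite(values_out)                     # griddata returns NaN outside the hull
for (y, x), val in zip(points_out[ok], values_out[ok]):
    output[y == lat, x == lon] = val
print(output)        # all four requested points fall inside the valid data here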