Python scipy module: unique() code examples

The following code examples, extracted from open-source Python projects, illustrate how to use scipy.unique(). Note that scipy.unique() is simply a re-export of numpy.unique() at SciPy's top level; recent SciPy versions no longer expose it, so new code should call numpy.unique() directly.
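
Before the project excerpts, here is a minimal sketch of the call itself (it assumes an installed SciPy old enough to still expose the alias; on current versions substitute numpy.unique()):

import numpy as np
import scipy   # assumption: an older SciPy that still re-exports numpy.unique

prob = np.array([0.2, 0.8, 0.2, 0.5])

# Sorted distinct values, plus each element's index into them (a 0-based rank).
vals, inv = scipy.unique(prob, return_inverse=True)
print(vals)   # [0.2 0.5 0.8]
print(inv)    # [0 2 0 1]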

Project: glmnet_py | Author: hanfang
import scipy


def auc(y, prob, w):
    if len(w) == 0:
        # Unweighted case: jitter the scores by less than their smallest gap so
        # that scipy.unique() can assign a distinct rank to every observation.
        mindiff = scipy.amin(scipy.diff(scipy.unique(prob)))
        pert = scipy.random.uniform(0, mindiff/3, prob.size)
        t, rprob = scipy.unique(prob + pert, return_inverse=True)  # 0-based ranks
        n1 = scipy.sum(y, keepdims=True)
        n0 = y.shape[0] - n1
        u = scipy.sum(rprob[y == 1]) - n1*(n1 + 1)/2  # Mann-Whitney-style rank sum
        result = u/(n1*n0)
    else:
        # Weighted case: accumulate, for each positive, the cumulative weight of
        # the negatives ranked below it.
        op = scipy.argsort(prob)
        y = y[op]
        w = w[op]
        cw = scipy.cumsum(w)
        w1 = w[y == 1]
        cw1 = scipy.cumsum(w1)
        wauc = scipy.sum(w1*(cw[y == 1] - cw1))
        sumw = cw1[-1]
        sumw = sumw*(cw[-1] - sumw)  # total positive weight * total negative weight
        result = wauc/sumw
    return result
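
As a rough sanity check of the weighted branch above, the sketch below is a hypothetical usage example (it assumes the function is importable and that the installed SciPy still provides the top-level aliases it relies on); a score vector that perfectly separates the two classes should give an AUC of 1.0.

import numpy as np

y = np.array([0, 0, 1, 1])
prob = np.array([0.1, 0.2, 0.8, 0.9])   # higher scores for the positive class

print(auc(y, prob, np.ones(4)))   # weighted branch -> 1.0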
#=========================
Project: PleioPred | Author: yiminghu
import scipy as sp
from sklearn import metrics


def pred_accuracy(y_true, y_pred):
    y_true = sp.copy(y_true)
    if len(sp.unique(y_true)) == 2:
        # Dichotomous trait: recode the two observed values to 0/1 and report AUC.
        print('dichotomous trait, calculating AUC')
        y_min = y_true.min()
        y_max = y_true.max()
        if y_min != 0 or y_max != 1:
            y_true[y_true == y_min] = 0
            y_true[y_true == y_max] = 1
        fpr, tpr, thresholds = metrics.roc_curve(y_true, y_pred)
        auc = metrics.auc(fpr, tpr)
        return auc
    else:
        # Continuous trait: report the Pearson correlation instead.
        print('continuous trait, calculating COR')
        cor = sp.corrcoef(y_true, y_pred)[0, 1]
        return cor
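
A hypothetical usage sketch (the array values below are made up for illustration): a y_true with exactly two distinct values is recoded to 0/1 and scored with AUC, while anything else falls through to the Pearson correlation.

import numpy as np

y_bin = np.array([1, 1, 2, 2])             # any two distinct values are recoded to 0/1
scores = np.array([0.1, 0.2, 0.8, 0.9])
print(pred_accuracy(y_bin, scores))        # -> 1.0

y_cont = np.array([0.3, 1.1, 2.0, 2.9])
print(pred_accuracy(y_cont, scores))       # -> Pearson correlation, roughly 0.95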
Project: RFCN | Author: zengxianyu
def __MR_boundary_indictor(self,labels):
        # Build one indicator vector per image border: entry k is 1.0 if
        # superpixel k touches that border of the label map, 0.0 otherwise.
        # Assumes a module-level `import scipy as sp`.
        s = sp.amax(labels)+1
        up_indictor = (sp.zeros((s,1))).astype(float)
        right_indictor = (sp.zeros((s,1))).astype(float)
        low_indictor = (sp.zeros((s,1))).astype(float)
        left_indictor = (sp.zeros((s,1))).astype(float)

        # Superpixel ids present on each border row/column.
        upper_ids = sp.unique(labels[0,:]).astype(int)
        right_ids = sp.unique(labels[:,labels.shape[1]-1]).astype(int)
        low_ids = sp.unique(labels[labels.shape[0]-1,:]).astype(int)
        left_ids = sp.unique(labels[:,0]).astype(int)

        up_indictor[upper_ids] = 1.0
        right_indictor[right_ids] = 1.0
        low_indictor[low_ids] = 1.0
        left_indictor[left_ids] = 1.0

        return up_indictor,right_indictor,low_indictor,left_indictor
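
A toy illustration of the core idiom (a standalone sketch, not part of the RFCN class): sp.unique() over a border row or column of a superpixel label map collects the ids of the superpixels that touch that border.

import scipy as sp   # assumption: an older SciPy that still aliases numpy functions

labels = sp.array([[0, 0, 1],
                   [2, 2, 1],
                   [2, 3, 3]])

print(sp.unique(labels[0, :]))   # [0 1] -> superpixels touching the top border
print(sp.unique(labels[:, 0]))   # [0 2] -> superpixels touching the left border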
Project: HistoricalMap | Author: lennepkade
def learn(self,x,y):
        '''
        Function that learns the GMM with ridge regularization from training samples
        Input:
            x : the training samples
            y : the labels
        Output:
            the mean, covariance and proportion of each class, as well as the spectral decomposition of the covariance matrix
        '''
        # Assumes module-level imports: import scipy as sp; from scipy import linalg

        ## Get information from the data
        C = sp.unique(y).shape[0]  # Number of classes
        n = x.shape[0]  # Number of samples
        d = x.shape[1]  # Number of variables
        eps = sp.finfo(sp.float64).eps

        ## Initialization
        self.ni = sp.empty((C,1))    # Vector of number of samples for each class
        self.prop = sp.empty((C,1))  # Vector of proportions
        self.mean = sp.empty((C,d))  # Matrix of class means
        self.cov = sp.empty((C,d,d)) # Array of class covariance matrices
        self.Q = sp.empty((C,d,d))   # Array of eigenvectors
        self.L = sp.empty((C,d))     # Matrix of eigenvalues
        self.classnum = sp.empty(C).astype('uint8')

        ## Learn the parameters of the model for each class
        for c,cR in enumerate(sp.unique(y)):

            j = sp.where(y==(cR))[0]

            self.classnum[c] = cR # Save the original label
            self.ni[c] = float(j.size)
            self.prop[c] = self.ni[c]/n
            self.mean[c,:] = sp.mean(x[j,:],axis=0)
            self.cov[c,:,:] = sp.cov(x[j,:],bias=1,rowvar=0)  # Normalize by ni to be consistent with the update formulae

            # Spectral decomposition of the class covariance
            L,Q = linalg.eigh(self.cov[c,:,:])
            idx = L.argsort()[::-1]      # eigenvalues in decreasing order
            self.L[c,:] = L[idx]
            self.Q[c,:,:] = Q[:,idx]
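
The class that owns learn() is not shown on this page, so the following is only a hypothetical usage sketch: `GMMR` is a placeholder name for that class, and the data are random toy samples.

import numpy as np

model = GMMR()   # hypothetical: placeholder for the (unshown) class defining learn()

# Two well-separated Gaussian classes in 3 dimensions.
x = np.vstack([np.random.randn(50, 3), np.random.randn(50, 3) + 5.0])
y = np.hstack([np.ones(50), 2 * np.ones(50)])

model.learn(x, y)
print(model.classnum)       # [1 2]
print(model.prop.ravel())   # [0.5 0.5]
print(model.L.shape)        # (2, 3): eigenvalues of each class covariance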
Project: RFCN | Author: zengxianyu
def __MR_second_stage_indictor(self,saliency_img_mask,labels):
        s = sp.amax(labels)+1
        # ids of the superpixels covered by the saliency mask
        ids = sp.unique(labels[saliency_img_mask]).astype(int)
        # indicator vector: 1.0 for those superpixels, 0.0 otherwise
        indictor = sp.zeros((s,1)).astype(float)
        indictor[ids] = 1.0
        return indictor
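
A standalone toy sketch of the same masking idiom (not part of the RFCN class): indexing the label map with a boolean mask and passing the result to sp.unique() yields exactly the superpixel ids that the mask overlaps.

import scipy as sp   # assumption: an older SciPy that still aliases numpy functions

labels = sp.array([[0, 0, 1],
                   [2, 2, 1]])
mask = sp.array([[True, False, False],
                 [True, True,  False]])

print(sp.unique(labels[mask]))   # [0 2] -> superpixels overlapped by the mask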
Project: RFCN | Author: zengxianyu
def __MR_get_adj_loop(self, labels):
        # Build a superpixel adjacency matrix: adj[a, b] is set to False when a
        # and b are neighbours (or both touch the image boundary), True otherwise.
        s = sp.amax(labels) + 1
        adj = np.ones((s, s), dtype=bool)

        # Mark horizontally, vertically and diagonally neighbouring superpixels.
        for i in range(labels.shape[0] - 1):
            for j in range(labels.shape[1] - 1):
                if labels[i, j] != labels[i+1, j]:
                    adj[labels[i, j], labels[i+1, j]] = False
                    adj[labels[i+1, j], labels[i, j]] = False
                if labels[i, j] != labels[i, j+1]:
                    adj[labels[i, j], labels[i, j+1]] = False
                    adj[labels[i, j+1], labels[i, j]] = False
                if labels[i, j] != labels[i+1, j+1]:
                    adj[labels[i, j], labels[i+1, j+1]] = False
                    adj[labels[i+1, j+1], labels[i, j]] = False
                if labels[i+1, j] != labels[i, j+1]:
                    adj[labels[i+1, j], labels[i, j+1]] = False
                    adj[labels[i, j+1], labels[i+1, j]] = False

        # Collect the ids of all superpixels touching the image border ...
        upper_ids = sp.unique(labels[0, :]).astype(int)
        right_ids = sp.unique(labels[:, labels.shape[1]-1]).astype(int)
        low_ids = sp.unique(labels[labels.shape[0]-1, :]).astype(int)
        left_ids = sp.unique(labels[:, 0]).astype(int)

        bd = np.append(upper_ids, right_ids)
        bd = np.append(bd, low_ids)
        bd = sp.unique(np.append(bd, left_ids))

        # ... and connect every pair of boundary superpixels.
        for i in range(len(bd)):
            for j in range(i + 1, len(bd)):
                adj[bd[i], bd[j]] = False
                adj[bd[j], bd[i]] = False

        return adj
Project: dzetsaka | Author: lennepkade
def learn(self,x,y):
        '''
        Function that learns the GMM with ridge regularization from training samples
        Input:
            x : the training samples
            y : the labels
        Output:
            the mean, covariance and proportion of each class, as well as the spectral decomposition of the covariance matrix
        '''
        # Assumes module-level imports: import scipy as sp; from scipy import linalg

        ## Get information from the data
        C = sp.unique(y).shape[0]  # Number of classes
        n = x.shape[0]  # Number of samples
        d = x.shape[1]  # Number of variables
        eps = sp.finfo(sp.float64).eps

        ## Initialization
        self.ni = sp.empty((C,1))    # Vector of number of samples for each class
        self.prop = sp.empty((C,1))  # Vector of proportions
        self.mean = sp.empty((C,d))  # Matrix of class means
        self.cov = sp.empty((C,d,d)) # Array of class covariance matrices
        self.Q = sp.empty((C,d,d))   # Array of eigenvectors
        self.L = sp.empty((C,d))     # Matrix of eigenvalues
        self.classnum = sp.empty(C).astype('uint8')

        ## Learn the parameters of the model for each class
        for c,cR in enumerate(sp.unique(y)):

            j = sp.where(y==(cR))[0]

            self.classnum[c] = cR # Save the original label
            self.ni[c] = float(j.size)
            self.prop[c] = self.ni[c]/n
            self.mean[c,:] = sp.mean(x[j,:],axis=0)
            self.cov[c,:,:] = sp.cov(x[j,:],bias=1,rowvar=0)  # Normalize by ni to be consistent with the update formulae

            # Spectral decomposition of the class covariance
            L,Q = linalg.eigh(self.cov[c,:,:])
            idx = L.argsort()[::-1]      # eigenvalues in decreasing order
            self.L[c,:] = L[idx]
            self.Q[c,:,:] = Q[:,idx]