Python networkx module: shortest_path_length() example source code

The following 33 code examples, extracted from open-source Python projects, illustrate how to use networkx.shortest_path_length().

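Before the project excerpts, here is a minimal standalone sketch of the calling conventions used throughout them. The graph is purely illustrative; only the documented networkx API is used, and the behavior of the all-pairs form differs between networkx 1.x and 2.x as noted in the comments.

import networkx as nx

G = nx.Graph()
G.add_weighted_edges_from([('a', 'b', 1.0), ('b', 'c', 2.0), ('c', 'd', 1.0)])

# source and target given: a single number (hop count, or sum of edge weights with weight=)
print(nx.shortest_path_length(G, source='a', target='d'))                   # 3
print(nx.shortest_path_length(G, source='a', target='d', weight='weight'))  # 4.0

# source only: dict mapping every reachable node to its distance from the source
print(nx.shortest_path_length(G, source='a'))

# target only: dict mapping every node that can reach the target to its distance
print(nx.shortest_path_length(G, target='d'))

# neither given: all-pairs lengths; networkx >= 2.0 yields (source, {target: length})
# pairs, while networkx 1.x returns a dict of dicts -- the excerpts below rely on both forms
for source, lengths in nx.shortest_path_length(G):
    print(source, lengths)

# a missing path raises NetworkXNoPath (a missing node raises NodeNotFound in networkx >= 2.0)
H = nx.Graph([('x', 'y')])
H.add_node('z')
try:
    nx.shortest_path_length(H, source='x', target='z')
except nx.NetworkXNoPath:
    print('no path from x to z')
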
Project: raiden    Author: raiden-network
def ordered_neighbors(nx_graph, our_address, target_address):
    paths = list()

    try:
        all_neighbors = networkx.all_neighbors(nx_graph, our_address)
    except networkx.NetworkXError:
        # If `our_address` is not in the graph, no channels opened with the
        # address
        return []

    for neighbor in all_neighbors:
        try:
            length = networkx.shortest_path_length(
                nx_graph,
                neighbor,
                target_address,
            )
            heappush(paths, (length, neighbor))
        except (networkx.NetworkXNoPath, networkx.NodeNotFound):
            pass

    return paths
Project: breaking_cycles_in_noisy_hierarchies    Author: zhenv5
def add_cycle_edges_by_path(g,number_of_edges,path_length = 5):
    number = 0
    num_nodes = g.number_of_nodes()
    nodes = g.nodes()
    extra_edges = []
    while number < number_of_edges:
        u,v = np.random.randint(0,num_nodes,2)
        u = nodes[u]
        v = nodes[v]
        if nx.has_path(g,u,v):
            length = nx.shortest_path_length(g,source = u,target = v)
            if length <= path_length:
                extra_edges.append((v,u))
                number += 1
        if nx.has_path(g,v,u):
            length = nx.shortest_path_length(g,source = v,target = u)
            if length <= path_length:
                extra_edges.append((u,v))
                number += 1
    print("# extra edges added with path length <= %d: %d" % (path_length,len(extra_edges)))
    return extra_edges
Project: anomalous-vertices-detection    Author: Kagandi
def get_shortest_path_length(self, vertex1, vertex2):
        """
        Parameters
        ----------
        vertex1:
        vertex2:

        Returns
        -------
        int or float: length of the shortest path between vertex1 and vertex2 (0 if no path exists)

        Examples
        --------
        >>>
        """
        try:
            return nx.shortest_path_length(self._graph, source=vertex1, target=vertex2, weight=self._weight_field)
        except nx.NetworkXNoPath:
            return 0
Project: Quadflor    Author: quadflor
def bell_reweighting(tree, root, sublinear=False):
    # convert the hierarchy to a tree if make_bfs_tree is true

    distance_by_target = nx.shortest_path_length(tree, source=root)

    level_count = defaultdict(int)
    for val in distance_by_target.values():
        level_count[val] += 1

    for edge in tree.edges():
        parent, child = edge
        if sublinear:
            # use smoothed logarithm
            tree[parent][child]['weight'] = 1.0 / log(1 + level_count[distance_by_target[child]], 10)
        else:
            tree[parent][child]['weight'] = 1.0 / level_count[distance_by_target[child]]

    return tree
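
A minimal, hypothetical check of bell_reweighting on a toy hierarchy (assuming the function above is in scope together with the defaultdict and log imports its body requires): the root has two children at level 1 and three grandchildren at level 2, so level-1 edges receive weight 1/2 and level-2 edges weight 1/3.

import networkx as nx

tree = nx.DiGraph()
tree.add_edges_from([('root', 'a'), ('root', 'b'), ('a', 'c'), ('a', 'd'), ('b', 'e')])
weighted = bell_reweighting(tree, 'root')
print(weighted['root']['a']['weight'])  # 0.5: two nodes at level 1
print(weighted['a']['c']['weight'])     # ~0.333: three nodes at level 2
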
Project: lomap    Author: wasserfeder
def prune(self):
        """TODO:
        """
        # set of allowed symbols, i.e. singletons and emptyset
        symbols = set([0] + self.props.values())
        # update transitions and mark for deletion
        del_transitions = deque()
        for u, v, d in self.g.edges_iter(data=True):
            sym = d['input'] & symbols
            if sym:
                d['input'] = sym
            else:
                del_transitions.append((u, v))
        self.g.remove_edges_from(del_transitions)
        # delete states unreachable from the initial state
        init = next(self.init.iterkeys())
        reachable_states = nx.shortest_path_length(self.g, source=init).keys()
        del_states = [n for n in self.g.nodes_iter() if n not in reachable_states]
        self.g.remove_nodes_from(del_states)
        # update accepting pairs
        for f, b in self.final:
            f.difference_update(del_states)
            b.difference_update(del_states)
        return del_states, del_transitions
Project: lomap    Author: wasserfeder
def prune(self):
        """TODO:

        Note: Does not update the final data structure, because it is dependent
        on the automaton type.
        """
        # set of allowed symbols, i.e. singletons and emptyset
        symbols = set([0] + self.props.values())
        # update transitions and mark for deletion
        del_transitions = deque()
        for u, v, d in self.g.edges_iter(data=True):
            sym = d['input'] & symbols
            if sym:
                d['input'] = sym
            else:
                del_transitions.append((u, v))
        self.g.remove_edges_from(del_transitions)
        # delete states unreachable from the initial state
        init = next(self.init.iterkeys())
        reachable_states = nx.shortest_path_length(self.g, source=init).keys()
        del_states = [n for n in self.g.nodes_iter() if n not in reachable_states]
        self.g.remove_nodes_from(del_states)
        return del_states, del_transitions
Project: QTop    Author: jacobmarks
def InternalRecovery(code, terminal1, terminal2, type, charge_type):
    measure_chain = nx.shortest_path(code.Dual[type], terminal1, terminal2)
    chain_length = nx.shortest_path_length(code.Dual[type], terminal1, terminal2)
    for link in range(chain_length):
        vertex1 = measure_chain[link]
        vertex2 = measure_chain[link + 1]
        for data_qubit in code.Stabilizers[type][vertex1]['data']:
            if data_qubit in code.Stabilizers[type][vertex2]['data']:
                prior_charge = code.Primal.node[data_qubit]['charge'][charge_type]
                code.Primal.node[data_qubit]['charge'][charge_type] = (prior_charge + 1) % 2

    return code
Project: QTop    Author: jacobmarks
def AssociatedExternal(node, Dual, External):
    associate = External.iterkeys().next()
    min_dist = nx.shortest_path_length(Dual, node, External[associate]['measure']) + 1

    for candidate in External:
        distance = nx.shortest_path_length(Dual, node, External[candidate]['measure']) + 1
        if distance < min_dist:
            min_dist = distance
            associate = candidate
    return associate
Project: QTop    Author: jacobmarks
def distance(self, type, node1, node2):
        if node1 in self.Dual[type].nodes() and node2 in self.Dual[type].nodes():
            return nx.shortest_path_length(self.Dual[type], node1, node2)
        elif node1 in self.Dual[type].nodes() and node2 not in self.Dual[type].nodes():
            node2 = self.External[type][node2]['measure']
            return nx.shortest_path_length(self.Dual[type], node1, node2) + 1
        elif node1 not in self.Dual[type].nodes() and node2 in self.Dual[type].nodes():
            node1 = self.External[type][node1]['measure']
            return nx.shortest_path_length(self.Dual[type], node1, node2) + 1
        else:
            node1 = self.External[type][node1]['measure']
            node2 = self.External[type][node2]['measure']
            return nx.shortest_path_length(self.Dual[type], node1, node2) + 2


    # Re-initializes Measurement qubits
Project: cnn-graph-classification    Author: giannisnik
def sp_kernel(g1, g2=None):
    if g2 != None:
        graphs = []
        for g in g1:
            graphs.append(g)
        for g in g2:
            graphs.append(g)
    else:
        graphs = g1

    N = len(graphs)
    all_paths = {}
    sp_counts = {}
    for i in range(N):
        sp_lengths = nx.shortest_path_length(graphs[i])
        sp_counts[i] = {}
        nodes = graphs[i].nodes()
        for v1 in nodes:
            for v2 in nodes:
                if v2 in sp_lengths[v1]:
                    label = tuple(sorted([graphs[i].node[v1]['label'], graphs[i].node[v2]['label']]) + [sp_lengths[v1][v2]])
                    if label in sp_counts[i]:
                        sp_counts[i][label] += 1
                    else:
                        sp_counts[i][label] = 1

                    if label not in all_paths:
                        all_paths[label] = len(all_paths)

    phi = np.zeros((N,len(all_paths)))

    for i in range(N):
        for label in sp_counts[i]:
            phi[i,all_paths[label]] = sp_counts[i][label]

    if g2 != None:
        K = np.dot(phi[:len(g1),:],phi[len(g1):,:].T)
    else:
        K = np.dot(phi,phi.T)

    return K
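
A hypothetical usage sketch for the sp_kernel excerpt above. It assumes networkx 1.x semantics, which the excerpt itself relies on (shortest_path_length without a source returning a dict of dicts, node attributes accessed via graph.node), that numpy is imported as np, and that every node carries a 'label' attribute.

import networkx as nx

g_a = nx.path_graph(3)
g_b = nx.cycle_graph(4)
for g in (g_a, g_b):
    for v in g.nodes():
        g.node[v]['label'] = g.degree(v)  # any hashable label works

K = sp_kernel([g_a, g_b])  # 2x2 matrix of dot products between shortest-path feature vectors
print(K)
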
Project: robograph    Author: csparpa
def prepare_plot(graph):
    """
    Prepares a Matplotlib plot for further handling
    :param graph: datamodel.base.Graph instance
    :return: None
    """
    G = graph.nxgraph

    # Color map for nodes: color is proportional to depth level
    # http://matplotlib.org/examples/color/colormaps_reference.html
    depth_levels_from_root = nx.shortest_path_length(G, graph.root_node)
    vmax = 1.
    colormap = plt.get_cmap('BuGn')
    step = 1./len(graph)
    node_colors = [vmax - step * depth_levels_from_root[n] for n in G.nodes()]

    # Draw!
    # https://networkx.github.io/documentation/networkx-1.10/reference/drawing.html
    pos = nx.spectral_layout(G)
    nx.draw_networkx_labels(G, pos,
                            labels=dict([(n, n.name) for n in G.nodes()]),
                            font_weight='bold',
                            font_color='orangered')
    nx.draw_networkx_nodes(G, pos,
                           node_size=2000,
                           cmap=colormap,
                           vmin=0.,
                           vmax=vmax,
                           node_color=node_colors)
    nx.draw_networkx_edge_labels(G, pos,
                                 edge_labels=dict([((u, v,), d['name']) for u, v, d in G.edges(data=True)]))
    nx.draw_networkx_edges(G, pos,
                           edgelist=[edge for edge in G.edges()],
                           arrows=True)
Project: dankdungeon    Author: d4rch0n
def fully_connected(self):
        if self.graph is None:
            return False
        for i in range(1, self.num_rooms):
            try:
                path = networkx.shortest_path_length(
                    self.graph, source=0, target=i,
                )
            except networkx.NetworkXNoPath:
                print('Graph isnt fully connected! Regenerating.')
                return False
        return True
Project: synchrony    Author: cknd
def inputASL(network,inputc):
    """
    Returns the average shortest path length within the input-receiving subgraph.

    Args:
        network: networkx graph
        inputc: MxN array, all nonzero positions are treated as 'input receiving'
    """
    inputnodes = [tuple(ind) for ind in np.transpose(inputc.nonzero())]
    lengths = [nx.shortest_path_length(network,src,trg) for src in inputnodes for trg in inputnodes]
    return np.mean(lengths)
Project: synchrony    Author: cknd
def distances_to_roi(network,inputc,roi):
    """
    Returns a list of shortest path lengths from each
    input-receiving cell to all measured cells.

    Args:
        network: networkx graph
        inputc: MxN array, nonzero positions are treated as 'input receiving'
        roi: MxN array, nonzero positions are treated as 'measured'
    """
    inputnodes = [tuple(ind) for ind in np.transpose(inputc.nonzero())]
    roinodes   = [tuple(ind) for ind in np.transpose(roi.nonzero())]
    lengths = [nx.shortest_path_length(network,src,trg) for src in inputnodes for trg in roinodes]
    return lengths
Project: synchrony    Author: cknd
def createroiidxs(network,distance):
    """
    Choose two central nodes, some distance apart, and return their (i,j) indices.

    Args:
        network: networkx graph
        distance: how far apart the two nodes should be.

    Returns:
        A tuple of two (i,j) indices / node labels
    """
    nodes,centralities = zip(*nx.closeness_centrality(network).items())
    # sort nodes from most central to least central:
    centr_arxs = np.argsort(centralities)
    nodes_sorted = [n for n in reversed(np.array(nodes)[centr_arxs])]
    k = 0
    while k<len(nodes_sorted):
        # pick some node in the middle of the graph (high centrality)
        middlenode = tuple(nodes_sorted[k])
        # now pick the most central node that meets the given distance criterion.
        # [since we dont want to end up near the boundaries)
        for n in nodes_sorted:
            if nx.shortest_path_length(network,middlenode,tuple(n)) == distance:
                return middlenode,tuple(n)
        # if that didnt work, try starting with a different, less central middlenode.
        k = k+1
    raise Exception("speficied distance to high for this network")
Project: Taskpacker    Author: Edinburgh-Genome-Foundry
def plot_tasks_dependency_graph(tasks, ax=None):
    """Plot the graph of all inter-dependencies in the provided tasks list."""
    if not NX_AVAILABLE:
        raise ImportError("Install Networkx to plot task dependency graphs.")
    if not MATPLOTLIB_AVAILABLE:
        raise ImportError("Install Matplotlib to plot task dependency graphs.")
    g = nx.DiGraph()
    tasks_dict = {
        task.id: task
        for task in tasks
    }
    for task_id, task in tasks_dict.items():
        for parent_task in task.follows:
            g.add_edge(parent_task.id, task_id)

    nodes_depths = {
        node: 0
        for node in g.nodes()
    }
    for source, lengths in nx.shortest_path_length(g):
        for target, length in lengths.items():
            nodes_depths[target] = max(nodes_depths[target], length)
    levels = [
        sorted([
            node
            for node, depth in nodes_depths.items()
            if depth == this_depth
        ])[::-1]
        for this_depth in range(max(nodes_depths.values()) + 1)
    ]

    def draw_node(x, y, node, ax):
        task = tasks_dict[node]
        text = task.name.replace("_", "\n") + "\nduration: %d" % task.duration
        ax.text(x, y, text, verticalalignment="center",
                horizontalalignment="center",
                bbox={'facecolor': 'white', 'lw': 0})
    return plot_tree_graph(levels, g.edges(), draw_node, width_factor=2, ax=ax)
Project: osmnx    Author: gboeing
def truncate_graph_dist(G, source_node, max_distance=1000, weight='length', retain_all=False):
    """
    Remove everything further than some network distance from a specified node
    in graph.

    Parameters
    ----------
    G : networkx multidigraph
    source_node : int
        the node in the graph from which to measure network distances to other
        nodes
    max_distance : int
        remove every node in the graph greater than this distance from the
        source_node
    weight : string
        how to weight the graph when measuring distance (default 'length' is
        how many meters long the edge is)
    retain_all : bool
        if True, return the entire graph even if it is not connected

    Returns
    -------
    networkx multidigraph
    """

    # get the shortest distance between the node and every other node, then
    # remove every node further than max_distance away
    start_time = time.time()
    G = G.copy()
    distances = nx.shortest_path_length(G, source=source_node, weight=weight)
    distant_nodes = {key:value for key, value in dict(distances).items() if value > max_distance}
    G.remove_nodes_from(distant_nodes.keys())
    log('Truncated graph by weighted network distance in {:,.2f} seconds'.format(time.time()-start_time))

    # remove any isolated nodes and retain only the largest component (if
    # retain_all is True)
    if not retain_all:
        G = remove_isolated_nodes(G)
        G = get_largest_component(G)

    return G
Project: RasPiBot202    Author: DrGFreeman
def getNearestUnvisited(self, currentNode):
        shortestLength = self.farAway
        for node in self.g.nodes():
            if self.g.degree(node) < node.nbPathsOut + 1:
                length = nx.shortest_path_length(self.g, currentNode, node, weight = 'weight')
                print "Length to node ", node.uid, ": ", length
                if length < shortestLength:
                    nearestUnvisited = node
                    shortestLength = length
        print "Distance to nearest node with unvisited paths: ", shortestLength
        return nearestUnvisited
Project: SyConn    Author: StructuralNeurobiologyLab
def calc_arc_choord(anno, nxg=None):
    """
    Calculates the arc choord length of an annotation object as a measure of
    the tortuosity of an annotation. The return value is 1 for a completely
    straight skeleton. It uses the two most distant end nodes in an
    annotation and divides it by the graph path length between them.
    :param anno:
    :param nxg:
    :return: float
    """

    if not nxg:
        nxg = su.annoToNXGraph(anno)

    # find end nodes
    e_nodes = list({k for k, v in nxg.degree().iteritems() if v == 1})
    # get euclidian distance between end nodes max away
    # this can be done more efficiently by computing a convex hull first,
    # but the naive implementation should be fast enough for now
    dists = []
    for pair in itertools.permutations(e_nodes, 2):
        dists.append((pair, pair[0].distance_scaled(pair[1])))
    dists = sorted(dists, key=lambda x: x[1])
    try:
        path_len = nx.shortest_path_length(nxg, source=dists[-1][0][0],
                                           target=dists[-1][0][1],
                                           weight='weight')
    except:
        # print('No path between nodes for tortuosity calculation for neuron %s' %
        #       anno.filename)
        return 0
    return dists[-1][1] / path_len
Project: Quadflor    Author: quadflor
def _score(self, freq, scores, row, col, memoization=None):
        mem = memoization if memoization is not None else [False] * scores.shape[1]

        # memoization hit
        if mem[col]: return scores[row, col]

        children = self.hierarchy.successors(self.feature_names[col] if self.feature_names else col)
        if len(children) == 0:
            # Base case for leaves
            scores[row, col] = freq[row, col]
            mem[col] = True
            return scores[row, col]

        # recursively compute children score
        score = float(0)
        for child in children:
            child_idx = self.vocabulary[child] if self.vocabulary else child
            score += self._score(freq, scores, row, child_idx, memoization=mem)

        # scale them with some method specific factor
        if self.method in ["bell", "belllog"]:
            k = nx.shortest_path_length(self.hierarchy, self.root, self.feature_names[col] if self.feature_names else col)
            print(k+1, self.levels[k+1])
            print("Count of children:", len(children))
            denom = self.levels[k+1]
            if self.method == "belllog": denom = log(denom, 10) #TODO problem when zero
            score *= 1.0 / denom
        elif self.method == "children":
            score *= 1.0 / len(children)
        elif self.method == "basic":
            score *= self.decay 

        # add the freq of the concept just now since it should not be scaled
        score += freq[row, col]


        scores[row, col] = score
        mem[col] = True

        return scores[row, col]
Project: Quadflor    Author: quadflor
def fit(self, X, y=None):
        # the bell methods require additional information
        if self.method in ["bell", "belllog"]:
            # precompute node count by level
            self.levels = defaultdict(int)
            for node in self.hierarchy.nodes():
                l = nx.shortest_path_length(self.hierarchy, self.root, node)
                self.levels[l] += 1


            print(self.levels)
        return self
Project: lomap    Author: wasserfeder
def compute_potentials(pa):
    '''Computes the potential function for each state of the product automaton.
    The potential function represents the minimum distance to a self-reachable
    final state in the product automaton.
    '''
    assert 'v' not in pa.g
    # add virtual node which connects to all initial states in the product
    pa.g.add_node('v')
    pa.g.add_edges_from([('v', p) for p in pa.init])
    # create strongly connected components of the product automaton w/ 'v'
    scc = list(nx.strongly_connected_components(pa.g))
    dag = nx.condensation(pa.g, scc)
    # get strongly connected component which contains 'v'
    for k, sc in enumerate(scc[::-1]):
        if 'v' in sc:
            start = len(scc) - k - 1
            break
    assert 'v' in scc[start]
    assert map(lambda sc: 'v' in sc, scc).count(True) == 1
    # get self-reachable final states
    pa.srfs = self_reachable_final_states_dag(pa, dag, scc, start)
    # remove virtual node from product automaton
    pa.g.remove_node('v')
    assert 'v' not in pa.g
    if not pa.srfs:
        return False
    # add artificial node 'v' and edges from the set of self reachable
    # states (pa.srfs) to 'v'
    pa.g.add_node('v')
    for p in pa.srfs:
        pa.g.add_edge(p, 'v', **{'weight': 0})
    # compute the potentials for each state of the product automaton
    lengths = nx.shortest_path_length(pa.g, target='v', weight='weight')
    for p in pa.g:
        pa.g.node[p]['potential'] = lengths[p]
    # remove virtual state 'v'
    pa.g.remove_node('v')
    return True
Project: lomap    Author: wasserfeder
def remove_trap_states(self):
        '''
        Removes all states of the automaton which do not reach a final state.
        Returns True whenever trap states have been removed from the automaton.
        '''
        # add virtual state which has incoming edges from all final states
        self.g.add_edges_from([(state, 'virtual') for state in self.final])
        # compute trap states
        trap_states = set(self.g.nodes_iter())
        trap_states -= set(nx.shortest_path_length(self.g, target='virtual'))
        # remove trap state and virtual state
        self.g.remove_nodes_from(trap_states | set(['virtual']))
        return len(trap_states - set(['virtual'])) == 0
Project: lomap    Author: wasserfeder
def remove_trap_states(self):
        '''
        Removes all states of the automaton which do not reach a final state.
        Returns True whenever trap states have been removed from the automaton.
        '''
        # add virtual state which has incoming edges from all final states
        self.g.add_edges_from([(state, 'virtual') for state in self.final])
        # compute trap states
        trap_states = set(self.g.nodes_iter())
        trap_states -= set(nx.shortest_path_length(self.g, target='virtual'))
        # remove trap state and virtual state
        self.g.remove_nodes_from(trap_states | set(['virtual']))
        return len(trap_states - set(['virtual'])) == 0
Project: lomap    Author: wasserfeder
def remove_trap_states(self):
        '''
        Removes all states of the automaton which do not reach a final state.
        Returns True whenever trap states have been removed from the automaton.
        '''
        # add virtual state which has incoming edges from all final states
        self.g.add_edges_from([(state, 'virtual') for state in self.final])
        # compute trap states
        trap_states = set(self.g.nodes_iter())
        trap_states -= set(nx.shortest_path_length(self.g, target='virtual'))
        # remove trap state and virtual state
        self.g.remove_nodes_from(trap_states | set(['virtual']))
        return len(trap_states - set(['virtual'])) == 0
项目:ez-segway    作者:thanh-nguyen-dang    | 项目源码 | 文件源码
def deploy_controller(self, sw, sw_to_ctrl_delays):
        sw_to_ctrl_delays[sw] = 0
        shortest_paths = nx.shortest_path(self.graph, target=sw, weight='delay')
        shortest_path_lengths = nx.shortest_path_length(self.graph, target=sw, weight='delay')
        log.info(shortest_paths)
        for n in self.graph.nodes():
            if n == sw:
                continue
            if n in shortest_path_lengths.keys():
                sw_to_ctrl_delays[n] = shortest_path_lengths[n]
            else:
                sw_to_ctrl_delays[n] = 1
        log.debug("sw to ctrl delays: %s" % str(sw_to_ctrl_delays))
项目:psst    作者:power-system-simulation-toolbox    | 项目源码 | 文件源码
def create_profile_graph(self, y_values):
        self.regenerate_network(load_names=False, gen_names=False)

        swing_bus = self.swing_bus
        bus_distance_matrix_df = pd.DataFrame(nx.shortest_path_length(self.graph))

        pos = dict()
        for k, v in bus_distance_matrix_df.loc[swing_bus].sort_values().to_dict().items():
            pos[k] = (v, y_values[k])

        self.positions = pos
项目:cnn-graph-classification    作者:giannisnik    | 项目源码 | 文件源码
def sp_kernel(g1, g2=None):
    if g2 != None:
        graphs = []
        for g in g1:
            graphs.append(g)
        for g in g2:
            graphs.append(g)
    else:
        graphs = g1

    sp_lengths = []

    for graph in graphs:
        sp_lengths.append(nx.shortest_path_length(graph))

    N = len(graphs)
    all_paths = {}
    sp_counts = {}
    for i in range(N):
        sp_counts[i] = {}
        nodes = graphs[i].nodes()
        for v1 in nodes:
            for v2 in nodes:
                if v2 in sp_lengths[i][v1]:
                    label = sp_lengths[i][v1][v2]
                    if label in sp_counts[i]:
                        sp_counts[i][label] += 1
                    else:
                        sp_counts[i][label] = 1

                    if label not in all_paths:
                        all_paths[label] = len(all_paths)

    phi = lil_matrix((N,len(all_paths)))

    for i in range(N):
        for label in sp_counts[i]:
            phi[i,all_paths[label]] = sp_counts[i][label]

    if g2 != None:
        K = np.dot(phi[:len(g1),:],phi[len(g1):,:].T)
    else:
        K = np.dot(phi,phi.T)

    K = np.asarray(K.todense())

    return K
Project: mapmatching    Author: simonscheider
def getNetworkTransP(s1, s2, graph, endpoints, segmentlengths, pathnodes, decayconstantNet):
    """
    Returns transition probability of going from segment s1 to s2, based on network distance of segments, as well as corresponding path
    """
    subpath = []
    s1_point = None
    s2_point = None

    if s1 == s2:
        dist = 0
    else:
        #Obtain edges (tuples of endpoints) for segment identifiers
        s1_edge = endpoints[s1]
        s2_edge = endpoints[s2]

        s1_l = segmentlengths[s1]
        s2_l = segmentlengths[s2]

        #This determines segment endpoints of the two segments that are closest to each other
        minpair = [0,0,100000]
        for i in range(0,2):
            for j in range(0,2):
                d = round(pointdistance(s1_edge[i],s2_edge[j]),2)
                if d<minpair[2]:
                    minpair = [i,j,d]
        s1_point = s1_edge[minpair[0]]
        s2_point = s2_edge[minpair[1]]

##        if (s1_point in pathnodes or s2_point in pathnodes): # Avoid paths reusing an old pathnode (to prevent self-crossings)
##            dist = 100
##        else:
        if s1_point == s2_point:
                #If segments are touching, use a small network distance
                    dist = 5
        else:
                try:
                    #Compute a shortest path (using segment length) on graph where segment endpoints are nodes and segments are (undirected) edges
                    if graph.has_node(s1_point) and graph.has_node(s2_point):
                        dist = nx.shortest_path_length(graph, s1_point, s2_point, weight='length')
                        path = nx.shortest_path(graph, s1_point, s2_point, weight='length')
                        #get path edges
                        path_edges = zip(path,path[1:])
                        #print "edges: "+str(path_edges)
                        subpath = []
                        # get object ids for path edges
                        for e in path_edges:
                            oid = graph.edge[e[0]][e[1]]["OBJECTID"]
                            subpath.append(oid)
                        #print "oid path:"+str(subpath)
                    else:
                        #print "node not in segment graph!"
                        dist = 3*decayconstantNet #600
                except nx.NetworkXNoPath:
                    #print 'no path available, assume a large distance'
                    dist = 3*decayconstantNet #700
    #print "network distance between "+str(s1) + ' and '+ str(s2) + ' = '+str(dist)
    return (getNDProbability(dist,decayconstantNet),subpath,s2_point)
Project: mapmatching    Author: simonscheider
def getNetworkTransP(s1, s2, graph, endpoints, segmentlengths, pathnodes, decayconstantNet):
    """
    Returns transition probability of going from segment s1 to s2, based on network distance of segments, as well as corresponding path
    """
    subpath = []
    s1_point = None
    s2_point = None

    if s1 == s2:
        dist = 0
    else:
        #Obtain edges (tuples of endpoints) for segment identifiers
        s1_edge = endpoints[s1]
        s2_edge = endpoints[s2]

        s1_l = segmentlengths[s1]
        s2_l = segmentlengths[s2]

        #This determines segment endpoints of the two segments that are closest to each other
        minpair = [0,0,100000]
        for i in range(0,2):
            for j in range(0,2):
                d = round(pointdistance(s1_edge[i],s2_edge[j]),2)
                if d<minpair[2]:
                    minpair = [i,j,d]
        s1_point = s1_edge[minpair[0]]
        s2_point = s2_edge[minpair[1]]

##        if (s1_point in pathnodes or s2_point in pathnodes): # Avoid paths reusing an old pathnode (to prevent self-crossings)
##            dist = 100
##        else:
        if s1_point == s2_point:
                #If segments are touching, use a small network distance
                    dist = 5
        else:
                try:
                    #Compute a shortest path (using segment length) on graph where segment endpoints are nodes and segments are (undirected) edges
                    if graph.has_node(s1_point) and graph.has_node(s2_point):
                        dist = nx.shortest_path_length(graph, s1_point, s2_point, weight='length')
                        path = nx.shortest_path(graph, s1_point, s2_point, weight='length')
                        #get path edges
                        path_edges = zip(path,path[1:])
                        #print "edges: "+str(path_edges)
                        subpath = []
                        # get object ids for path edges
                        for e in path_edges:
                            oid = graph.edge[e[0]][e[1]]["OBJECTID"]
                            subpath.append(oid)
                        #print "oid path:"+str(subpath)
                    else:
                        #print "node not in segment graph!"
                        dist = 3*decayconstantNet #600
                except nx.NetworkXNoPath:
                    #print 'no path available, assume a large distance'
                    dist = 3*decayconstantNet #700
    #print "network distance between "+str(s1) + ' and '+ str(s2) + ' = '+str(dist)
    return (getNDProbability(dist,decayconstantNet),subpath,s2_point)
Project: PyMaid    Author: schlegelp
def dist_between(x, a, b):
    """ Returns the geodesic distance between nodes in nanometers.

    Parameters
    ----------
    x :             {CatmaidNeuron, CatmaidNeuronList}
                    Neuron containing the nodes
    a, b :          treenode IDs
                    Treenodes to check.

    Returns
    -------
    int
                    distance in nm

    See Also
    --------
    :func:`~pymaid.distal_to`
        Check if a node A is distal to node B.
    :func:`~pymaid.geodesic_matrix`
        Get all-by-all geodesic distance matrix.

    """

    if isinstance( x, core.CatmaidNeuronList ):
        if len(x) == 1:
            x = x[0]
        else:
            raise ValueError('Need a single CatmaidNeuron')
    elif isinstance( x, core.CatmaidNeuron ):
        pass
    else:
        raise ValueError('Unable to process data of type {0}'.format(type(x)))

    try:
        _ = int(a)
        _ = int(b)
    except:
        raise ValueError('a, b need to be treenode IDs')  


    # work on the neuron's networkx graph representation (assumed here to be x.graph,
    # since `g` is otherwise undefined in this excerpt)
    g = x.graph

    return int( nx.algorithms.shortest_path_length( g.to_undirected(as_view=True), 
                                                    a, b, 
                                                    weight='weight') )
Project: TextAsGraphClassification    Author: NightmareNyx
def sp_kernel(g1, g2=None):
    if g2 is not None:
        graphs = []
        for g in g1:
            graphs.append(g)
        for g in g2:
            graphs.append(g)
    else:
        graphs = g1

    sp_lengths = []

    for graph in graphs:
        sp_lengths.append(nx.shortest_path_length(graph))

    N = len(graphs)
    all_paths = {}
    sp_counts = {}
    for i in range(N):
        sp_counts[i] = {}
        nodes = graphs[i].nodes()
        for v1 in nodes:
            for v2 in nodes:
                if v2 in sp_lengths[i][v1]:
                    label = sp_lengths[i][v1][v2]
                    if label in sp_counts[i]:
                        sp_counts[i][label] += 1
                    else:
                        sp_counts[i][label] = 1

                    if label not in all_paths:
                        all_paths[label] = len(all_paths)

    phi = np.zeros((N, len(all_paths)))

    for i in range(N):
        for label in sp_counts[i]:
            phi[i, all_paths[label]] = sp_counts[i][label]

    if g2 is not None:
        K = np.dot(phi[:len(g1), :], phi[len(g1):, :].T)
    else:
        K = np.dot(phi, phi.T)

    return K
Project: TextAsGraphClassification    Author: NightmareNyx
def sp_kernel(g1, g2=None):
    with Timer("SP kernel"):
        if g2 is not None:
            graphs = []
            for g in g1:
                graphs.append(g)
            for g in g2:
                graphs.append(g)
        else:
            graphs = g1

        sp_lengths = []

        for graph in graphs:
            sp_lengths.append(nx.shortest_path_length(graph))

        N = len(graphs)
        all_paths = {}
        sp_counts = {}
        for i in range(N):
            sp_counts[i] = {}
            nodes = graphs[i].nodes()
            for v1 in nodes:
                for v2 in nodes:
                    if v2 in sp_lengths[i][v1]:
                        label = tuple(
                            sorted([graphs[i].node[v1]['label'], graphs[i].node[v2]['label']]) + [
                                sp_lengths[i][v1][v2]])
                        if label in sp_counts[i]:
                            sp_counts[i][label] += 1
                        else:
                            sp_counts[i][label] = 1

                        if label not in all_paths:
                            all_paths[label] = len(all_paths)

        phi = lil_matrix((N, len(all_paths)))

        for i in range(N):
            for label in sp_counts[i]:
                phi[i, all_paths[label]] = sp_counts[i][label]

        if g2 is not None:
            K = np.dot(phi[:len(g1), :], phi[len(g1):, :].T)
        else:
            K = np.dot(phi, phi.T)

    return K.todense()