diff --git a/graph/.gitignore b/graph/.gitignore
new file mode 100644
index 00000000..86e0d240
--- /dev/null
+++ b/graph/.gitignore
@@ -0,0 +1 @@
+test.out
\ No newline at end of file
diff --git a/graph/.travis.yml b/graph/.travis.yml
new file mode 100644
index 00000000..9cfc4e4b
--- /dev/null
+++ b/graph/.travis.yml
@@ -0,0 +1,24 @@
+sudo: false
+
+language: go
+
+# Versions of go that are explicitly supported by gonum.
+go:
+ - 1.5.4
+ - 1.6.3
+ - 1.7.3
+
+# Required for coverage.
+before_install:
+ - go get golang.org/x/tools/cmd/cover
+ - go get github.com/mattn/goveralls
+
+# Get deps, build, test, and ensure the code is gofmt'ed.
+# If we are building as gonum, then we have access to the coveralls api key, so we can run coverage as well.
+script:
+ - go get -d -t -v ./...
+ - go build -v ./...
+ - go test -v -a ./...
+ - go test -v -a -tags appengine ./...
+ - test -z "$(gofmt -d .)"
+ - if [[ $TRAVIS_SECURE_ENV_VARS = "true" ]]; then bash ./.travis/test-coverage.sh; fi
diff --git a/graph/.travis/test-coverage.sh b/graph/.travis/test-coverage.sh
new file mode 100755
index 00000000..7df8aa6a
--- /dev/null
+++ b/graph/.travis/test-coverage.sh
@@ -0,0 +1,35 @@
+#!/bin/bash
+
+PROFILE_OUT=$PWD/profile.out
+ACC_OUT=$PWD/acc.out
+
+testCover() {
+	# set the return value to 0 (successful)
+	retval=0
+	# get the directory to check from the parameter. Default to '.'
+	d=${1:-.}
+	# skip if there are no Go files here
+	ls $d/*.go &> /dev/null || return $retval
+	# switch to the directory to check
+	pushd $d > /dev/null
+	# create the coverage profile
+	coverageresult=`go test -v -coverprofile=$PROFILE_OUT`
+	# output the result so we can check the shell output
+	echo ${coverageresult}
+	# append the results to acc.out if coverage didn't fail, else set the retval to 1 (failed)
+	( [[ ${coverageresult} == *FAIL* ]] && retval=1 ) || ( [ -f $PROFILE_OUT ] && grep -v "mode: set" $PROFILE_OUT >> $ACC_OUT )
+	# return to our working dir
+	popd > /dev/null
+	# return our return value
+	return $retval
+}
+
+# Init acc.out
+echo "mode: set" > $ACC_OUT
+
+# Run test coverage on all directories containing go files
+find . -maxdepth 10 -type d | while read d; do testCover $d || exit; done
+
+# Upload the coverage profile to coveralls.io
+[ -n "$COVERALLS_TOKEN" ] && goveralls -coverprofile=$ACC_OUT -service=travis-ci -repotoken $COVERALLS_TOKEN
+
diff --git a/graph/README.md b/graph/README.md
new file mode 100644
index 00000000..469a8f60
--- /dev/null
+++ b/graph/README.md
@@ -0,0 +1,15 @@
+# Gonum Graph [![Build Status](https://travis-ci.org/gonum/graph.svg?branch=master)](https://travis-ci.org/gonum/graph) [![Coverage Status](https://coveralls.io/repos/gonum/graph/badge.svg?branch=master&service=github)](https://coveralls.io/github/gonum/graph?branch=master) [![GoDoc](https://godoc.org/github.com/gonum/graph?status.svg)](https://godoc.org/github.com/gonum/graph)
+
+This is a generalized graph package for the Go language. It aims to provide a clean, transparent API for common algorithms on arbitrary graphs such as finding the graph's strongly connected components, dominators, or searches.
+
+The package is currently in testing, and the API is "semi-stable". The signatures of any functions like AStar are unlikely to change much, but the Graph, Node, and Edge interfaces may change a bit.
+
+## Issues
+
+If you find any bugs, feel free to file an issue on the GitHub issue tracker. Discussions on API changes, added features, code review, or similar requests are preferred on the Gonum-dev Google Group.
+ +https://groups.google.com/forum/#!forum/gonum-dev + +## License + +Please see github.com/gonum/license for general license information, contributors, authors, etc on the Gonum suite of packages. diff --git a/graph/community/bisect.go b/graph/community/bisect.go new file mode 100644 index 00000000..7cd1dbd7 --- /dev/null +++ b/graph/community/bisect.go @@ -0,0 +1,248 @@ +// Copyright ©2016 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package community + +import ( + "errors" + "fmt" + "math" + "math/rand" + + "github.com/gonum/graph" +) + +// Interval is an interval of resolutions with a common score. +type Interval struct { + // Low and High delimit the interval + // such that the interval is [low, high). + Low, High float64 + + // Score is the score of the interval. + Score float64 + + // Reduced is the best scoring + // community membership found for the + // interval. + Reduced +} + +// Reduced is a graph reduction. +type Reduced interface { + // Communities returns the community + // structure of the reduction. + Communities() [][]graph.Node +} + +// Size is a score function that is the reciprocal of the number of communities. +func Size(g ReducedGraph) float64 { return 1 / float64(len(g.Structure())) } + +// Weight is a score function that is the sum of community weights. The concrete +// type of g must be a pointer to a ReducedUndirected or a ReducedDirected, otherwise +// Weight will panic. +func Weight(g ReducedGraph) float64 { + var w float64 + switch g := g.(type) { + case *ReducedUndirected: + for _, n := range g.nodes { + w += n.weight + } + case *ReducedDirected: + for _, n := range g.nodes { + w += n.weight + } + default: + panic(fmt.Sprintf("community: invalid graph type: %T", g)) + } + return w +} + +// ModularScore returns a modularized scoring function for Profile based on the +// graph g and the given score function. The effort parameter determines how +// many attempts will be made to get an improved score for any given resolution. +func ModularScore(g graph.Graph, score func(ReducedGraph) float64, effort int, src *rand.Rand) func(float64) (float64, Reduced) { + return func(resolution float64) (float64, Reduced) { + max := math.Inf(-1) + var best Reduced + for i := 0; i < effort; i++ { + r := Modularize(g, resolution, src) + s := score(r) + if s > max { + max = s + best = r + } + } + return max, best + } +} + +// SizeMultiplex is a score function that is the reciprocal of the number of communities. +func SizeMultiplex(g ReducedMultiplex) float64 { return 1 / float64(len(g.Structure())) } + +// WeightMultiplex is a score function that is the sum of community weights. The concrete +// type of g must be pointer to a ReducedUndirectedMultiplex or a ReducedDirectedMultiplex, +// otherwise WeightMultiplex will panic. +func WeightMultiplex(g ReducedMultiplex) float64 { + var w float64 + switch g := g.(type) { + case *ReducedUndirectedMultiplex: + for _, n := range g.nodes { + for _, lw := range n.weights { + w += lw + } + } + case *ReducedDirectedMultiplex: + for _, n := range g.nodes { + for _, lw := range n.weights { + w += lw + } + } + default: + panic(fmt.Sprintf("community: invalid graph type: %T", g)) + } + return w +} + +// ModularMultiplexScore returns a modularized scoring function for Profile based +// on the graph g and the given score function. The effort parameter determines how +// many attempts will be made to get an improved score for any given resolution. 
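+//
+// A minimal usage sketch; the graph g and the per-layer weights are assumed
+// to have been built elsewhere, for example with NewUndirectedLayers:
+//
+//	fn := ModularMultiplexScore(g, weights, true, WeightMultiplex, 10, nil)
+//	profile, err := Profile(fn, true, 1e-3, 0.1, 10)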
+func ModularMultiplexScore(g Multiplex, weights []float64, all bool, score func(ReducedMultiplex) float64, effort int, src *rand.Rand) func(float64) (float64, Reduced) { + return func(resolution float64) (float64, Reduced) { + max := math.Inf(-1) + var best Reduced + for i := 0; i < effort; i++ { + r := ModularizeMultiplex(g, weights, []float64{resolution}, all, src) + s := score(r) + if s > max { + max = s + best = r + } + } + return max, best + } +} + +// Profile returns an approximate profile of score values in the resolution domain [low,high) +// at the given granularity. The score is calculated by bisecting calls to fn. If log is true, +// log space bisection is used, otherwise bisection is linear. The function fn should be +// monotonically decreasing in at least 1/grain evaluations. Profile will attempt to detect +// non-monotonicity during the bisection. +// +// Since exact modularity optimization is known to be NP-hard and Profile calls modularization +// routines repeatedly, it is unlikely to return the exact resolution profile. +func Profile(fn func(float64) (float64, Reduced), log bool, grain, low, high float64) (profile []Interval, err error) { + if low >= high { + return nil, errors.New("community: zero or negative width domain") + } + + defer func() { + r := recover() + e, ok := r.(nonDecreasing) + if ok { + err = e + return + } + if r != nil { + panic(r) + } + }() + left, comm := fn(low) + right, _ := fn(high) + for i := 1; i < int(1/grain); i++ { + rt, _ := fn(high) + right = math.Max(right, rt) + } + profile = bisect(fn, log, grain, low, left, high, right, comm) + + // We may have missed some non-monotonicity, + // so merge low score discordant domains into + // their lower resolution neighbours. + return fixUp(profile), nil +} + +type nonDecreasing int + +func (n nonDecreasing) Error() string { + return fmt.Sprintf("community: profile does not reliably monotonically decrease: tried %d times", n) +} + +func bisect(fn func(float64) (float64, Reduced), log bool, grain, low, scoreLow, high, scoreHigh float64, comm Reduced) []Interval { + if low >= high { + panic("community: zero or negative width domain") + } + if math.IsNaN(scoreLow) || math.IsNaN(scoreHigh) { + return nil + } + + // Heuristically determine a reasonable number + // of times to try to get a higher value. + maxIter := int(1 / grain) + + lowComm := comm + for n := 0; scoreLow < scoreHigh; n++ { + if n > maxIter { + panic(nonDecreasing(n)) + } + scoreLow, lowComm = fn(low) + } + + if scoreLow == scoreHigh || tooSmall(low, high, grain, log) { + return []Interval{{Low: low, High: high, Score: scoreLow, Reduced: lowComm}} + } + + var mid float64 + if log { + mid = math.Sqrt(low * high) + } else { + mid = (low + high) / 2 + } + + scoreMid := math.Inf(-1) + var midComm Reduced + for n := 0; scoreMid < scoreHigh; n++ { + if n > maxIter { + panic(nonDecreasing(n)) + } + scoreMid, midComm = fn(mid) + } + + lower := bisect(fn, log, grain, low, scoreLow, mid, scoreMid, lowComm) + higher := bisect(fn, log, grain, mid, scoreMid, high, scoreHigh, midComm) + for n := 0; lower[len(lower)-1].Score < higher[0].Score; n++ { + if n > maxIter { + panic(nonDecreasing(n)) + } + lower[len(lower)-1].Score, lower[len(lower)-1].Reduced = fn(low) + } + + if lower[len(lower)-1].Score == higher[0].Score { + higher[0].Low = lower[len(lower)-1].Low + lower = lower[:len(lower)-1] + if len(lower) == 0 { + return higher + } + } + return append(lower, higher...) +} + +// fixUp non-monotonically decreasing interval scores. 
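+// For illustration (hypothetical scores), successive interval scores of
+// [5, 2, 3, 1] would be fixed up to [5, 3, 1]: the discordant 2 is dropped
+// and the following interval's Low is extended to cover its domain.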
+func fixUp(profile []Interval) []Interval { + max := profile[len(profile)-1].Score + for i := len(profile) - 2; i >= 0; i-- { + if profile[i].Score > max { + max = profile[i].Score + continue + } + profile[i+1].Low = profile[i].Low + profile = append(profile[:i], profile[i+1:]...) + } + return profile +} + +func tooSmall(low, high, grain float64, log bool) bool { + if log { + return math.Log(high/low) < grain + } + return high-low < grain +} diff --git a/graph/community/bisect_test.go b/graph/community/bisect_test.go new file mode 100644 index 00000000..6f12a62a --- /dev/null +++ b/graph/community/bisect_test.go @@ -0,0 +1,269 @@ +// Copyright ©2016 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package community + +import ( + "fmt" + "log" + "sort" + "testing" + + "github.com/gonum/graph/internal/ordered" + + "github.com/gonum/graph/simple" +) + +func ExampleProfile_simple() { + // Create dumbell graph: + // + // 0 4 + // |\ /| + // | 2 - 3 | + // |/ \| + // 1 5 + // + g := simple.NewUndirectedGraph(0, 0) + for u, e := range smallDumbell { + for v := range e { + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1}) + } + } + + // Get the profile of internal node weight for resolutions + // between 0.1 and 10 using logarithmic bisection. + p, err := Profile(ModularScore(g, Weight, 10, nil), true, 1e-3, 0.1, 10) + if err != nil { + log.Fatal(err) + } + + // Print out each step with communities ordered. + for _, d := range p { + comm := d.Communities() + for _, c := range comm { + sort.Sort(ordered.ByID(c)) + } + sort.Sort(ordered.BySliceIDs(comm)) + fmt.Printf("Low:%.2v High:%.2v Score:%v Communities:%v Q=%.3v\n", + d.Low, d.High, d.Score, comm, Q(g, comm, d.Low)) + } + + // Output: + // Low:0.1 High:0.29 Score:14 Communities:[[0 1 2 3 4 5]] Q=0.9 + // Low:0.29 High:2.3 Score:12 Communities:[[0 1 2] [3 4 5]] Q=0.714 + // Low:2.3 High:3.5 Score:4 Communities:[[0 1] [2] [3] [4 5]] Q=-0.31 + // Low:3.5 High:10 Score:0 Communities:[[0] [1] [2] [3] [4] [5]] Q=-0.607 +} + +var friends, enemies *simple.UndirectedGraph + +func init() { + friends = simple.NewUndirectedGraph(0, 0) + for u, e := range middleEast.friends { + // Ensure unconnected nodes are included. + if !friends.Has(simple.Node(u)) { + friends.AddNode(simple.Node(u)) + } + for v := range e { + friends.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1}) + } + } + enemies = simple.NewUndirectedGraph(0, 0) + for u, e := range middleEast.enemies { + // Ensure unconnected nodes are included. + if !enemies.Has(simple.Node(u)) { + enemies.AddNode(simple.Node(u)) + } + for v := range e { + enemies.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: -1}) + } + } +} + +func ExampleProfile_multiplex() { + // The undirected graphs, friends and enemies, are the political relationships + // in the Middle East as described in the Slate article: + // http://www.slate.com/blogs/the_world_/2014/07/17/the_middle_east_friendship_chart.html + g, err := NewUndirectedLayers(friends, enemies) + if err != nil { + log.Fatal(err) + } + weights := []float64{1, -1} + + // Get the profile of internal node weight for resolutions + // between 0.1 and 10 using logarithmic bisection. + p, err := Profile(ModularMultiplexScore(g, weights, true, WeightMultiplex, 10, nil), true, 1e-3, 0.1, 10) + if err != nil { + log.Fatal(err) + } + + // Print out each step with communities ordered. 
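+	// QMultiplex returns one Q value per layer, in the order the layers
+	// were passed to NewUndirectedLayers (friends, then enemies).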
+	for _, d := range p {
+		comm := d.Communities()
+		for _, c := range comm {
+			sort.Sort(ordered.ByID(c))
+		}
+		sort.Sort(ordered.BySliceIDs(comm))
+		fmt.Printf("Low:%.2v High:%.2v Score:%v Communities:%v Q=%.3v\n",
+			d.Low, d.High, d.Score, comm, QMultiplex(g, comm, weights, []float64{d.Low}))
+	}
+
+	// Output:
+	// Low:0.1 High:0.72 Score:26 Communities:[[0] [1 7 9 12] [2 8 11] [3 4 5 10] [6]] Q=[24.7 1.97]
+	// Low:0.72 High:1.1 Score:24 Communities:[[0 6] [1 7 9 12] [2 8 11] [3 4 5 10]] Q=[16.9 14.1]
+	// Low:1.1 High:1.2 Score:18 Communities:[[0 2 6 11] [1 7 9 12] [3 4 5 8 10]] Q=[9.16 25.1]
+	// Low:1.2 High:1.6 Score:10 Communities:[[0 3 4 5 6 10] [1 7 9 12] [2 8 11]] Q=[11.4 24.1]
+	// Low:1.6 High:1.6 Score:8 Communities:[[0 1 6 7 9 12] [2 8 11] [3 4 5 10]] Q=[5.56 39.8]
+	// Low:1.6 High:1.8 Score:2 Communities:[[0 2 3 4 5 6 10] [1 7 8 9 11 12]] Q=[-1.82 48.6]
+	// Low:1.8 High:2.3 Score:-6 Communities:[[0 2 3 4 5 6 8 10 11] [1 7 9 12]] Q=[-5 57.5]
+	// Low:2.3 High:2.4 Score:-10 Communities:[[0 1 2 6 7 8 9 11 12] [3 4 5 10]] Q=[-11.2 79]
+	// Low:2.4 High:4.3 Score:-52 Communities:[[0 1 2 3 4 5 6 7 8 9 10 11 12]] Q=[-46.1 117]
+	// Low:4.3 High:10 Score:-54 Communities:[[0 1 2 3 4 6 7 8 9 10 11 12] [5]] Q=[-82 254]
+}
+
+func TestProfileUndirected(t *testing.T) {
+	for _, test := range communityUndirectedQTests {
+		g := simple.NewUndirectedGraph(0, 0)
+		for u, e := range test.g {
+			// Add nodes that are not defined by an edge.
+			if !g.Has(simple.Node(u)) {
+				g.AddNode(simple.Node(u))
+			}
+			for v := range e {
+				g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1})
+			}
+		}
+
+		fn := ModularScore(g, Weight, 10, nil)
+		p, err := Profile(fn, true, 1e-3, 0.1, 10)
+		if err != nil {
+			t.Errorf("%s: unexpected error: %v", test.name, err)
+		}
+
+		const tries = 1000
+		for i, d := range p {
+			var score float64
+			for i := 0; i < tries; i++ {
+				score, _ = fn(d.Low)
+				if score >= d.Score {
+					break
+				}
+			}
+			if score < d.Score {
+				t.Errorf("%s: failed to recover low end score: got: %v want: %v", test.name, score, d.Score)
+			}
+			if i != 0 && d.Score >= p[i-1].Score {
+				t.Errorf("%s: not monotonically decreasing: %v -> %v", test.name, p[i-1], d)
+			}
+		}
+	}
+}
+
+func TestProfileDirected(t *testing.T) {
+	for _, test := range communityDirectedQTests {
+		g := simple.NewDirectedGraph(0, 0)
+		for u, e := range test.g {
+			// Add nodes that are not defined by an edge.
+			if !g.Has(simple.Node(u)) {
+				g.AddNode(simple.Node(u))
+			}
+			for v := range e {
+				g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1})
+			}
+		}
+
+		fn := ModularScore(g, Weight, 10, nil)
+		p, err := Profile(fn, true, 1e-3, 0.1, 10)
+		if err != nil {
+			t.Errorf("%s: unexpected error: %v", test.name, err)
+		}
+
+		const tries = 1000
+		for i, d := range p {
+			var score float64
+			for i := 0; i < tries; i++ {
+				score, _ = fn(d.Low)
+				if score >= d.Score {
+					break
+				}
+			}
+			if score < d.Score {
+				t.Errorf("%s: failed to recover low end score: got: %v want: %v", test.name, score, d.Score)
+			}
+			if i != 0 && d.Score >= p[i-1].Score {
+				t.Errorf("%s: not monotonically decreasing: %v -> %v", test.name, p[i-1], d)
+			}
+		}
+	}
+}
+
+func TestProfileUndirectedMultiplex(t *testing.T) {
+	for _, test := range communityUndirectedMultiplexQTests {
+		g, weights, err := undirectedMultiplexFrom(test.layers)
+		if err != nil {
+			t.Errorf("unexpected error creating multiplex: %v", err)
+			continue
+		}
+
+		const all = true
+
+		fn := ModularMultiplexScore(g, weights, all, WeightMultiplex, 10, nil)
+		p, err := Profile(fn, true, 1e-3, 0.1, 10)
+		if err != nil {
+			t.Errorf("%s: unexpected error: %v", test.name, err)
+		}
+
+		const tries = 1000
+		for i, d := range p {
+			var score float64
+			for i := 0; i < tries; i++ {
+				score, _ = fn(d.Low)
+				if score >= d.Score {
+					break
+				}
+			}
+			if score < d.Score {
+				t.Errorf("%s: failed to recover low end score: got: %v want: %v", test.name, score, d.Score)
+			}
+			if i != 0 && d.Score >= p[i-1].Score {
+				t.Errorf("%s: not monotonically decreasing: %v -> %v", test.name, p[i-1], d)
+			}
+		}
+	}
+}
+
+func TestProfileDirectedMultiplex(t *testing.T) {
+	for _, test := range communityDirectedMultiplexQTests {
+		g, weights, err := directedMultiplexFrom(test.layers)
+		if err != nil {
+			t.Errorf("unexpected error creating multiplex: %v", err)
+			continue
+		}
+
+		const all = true
+
+		fn := ModularMultiplexScore(g, weights, all, WeightMultiplex, 10, nil)
+		p, err := Profile(fn, true, 1e-3, 0.1, 10)
+		if err != nil {
+			t.Errorf("%s: unexpected error: %v", test.name, err)
+		}
+
+		const tries = 1000
+		for i, d := range p {
+			var score float64
+			for i := 0; i < tries; i++ {
+				score, _ = fn(d.Low)
+				if score >= d.Score {
+					break
+				}
+			}
+			if score < d.Score {
+				t.Errorf("%s: failed to recover low end score: got: %v want: %v", test.name, score, d.Score)
+			}
+			if i != 0 && d.Score >= p[i-1].Score {
+				t.Errorf("%s: not monotonically decreasing: %v -> %v", test.name, p[i-1], d)
+			}
+		}
+	}
+}
diff --git a/graph/community/louvain.tex b/graph/community/louvain.tex
new file mode 100644
index 00000000..046c02ba
--- /dev/null
+++ b/graph/community/louvain.tex
@@ -0,0 +1,466 @@
+% Copyright ©2015 The gonum Authors. All rights reserved.
+% Use of this source code is governed by a BSD-style
+% license that can be found in the LICENSE file.
+
+\documentclass{article}
+
+\usepackage{amsmath,amsfonts}
+\usepackage[margin=4cm]{geometry}
+
+\title{Louvain algorithm for undirected and directed graphs}
+\author{The {\tt gonum} Authors}
+
+\begin{document}
+
+\maketitle
+
+The algorithm attempts to find communities (highly connected sub-graphs),
+and it does this by maximising the modularity function
+\begin{equation}
+ Q(c) = \frac{1}{2m}\sum_i\sum_j\left[ A_{ij} - \gamma \frac{k_ik_j}{2m} \right] \delta_{ij}(c),
+\end{equation}
+where $c$ is a partition of nodes into subsets or communities,
+$A_{ij}$ is the edge weight between nodes $i$ and $j$,
+$\gamma$ is a tuning parameter,
+\begin{equation}
+m = \frac{1}{2}\sum_i\sum_jA_{ij},
+\end{equation}
+\begin{equation}
+k_i = \sum_j{A_{ij}},
+\end{equation}
+and
+\begin{equation}
+ \delta_{ij}(c) = \left \{ \begin{array}{ll}
+ 1 & \text{if} \quad c(i) = c(j) \\
+ 0 & \text{otherwise} \end{array} \right ..
+\end{equation}
+Here $c(i)$ denotes the community to which node $i$ belongs
+in the partitioning $c$.
+
+The algorithm finds a hierarchical community structure by iterating
+between two phases:
+\begin{enumerate}
+ \item Find a set of communities that maximise $Q$.
+ \item Construct a new graph, whose nodes are the communities
+ found in the preceding phase one step.
+\end{enumerate}
+Each iteration of these two phases is called a `pass'.
+In this way, the algorithm obtains a nested community structure,
+where at each level $Q$ is maximised for the relevant graph.
+We consider this process in more detail, in particular looking
+at phase one first in the first pass, when each node is a single
+node, and then how this generalises to later passes when each node
+is a community.
+
+\section{Undirected Graphs}
+
+\subsection{Initial Pass}
+\label{sec:initialPass}
+
+The initial pass is simple, as it uses the original graph;
+in all following passes graphs constructed in the previous pass's
+phase two are used.
+Here we will consider this initial simple formulation for phase one, and
+in Section~\ref{sec:laterPasses} we consider how this generalises for
+passes two and onwards.
+Phase one works by initially allocating each node to a separate community,
+and then iterating through each node $a$ and checking if moving it into
+a different community $\beta$ will increase $Q$.
+If there are possible moves that will increase $Q$, $a$ is moved into
+the community which will generate the largest increase in $Q$.
+This process is continued until there are no moves left to increase $Q$
+further, meaning a local maximum for $Q$ has been achieved.
+Then the algorithm moves to phase two (constructing a new graph where
+each node in the new graph is a community in the old graph).
+
+Note that we assume the original graph to be simple and undirected.
+First, we introduce some notation that will be useful:
+Let $c(i)$ denote the community to which node $i$ belongs,
+and let $\alpha$ be the community that the node $a$ mentioned above
+belongs to, i.e., $\alpha = c(a)$.
+Then we define
+\newcommand{\Stot}[1]{\Sigma_{\text{tot}}^{#1}}
+\begin{equation}
+ \Stot{\alpha} = \sum_{i \in \alpha}\sum_{j}A_{ij} = \sum_{i \in \alpha}k_i,
+\end{equation}
+\newcommand{\kin}[2]{k_{#1}^{#2}}
+\begin{equation}
+ \kin{i}{\alpha} = \sum_{j \in \alpha}A_{ij},
+\end{equation}
+and
+\newcommand{\Sin}[1]{\Sigma_{\text{in}}^{#1}}
+\begin{equation}
+ \Sin{\alpha} = \sum_{i \in \alpha}\sum_{j \in \alpha}A_{ij} = \sum_{i \in \alpha}\kin{i}{\alpha}.
+\end{equation} + +We are interested in how $Q$ will change if we move a node $a$ from its +current community $\alpha$, to a new community $\beta$. +This will have two effects, it will remove the terms from $Q$ +related to $a$ in $\alpha$, which we will call $Q^-$ and it will add terms +related to $a$ in $\beta$, which we will call $Q^+$. +The total change in $Q$ caused by the movement of $a$ from $\alpha$ to $\beta$ is +\begin{equation} + \Delta Q = Q^{+} - Q^{-}, +\end{equation} +where +\begin{align*} +Q^- &= \frac{1}{2m}\left[ \left( A_{aa} - \gamma \frac{k_a^2}{2m} \right) ++ 2\sum_{i \in \alpha, \, i \neq a} \left( A_{ia} - \gamma \frac{k_ik_a}{2m} \right) \right] \\ + &= \frac{1}{2m}\left[ \left( A_{aa} - \gamma \frac{k_a^2}{2m} \right) ++ 2 \left( \kin{a}{\alpha} -A_{aa}\right) - \gamma \frac{2k_a}{2m}\sum_{i \in \alpha, \, i \neq a} k_i \right] \\ + &= \frac{1}{2m}\left[ \left( A_{aa} - \gamma \frac{k_a^2}{2m} \right) ++ 2 \left( \kin{a}{\alpha} -A_{aa}\right) - \gamma \frac{2k_a}{2m}\left( \Stot{\alpha} - k_a \right) \right], \\ +\end{align*} +and +\begin{align*} +Q^+ &= \frac{1}{2m}\left[ \left( A_{aa} - \gamma \frac{k_a^2}{2m} \right) ++ 2\sum_{i \in \beta} \left( A_{ia} - \gamma \frac{k_ik_a}{2m} \right) \right] \\ + &= \frac{1}{2m}\left[ \left( A_{aa} - \gamma \frac{k_a^2}{2m} \right) ++ 2\kin{a}{\beta} - \gamma \frac{2k_a}{2m}\sum_{i \in \beta} k_i \right] \\ + &= \frac{1}{2m}\left[ \left( A_{aa} - \gamma \frac{k_a^2}{2m} \right) ++ 2\kin{a}{\beta} - \gamma \frac{2k_a\Stot{\beta}}{2m} \right]. \\ +\end{align*} +The first term in both these expressions ($Q^-$ and $Q^+$) is the same, and so cancels: +\begin{equation} +\Delta Q = \frac{1}{2m}\left[ \left( 2\kin{a}{\beta} - \gamma \frac{2k_a\Stot{\beta}}{2m} \right) + - \left( 2 \left( \kin{a}{\alpha} -A_{aa}\right) - \gamma \frac{2k_a}{2m}\left( \Stot{\alpha} - k_a \right) \right) \right]. +\end{equation} + +\subsection{Later Passes} +\label{sec:laterPasses} + +In phase two a `meta-graph' is constructed where nodes correspond to +the communities found in the preceding phase one step, and edge weight +between two such communities (nodes, in the meta-graph) +$\alpha$ and $\beta$ are defined to be +\begin{equation} + A_{\alpha \beta}^* = \sum_{i \in \alpha}\sum_{j \in \beta}A_{ij}. + \label{eqn:Aij*} +\end{equation} +Note that $i$ and $j$ refer to nodes in the original graph, not nodes +in the previous graph, and so holds any meta-graph, not just the first. +Also note that this definition of $A^*_{\alpha \beta}$ allows for +$A^*_{\alpha \alpha}$ to be non-zero as +\begin{equation} +A_{\alpha \alpha}^* = \sum_{i \in \alpha}\sum_{j \in \alpha}A_{ij} = \Sin{\alpha}. +\end{equation} + +In this newly constructed graph, $\alpha$ and $\beta$ are nodes, but +also refer to communities (sets of nodes) in the original graph, and I +use these two interpretations interchangeably. +This should be the only ambiguous bit of notation in this document, I hope. + +The results of Section~\ref{sec:initialPass} generalise to these meta-graphs, +and the generalised results mirror those of Section~\ref{sec:initialPass} closely +-- I distinguish the new results from those of Section~\ref{sec:initialPass} by a +superscript $*$. +I use $i$ and $j$ to denote nodes of the original graph as in Section~\ref{sec:initialPass}, +and use $z$ and $w$ to denote nodes of the meta-graph (communities of the original). 
+I use analogous notation to Section~\ref{sec:initialPass}, $c^*(z)$, to +denote the community to which node $z$ of the meta-graph belongs, +and let $\mathfrak{a}$ be the community that the node $\alpha$ belongs to +($c^*(\alpha) = \mathfrak{a}$), i.e. +\begin{equation} + \mathfrak{a} = \{z | c^*(z) = c^*(\alpha) \}. +\end{equation} + +Given this notation, we can observe that +\begin{equation} +m^* = \frac{1}{2}\sum_{z}\sum_{w}{A_{zw}^*} = \frac{1}{2}\sum_{z}\sum_{w}{\sum_{i \in z}\sum_{j \in w}A_{ij}} = \frac{1}{2}\sum_i\sum_jA_{ij} = m, +\end{equation} +\begin{equation} +k_{z}^* = \sum_{w}{A_{zw}^*} = \sum_{w}{\sum_{i \in z}\sum_{j \in w}A_{ij}} = \sum_{i \in z}\sum_{j}A_{ij} = \Stot{z}, +\end{equation} +\begin{equation} + \Stot{\mathfrak{a} *} = \sum_{z \in \mathfrak{a}}\sum_{w}A_{zw}^* = \sum_{z \in \mathfrak{a}}k_z^* = \sum_{z \in \mathfrak{a}}\Stot{z}, +\end{equation} +\begin{equation} + \kin{z}{\mathfrak{a} *} = \sum_{w \in \mathfrak{a}}{A_{zw}^*} = \sum_{w \in \mathfrak{a}}{\sum_{i \in z}\sum_{j \in w}A_{ij}}, +\end{equation} +and +\begin{equation} +\Sin{\mathfrak{a} *} = \sum_{z \in \mathfrak{a}}\sum_{w \in \mathfrak{a}}A_{zw}^* = \sum_{z \in \mathfrak{a}}\kin{z}{\mathfrak{a} *} = \sum_{z \in \mathfrak{a}}\sum_{w \in \mathfrak{a}}{\sum_{i \in z}\sum_{j \in w}A_{ij}}. + %\label{eqn:Sin} +\end{equation} + +If we let $\mathfrak{b}$ denote the community to which we are considering moving $\alpha$, +then the expression for $\Delta Q$ from Section~\ref{sec:initialPass} trivially generalises to +\begin{equation} +\Delta Q = \frac{1}{2m}\left[ \left( 2 \kin{\alpha}{\mathfrak{b} *} - \gamma \frac{2k_{\alpha}^*\Stot{\mathfrak{b} *}}{2m} \right) + - \left( 2\left( \kin{\alpha}{\mathfrak{a} *} - A_{\alpha \alpha}^* \right) - \gamma \frac{2k_{\alpha}^*}{2m} \left( \Stot{\mathfrak{a} *} - k_{\alpha}^* \right ) \right) \right] \\ +\end{equation} + +\section{Directed Graphs} +\label{sec:directedGraphs} + +It is of interest to consider how this generalises to directed graphs. +If we are to treat incoming and outgoing nodes equally, there are several +thoughts on how to extend the algorithm to directed graphs, of which we +will explore three: +\begin{itemize} + \item Construct an undirected graph first, and then use the undirected case. + \item Generalise the expressions from the undirected case to the directed case, + we will consider two different suggestions for such generalisations. +\end{itemize} +We will show that one of the two `generalisation of expressions' approaches is +equivalent to constructing an undirected graph, and the other is not. + +\subsection{Construction of an undirected graph} +A simple approach to generalising to directed graphs is to construct +an undirected graph with edge weights +\begin{equation} +A_{ij} = B_{ij} + B_{ji}, +\label{eqn:undirectedAB} +\end{equation} +and simply use the undirected algorithm. +Another suggestion is to average the directed edges to make +an undirected graph, i.e. to use a directed graph with edge weights +\begin{equation} +A_{ij} = \frac{B_{ij} + B_{ji}}{2}. +\end{equation} +This raises an important question: does scaling all edge weights across +the entire graph by a constant affect the results of the algorithm? +Hopefully not, but worth checking. +We can follow this through the results for the undirected graph by +substituting $A_{ij}^{(1)} = pA_{ij}$, $p \in \mathbb{R}$, and +distinguishing the new expressions by a superscript ${(1)}$. 
These +new expressions are: +\begin{equation} +m^{(1)} = \frac{1}{2}\sum_i\sum_jpA_{ij} = p\frac{1}{2}\sum_i\sum_j A_{ij} = pm , +\end{equation} +\begin{equation} +k_i^{(1)} = \sum_j{pA_{ij}} = p\sum_j{A_{ij}} = pk_i, +\end{equation} +and so +\begin{align*} + Q^{(1)}(c) &= \frac{1}{2pm}\sum_i\sum_j\left[ pA_{ij} - \gamma \frac{pk_ipk_j}{2pm} \right] \delta_{ij}(c) \\ + &= \frac{1}{2m}\sum_i\sum_j\left[ A_{ij} - \gamma \frac{k_ik_j}{2m} \right] \delta_{ij}(c) \\ + &= Q(c) +\end{align*} +Note that as we have shown $Q^{(1)} = Q$ there is no need to go into the remainder of the terms +involved in the algorithm, as they all derive from $Q$. + +\subsection{First generalisation of expressions approach} + +One suggested extension to directed graphs is to modify the expressions +involved by adding the `from' case and the `to' case for each term. +If we let $B_{ij}$ be the edge weight between nodes $i$ and $j$ in +the directed graph, and distinguishing these extended expressions by +a superscript $(2)$, the extended expressions become: +\begin{equation} +m^{(2)} = \frac{1}{2}\left ( \sum_i\sum_jB_{ij} + \sum_i\sum_jB_{ji}\right) = \frac{1}{2}\sum_i\sum_j \left( B_{ij} + B_{ji} \right) , +\end{equation} +\begin{equation} +k_i^{(2)} = \sum_jB_{ij} + \sum_jB_{ji} = \sum_j{\left( B_{ij} + B_{ji} \right)}, +\end{equation} +and similarly +\begin{equation} + Q^{(2)}(c) = \frac{1}{2m}\sum_i\sum_j\left[ \left( B_{ij} + B_{ji} \right) - \gamma \frac{k_i^{(2)}k_j^{(2)}}{2m} \right] \delta_{ij}(c). +\end{equation} + +Note how this is equivalent to the construction of an undirected graph as +per Equation~(\ref{eqn:undirectedAB}). Similarly to above, +there is no need to go into the remainder of the terms +involved in the algorithm, as they all derive from $Q$. + + +\subsection{Second generalisation of expressions approach} + +Another approach to generalising the expressions to the +directed case, that still treats incoming and outgoing edges +as equally important, is to propose an alternative modularity +expression: +\newcommand{\dkin}[1]{k_{#1}^{\text{in}}} +\newcommand{\dkout}[1]{k_{#1}^{\text{out}}} +\begin{equation} +Q^{(3)}(c) = \frac{1}{2m}\sum_i\sum_j\left[ 2B_{ij} - 2\gamma \frac{\dkin{i}\dkout{j}}{2m} \right] \delta_{ij}(c), \\ +\end{equation} +where +\begin{equation} +\dkout{i} = \sum_j{B_{ij}} +\quad \quad \text{and} \quad \quad +\dkin{i} = \sum_j{B_{ji}}, +\end{equation} +so $k_i^{(2)} = \dkin{i} + \dkout{i}$. +Note I leave the factor of two in the expression for $Q^{(3)}$ so that it +remains as comparable to that for $Q^{(2)}$ as possible. +There is no need for alternative $m$, as it will still be the same as above. +$Q^{(3)}$ will differ from $Q^{(2)}$ in two ways. +Firstly, as $k_i^{(2)} = \dkin{i} + \dkout{i}$, +\begin{align*} +\sum_i\sum_j k_i^{(2)} k_j^{(2)} \delta_{ij}(c) &= \sum_i\sum_j (\dkin{i} + \dkout{i}) (\dkin{j} + \dkout{j}) \delta_{ij}(c) \\ + &= \sum_i\sum_j \left[ (\dkin{i}\dkin{j} + \dkout{i}\dkout{j}) + (\dkin{i}\dkout{j} + \dkin{j}\dkout{i}) \right] \delta_{ij}(c). \\ + &= \sum_i\sum_j \left[ (\dkin{i}\dkin{j} + \dkout{i}\dkout{j}) + 2\dkin{i}\dkout{j} \right] \delta_{ij}(c), \\ +\end{align*} +and similarly, +\begin{equation} +\sum_i\sum_j \left( B_{ij} + B_{ji} \right) \delta_{ij}(c) = 2\sum_i\sum_j B_{ij} \delta_{ij}(c). +\end{equation} +From these two expressions, we can see that +\begin{equation} +Q^{(3)} - Q^{(2)} = \frac{1}{2m}\sum_i\sum_j \gamma \frac{\dkin{i}\dkin{j} + \dkout{i}\dkout{j}}{2m} \delta_{ij}(c). 
+\end{equation} + + +\section{Directed Graphs in more detail} +\label{sec:directedGraphsDetail} + +In Section \ref{sec:directedGraphs} we essentially showed three +things: +\begin{itemize} + \item How an undirected graph could be constructed from a directed + graph, thereby allowing the undirected algorithm to be used for + directed graphs. + \item How scaling all edge weights by a non-zero constant would not + affect the modularity function. + \item An alternative approach to extending the algorithm to + directed graphs that is not equivalent to first reducing it + to an undirected graph. +\end{itemize} +It is this third point that we will explore here. +Analogously to Sections \ref{sec:initialPass} and \ref{sec:laterPasses} we will +break this up into the initial pass and the later passes. + +\subsection{Initial pass} +\label{sec:initialPassDirected} + +Continuing with the notation of Section \ref{sec:initialPass}, in which +$c(i)$ denotes the community to which node $i$ belongs, +and $\alpha = c(a)$, we define +\newcommand{\dinStot}[1]{\Sigma_{\text{tot}}^{\text{in}(#1)}} +\newcommand{\doutStot}[1]{\Sigma_{\text{tot}}^{\text{out}(#1)}} +\begin{equation} + \doutStot{\alpha} = \sum_{i \in \alpha}\sum_{j}B_{ij} = \sum_{i \in \alpha}\dkout{i} + \quad \quad \text{and} \quad \quad + \dinStot{\alpha} = \sum_{i \in \alpha}\sum_{j}B_{ji} = \sum_{i \in \alpha}\dkin{i}, +\end{equation} +\newcommand{\dinkin}[2]{k_{#1}^{\text{in}(#2)}} +\newcommand{\doutkin}[2]{k_{#1}^{\text{out}(#2)}} +\begin{equation} + \doutkin{i}{\alpha} = \sum_{j \in \alpha}B_{ij} + \quad \quad \text{and} \quad \quad + \dinkin{i}{\alpha} = \sum_{j \in \alpha}B_{ji}, +\end{equation} +and we will entertain one more ambiguous notation choice: +%\newcommand{\Sin}[1]{\Sigma_{\text{in}}^{#1}} +\begin{equation} + \Sin{\alpha} = \sum_{i \in \alpha}\sum_{j \in \alpha}B_{ij} = \sum_{i \in \alpha}\doutkin{i}{\alpha} = \sum_{i \in \alpha}\dinkin{i}{\alpha}. +\end{equation} + +Analogously to Section \ref{sec:initialPass}, we are interested in how +$Q^{(3)}$ will change if we move a node $a$ from its +current community $\alpha$, to a new community $\beta$, +and analogously this will have two effects -- it will remove the terms +from $Q^{(3)}$ related to $a$ in $\alpha$, which we will call $Q^{-(3)}$ +and it will add terms related to $a$ in $\beta$, which we will call $Q^{+(3)}$. +The total change in $Q^{(3)}$ caused by the movement of $a$ from $\alpha$ to $\beta$ is +\begin{equation} + \Delta Q^{(3)} = Q^{+(3)} - Q^{-(3)}, +\end{equation} +where +\begin{align*} +Q^{-(3)} &= \frac{1}{2m}\left[ \left( 2B_{aa} - 2\gamma \frac{\dkin{a}\dkout{a}}{2m} \right) ++ \sum_{i \in \alpha, \, i \neq a} \left( 2B_{ia} + 2B_{ai} - 2\gamma \frac{\dkin{i}\dkout{a}}{2m} - 2\gamma \frac{\dkin{a}\dkout{i}}{2m} \right) \right] \\ + &= \frac{1}{2m}\left[ \left( 2B_{aa} - 2\gamma \frac{\dkin{a}\dkout{a}}{2m} \right) ++ 2(\dinkin{a}{\alpha} - B_{aa}) + 2(\doutkin{a}{\alpha} - B_{aa}) \hdots \right . \\ + & \quad \quad \quad \quad \quad \quad \left . 
+- \frac{2\gamma\dkout{a}}{2m} (\dinStot{\alpha} - \dkin{a}) - \frac{2\gamma\dkin{a}}{2m} (\doutStot{\alpha} - \dkout{a}) \right] \\ +\end{align*} +and +\begin{align*} +Q^{+(3)} &= \frac{1}{2m}\left[ \left( 2B_{aa} - 2\gamma \frac{\dkin{a}\dkout{a}}{2m} \right) ++ \sum_{i \in \beta} \left( 2B_{ia} + 2B_{ai} - 2\gamma \frac{\dkin{i}\dkout{a}}{2m} - 2\gamma \frac{\dkin{a}\dkout{i}}{2m} \right) \right] \\ + &= \frac{1}{2m}\left[ \left( 2B_{aa} - 2\gamma \frac{\dkin{a}\dkout{a}}{2m} \right) ++ 2\dinkin{a}{\beta} + 2\doutkin{a}{\beta} - \frac{2\gamma\dkout{a}}{2m} \dinStot{\beta} - \frac{2\gamma\dkin{a}}{2m} \doutStot{\beta} \right] \\ +\end{align*} +Similarly to Section \ref{sec:initialPass}, the first term in both these expressions is the same, and so cancels, leaving: +\begin{align*} +\Delta Q^{(3)} &= \frac{2}{2m}\left[ +\left( \dinkin{a}{\beta} + \doutkin{a}{\beta} - \frac{\gamma\dkout{a}}{2m} \dinStot{\beta} - \frac{\gamma\dkin{a}}{2m} \doutStot{\beta} \right) \right. \\ +& \hspace{-1cm} +- \left. \left( (\dinkin{a}{\alpha} - B_{aa}) + (\doutkin{a}{\alpha} - B_{aa}) - \frac{\gamma\dkout{a}}{2m} (\dinStot{\alpha} - \dkin{a}) - \frac{\gamma\dkin{a}}{2m} (\doutStot{\alpha} - \dkout{a}) \right) \right] \\ + &= \frac{2}{2m}\left[ (\dinkin{a}{\beta}-\dinkin{a}{\alpha}) + (\doutkin{a}{\beta}-\doutkin{a}{\alpha}) + 2B_{aa} \right. \\ +& \hspace{-1cm} \left. +- \frac{\gamma\dkout{a}}{2m} (\dinStot{\beta}-\dinStot{\alpha}) - \frac{\gamma\dkin{a}}{2m} (\doutStot{\beta} - \doutStot{\alpha}) - \frac{2\gamma\dkin{a}\dkout{a}}{2m} \right] +\end{align*} + + + +\subsection{Later passes} +\label{sec:laterPassesDirected} + +In phase two a `meta-graph' is constructed where nodes correspond to +the communities found in the preceding phase one step, and edge weight +between two such communities (nodes, in the meta-graph) +$\alpha$ and $\beta$ are defined to be +\begin{equation} + B_{\alpha \beta}^* = \sum_{i \in \alpha}\sum_{j \in \beta}B_{ij}. + \label{eqn:Bij*} +\end{equation} +Note that $i$ and $j$ refer to nodes in the original graph, not nodes +in the previous graph, and so holds any meta-graph, not just the first. +Also note that this definition of $B^*_{\alpha \beta}$ allows for +$B^*_{\alpha \alpha}$ to be non-zero, in fact +\begin{equation} +B_{\alpha \alpha}^* = \sum_{i \in \alpha}\sum_{j \in \alpha}B_{ij} = \Sin{\alpha}. +\end{equation} + +In this newly constructed graph, $\alpha$ and $\beta$ are nodes, but +also refer to communities (sets of nodes) in the original graph, and I +use these two interpretations interchangeably, completely analogously to +Section \ref{sec:laterPasses}. + +The results of Section~\ref{sec:initialPassDirected} generalise to these meta-graphs, +and the generalised results mirror those of Section~\ref{sec:initialPassDirected} closely +-- I distinguish the new results from those of Section~\ref{sec:initialPassDirected} by a +superscript $*$. +I use $i$ and $j$ to denote nodes of the original graph as in Sections~\ref{sec:initialPass} +and \ref{sec:initialPassDirected}, +and use $z$ and $w$ to denote nodes of the meta-graph (communities of the original). +I use analogous notation to Section~\ref{sec:initialPass}, $c^*(z)$, to +denote the community to which node $z$ of the meta-graph belongs, +and let $\mathfrak{a}$ be the community that the node $\alpha$ belongs to, +i.e., $\mathfrak{a} = c^*(\alpha) $. 
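+
+As in Section~\ref{sec:laterPasses}, the total edge weight is unchanged by this
+reduction,
+\begin{equation}
+\frac{1}{2}\sum_{z}\sum_{w}\left(B_{zw}^* + B_{wz}^*\right) = \sum_i\sum_jB_{ij} = m,
+\end{equation}
+so $m$ carries over unchanged in what follows.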
+ +Given this notation, we get all the same results as in \ref{sec:laterPasses}, but +each split into two cases `out' and `in', separating by direction, essentially, so +\newcommand{\dkinStar}[1]{k_{#1}^{\text{in} *}} +\newcommand{\dkoutStar}[1]{k_{#1}^{\text{out} *}} +\begin{equation} +\dkoutStar{z} = \sum_w{B_{zw}^*} = \sum_w\sum_{i \in z}\sum_{j \in w}B_{ij} = \sum_{i \in z}\sum_jB_{ij} = \doutStot{z}, +\end{equation} +\begin{equation} +\dkinStar{z} = \sum_w{B_{wz}^*} = \sum_w\sum_{i \in z}\sum_{j \in w}B_{ji} = \sum_{i \in z}\sum_jB_{ji} = \dinStot{z}, +\end{equation} +\newcommand{\dinStotStar}[1]{\Sigma_{\text{tot}}^{\text{in}(#1) *}} +\newcommand{\doutStotStar}[1]{\Sigma_{\text{tot}}^{\text{out}(#1) *}} +\begin{equation} + \doutStotStar{\mathfrak{a}} = \sum_{z \in \mathfrak{a}}\sum_{w}B_{zw}^* = \sum_{z \in \mathfrak{a}}\dkoutStar{z} = \sum_{z \in \mathfrak{a}}\doutStot{z}, +\end{equation} +\begin{equation} + \dinStotStar{\mathfrak{a}} = \sum_{z \in \mathfrak{a}}\sum_{w}B_{wz}^* = \sum_{z \in \mathfrak{a}}\dkinStar{z} = \sum_{z \in \mathfrak{a}}\dinStot{z}, +\end{equation} +\newcommand{\dinkinStar}[2]{k_{#1}^{\text{in}(#2) *}} +\newcommand{\doutkinStar}[2]{k_{#1}^{\text{out}(#2) *}} +\begin{equation} + \doutkinStar{z}{\mathfrak{a}} = \sum_{w \in \mathfrak{a}}{B_{zw}^*} = \sum_{w \in \mathfrak{a}}{\sum_{i \in z}\sum_{j \in w}B_{ij}}, +\end{equation} +\begin{equation} + \dinkinStar{z}{\mathfrak{a}} = \sum_{w \in \mathfrak{a}}{B_{wz}^*} = \sum_{w \in \mathfrak{a}}{\sum_{i \in z}\sum_{j \in w}B_{ji}}, +\end{equation} +and +\begin{equation} +\Sin{\mathfrak{a} *} = \sum_{z \in \mathfrak{a}}\sum_{w \in \mathfrak{a}}A_{zw}^* = \sum_{z \in \mathfrak{a}}\kin{z}{\mathfrak{a} *} = \sum_{z \in \mathfrak{a}}\sum_{w \in \mathfrak{a}}{\sum_{i \in z}\sum_{j \in w}A_{ij}}. + %\label{eqn:Sin} +\end{equation} + +If we let $\mathfrak{b}$ denote the community to which we are considering moving $\alpha$, +then the expression for $\Delta Q$ from Section~\ref{sec:initialPassDirected} simply generalises as +\begin{align*} +\Delta Q^{(3)} &= \frac{2}{2m}\left[ (\dinkinStar{\alpha}{\mathfrak{b}}-\dinkinStar{\alpha}{\mathfrak{a}}) + (\doutkinStar{\alpha}{\mathfrak{b}}-\doutkinStar{\alpha}{\mathfrak{a}}) + 2B_{\alpha\alpha}^* \right. \\ +& \hspace{-1cm} \left. +- \frac{\gamma\dkoutStar{\alpha}}{2m} (\dinStotStar{\mathfrak{b}}-\dinStotStar{\mathfrak{a}}) - \frac{\gamma\dkinStar{\alpha}}{2m} (\doutStotStar{\mathfrak{b}} - \doutStotStar{\mathfrak{a}}) - \frac{2\gamma\dkinStar{\alpha}\dkoutStar{\alpha}}{2m} \right] +\end{align*} + + +\end{document} diff --git a/graph/community/louvain_common.go b/graph/community/louvain_common.go new file mode 100644 index 00000000..e806096a --- /dev/null +++ b/graph/community/louvain_common.go @@ -0,0 +1,377 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package community provides graph community detection functions. +package community + +import ( + "fmt" + "math/rand" + + "github.com/gonum/graph" +) + +// Q returns the modularity Q score of the graph g subdivided into the +// given communities at the given resolution. If communities is nil, the +// unclustered modularity score is returned. The resolution parameter +// is γ as defined in Reichardt and Bornholdt doi:10.1103/PhysRevE.74.016110. +// Q will panic if g has any edge with negative edge weight. 
+//
+// If g is undirected, Q is calculated according to
+//	Q = 1/2m \sum_{ij} [ A_{ij} - (\gamma k_i k_j)/2m ] \delta(c_i,c_j),
+// If g is directed, it is calculated according to
+//	Q = 1/m \sum_{ij} [ A_{ij} - (\gamma k_i^in k_j^out)/m ] \delta(c_i,c_j).
+//
+// graph.Undirect may be used as a shim to allow calculation of Q for
+// directed graphs with the undirected modularity function.
+func Q(g graph.Graph, communities [][]graph.Node, resolution float64) float64 {
+	switch g := g.(type) {
+	case graph.Undirected:
+		return qUndirected(g, communities, resolution)
+	case graph.Directed:
+		return qDirected(g, communities, resolution)
+	default:
+		panic(fmt.Sprintf("community: invalid graph type: %T", g))
+	}
+}
+
+// ReducedGraph is a modularised graph.
+type ReducedGraph interface {
+	graph.Graph
+
+	// Communities returns the community memberships
+	// of the nodes in the graph used to generate
+	// the reduced graph.
+	Communities() [][]graph.Node
+
+	// Structure returns the community structure of
+	// the current level of the module clustering.
+	// Each slice in the returned value recursively
+	// describes the membership of a community at
+	// the current level by indexing via the node
+	// ID into the structure of the non-nil
+	// ReducedGraph returned by Expanded, or when the
+	// ReducedGraph is nil, by containing nodes
+	// from the original input graph.
+	//
+	// The returned value should not be mutated.
+	Structure() [][]graph.Node

+	// Expanded returns the next lower level of the
+	// module clustering or nil if at the lowest level.
+	//
+	// The returned ReducedGraph will be the same
+	// concrete type as the receiver.
+	Expanded() ReducedGraph
+}
+
+// Modularize returns the hierarchical modularization of g at the given resolution
+// using the Louvain algorithm. If src is nil, rand.Intn is used as the random
+// generator. Modularize will panic if g has any edge with negative edge weight.
+//
+// If g is undirected it is modularised to maximise
+//	Q = 1/2m \sum_{ij} [ A_{ij} - (\gamma k_i k_j)/2m ] \delta(c_i,c_j),
+// If g is directed it is modularised to maximise
+//	Q = 1/m \sum_{ij} [ A_{ij} - (\gamma k_i^in k_j^out)/m ] \delta(c_i,c_j).
+//
+// The concrete type of the ReducedGraph will be a pointer to either a
+// ReducedUndirected or a ReducedDirected depending on the type of g.
+//
+// graph.Undirect may be used as a shim to allow modularization of
+// directed graphs with the undirected modularity function.
+func Modularize(g graph.Graph, resolution float64, src *rand.Rand) ReducedGraph {
+	switch g := g.(type) {
+	case graph.Undirected:
+		return louvainUndirected(g, resolution, src)
+	case graph.Directed:
+		return louvainDirected(g, resolution, src)
+	default:
+		panic(fmt.Sprintf("community: invalid graph type: %T", g))
+	}
+}
+
+// Multiplex is a multiplex graph.
+type Multiplex interface {
+	// Nodes returns the slice of nodes
+	// for the multiplex graph.
+	// All layers must refer to the same
+	// set of nodes.
+	Nodes() []graph.Node
+
+	// Depth returns the number of layers
+	// in the multiplex graph.
+	Depth() int
+}
+
+// QMultiplex returns the modularity Q score of the multiplex graph layers
+// subdivided into the given communities at the given resolutions and weights. Q is
+// returned as the vector of weighted Q scores for each layer of the multiplex graph.
+// If communities is nil, the unclustered modularity score is returned.
+// If weights is nil, layers are equally weighted, otherwise the length of
+// weights must equal the number of layers. If resolutions is nil, a resolution
+// of 1.0 is used for all layers, otherwise either a single element slice may be used
+// to specify a global resolution, or the length of resolutions must equal the number
+// of layers. The resolution parameter is γ as defined in Reichardt and Bornholdt
+// doi:10.1103/PhysRevE.74.016110.
+// QMultiplex will panic if the graph has any layer weight-scaled edge with
+// negative edge weight.
+//
+// If g is undirected, Q is calculated according to
+//	Q_{layer} = w_{layer} \sum_{ij} [ A_{layer}*_{ij} - (\gamma_{layer} k_i k_j)/2m_{layer} ] \delta(c_i,c_j),
+// If g is directed, it is calculated according to
+//	Q_{layer} = w_{layer} \sum_{ij} [ A_{layer}*_{ij} - (\gamma_{layer} k_i^in k_j^out)/m_{layer} ] \delta(c_i,c_j).
+//
+// Note that Q values for multiplex graphs are not scaled by the total layer edge weight.
+//
+// graph.Undirect may be used as a shim to allow calculation of Q for
+// directed graphs.
+func QMultiplex(g Multiplex, communities [][]graph.Node, weights, resolutions []float64) []float64 {
+	if weights != nil && len(weights) != g.Depth() {
+		panic("community: weights vector length mismatch")
+	}
+	if resolutions != nil && len(resolutions) != 1 && len(resolutions) != g.Depth() {
+		panic("community: resolutions vector length mismatch")
+	}
+
+	switch g := g.(type) {
+	case UndirectedMultiplex:
+		return qUndirectedMultiplex(g, communities, weights, resolutions)
+	case DirectedMultiplex:
+		return qDirectedMultiplex(g, communities, weights, resolutions)
+	default:
+		panic(fmt.Sprintf("community: invalid graph type: %T", g))
+	}
+}
+
+// ReducedMultiplex is a modularised multiplex graph.
+type ReducedMultiplex interface {
+	Multiplex
+
+	// Communities returns the community memberships
+	// of the nodes in the graph used to generate
+	// the reduced graph.
+	Communities() [][]graph.Node
+
+	// Structure returns the community structure of
+	// the current level of the module clustering.
+	// Each slice in the returned value recursively
+	// describes the membership of a community at
+	// the current level by indexing via the node
+	// ID into the structure of the non-nil
+	// ReducedGraph returned by Expanded, or when the
+	// ReducedGraph is nil, by containing nodes
+	// from the original input graph.
+	//
+	// The returned value should not be mutated.
+	Structure() [][]graph.Node
+
+	// Expanded returns the next lower level of the
+	// module clustering or nil if at the lowest level.
+	//
+	// The returned ReducedGraph will be the same
+	// concrete type as the receiver.
+	Expanded() ReducedMultiplex
+}
+
+// ModularizeMultiplex returns the hierarchical modularization of g at the given resolution
+// using the Louvain algorithm. If all is true and g has negatively weighted layers, all
+// communities will be searched during the modularization. If src is nil, rand.Intn is
+// used as the random generator. ModularizeMultiplex will panic if g has any edge with
+// edge weight that does not sign-match the layer weight.
+//
+// If g is undirected it is modularised to maximise
+//	Q = \sum w_{layer} \sum_{ij} [ A_{layer}*_{ij} - (\gamma_{layer} k_i k_j)/2m ] \delta(c_i,c_j).
+// If g is directed it is modularised to maximise
+//	Q = \sum w_{layer} \sum_{ij} [ A_{layer}*_{ij} - (\gamma_{layer} k_i^in k_j^out)/m_{layer} ] \delta(c_i,c_j).
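+//
+// A minimal usage sketch for a multiplex graph g with per-layer weights
+// (both assumed to have been built elsewhere, for example with
+// NewUndirectedLayers):
+//
+//	r := ModularizeMultiplex(g, weights, nil, true, nil)
+//	communities := r.Communities()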
+// +// The concrete type of the ReducedMultiplex will be a pointer to a +// ReducedUndirectedMultiplex. +// +// graph.Undirect may be used as a shim to allow modularization of +// directed graphs with the undirected modularity function. +func ModularizeMultiplex(g Multiplex, weights, resolutions []float64, all bool, src *rand.Rand) ReducedMultiplex { + if weights != nil && len(weights) != g.Depth() { + panic("community: weights vector length mismatch") + } + if resolutions != nil && len(resolutions) != 1 && len(resolutions) != g.Depth() { + panic("community: resolutions vector length mismatch") + } + + switch g := g.(type) { + case UndirectedMultiplex: + return louvainUndirectedMultiplex(g, weights, resolutions, all, src) + case DirectedMultiplex: + return louvainDirectedMultiplex(g, weights, resolutions, all, src) + default: + panic(fmt.Sprintf("community: invalid graph type: %T", g)) + } +} + +// undirectedEdges is the edge structure of a reduced undirected graph. +type undirectedEdges struct { + // edges and weights is the set + // of edges between nodes. + // weights is keyed such that + // the first element of the key + // is less than the second. + edges [][]int + weights map[[2]int]float64 +} + +// directedEdges is the edge structure of a reduced directed graph. +type directedEdges struct { + // edgesFrom, edgesTo and weights + // is the set of edges between nodes. + edgesFrom [][]int + edgesTo [][]int + weights map[[2]int]float64 +} + +// community is a reduced graph node describing its membership. +type community struct { + id int + + nodes []graph.Node + + weight float64 +} + +func (n community) ID() int { return n.id } + +// edge is a reduced graph edge. +type edge struct { + from, to community + weight float64 +} + +func (e edge) From() graph.Node { return e.from } +func (e edge) To() graph.Node { return e.to } +func (e edge) Weight() float64 { return e.weight } + +// multiplexCommunity is a reduced multiplex graph node describing its membership. +type multiplexCommunity struct { + id int + + nodes []graph.Node + + weights []float64 +} + +func (n multiplexCommunity) ID() int { return n.id } + +// multiplexEdge is a reduced graph edge for a multiplex graph. +type multiplexEdge struct { + from, to multiplexCommunity + weight float64 +} + +func (e multiplexEdge) From() graph.Node { return e.from } +func (e multiplexEdge) To() graph.Node { return e.to } +func (e multiplexEdge) Weight() float64 { return e.weight } + +// commIdx is an index of a node in a community held by a localMover. +type commIdx struct { + community int + node int +} + +// node is defined to avoid an import of .../graph/simple. +type node int + +func (n node) ID() int { return int(n) } + +// minTaker is a set iterator. +type minTaker interface { + TakeMin(p *int) bool +} + +// dense is a dense integer set iterator. +type dense struct { + pos int + n int +} + +// TakeMin mimics intsets.Sparse TakeMin for dense sets. If the dense +// iterator position is less than the iterator size, TakeMin sets *p +// to the the iterator position and increments the position and returns +// true. +// Otherwise, it returns false and *p is undefined. +func (d *dense) TakeMin(p *int) bool { + if d.pos >= d.n { + return false + } + *p = d.pos + d.pos++ + return true +} + +const ( + negativeWeight = "community: unexpected negative edge weight" + positiveWeight = "community: unexpected positive edge weight" +) + +// positiveWeightFuncFor returns a constructed weight function for the +// positively weighted g. 
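+// The returned function reports the weight of the edge between x and y,
+// returning 0 when no such edge exists, and panics if it encounters a
+// negative edge weight.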
+func positiveWeightFuncFor(g graph.Graph) func(x, y graph.Node) float64 { + if wg, ok := g.(graph.Weighter); ok { + return func(x, y graph.Node) float64 { + w, ok := wg.Weight(x, y) + if !ok { + return 0 + } + if w < 0 { + panic(negativeWeight) + } + return w + } + } + return func(x, y graph.Node) float64 { + e := g.Edge(x, y) + if e == nil { + return 0 + } + w := e.Weight() + if w < 0 { + panic(negativeWeight) + } + return w + } +} + +// negativeWeightFuncFor returns a constructed weight function for the +// negatively weighted g. +func negativeWeightFuncFor(g graph.Graph) func(x, y graph.Node) float64 { + if wg, ok := g.(graph.Weighter); ok { + return func(x, y graph.Node) float64 { + w, ok := wg.Weight(x, y) + if !ok { + return 0 + } + if w > 0 { + panic(positiveWeight) + } + return -w + } + } + return func(x, y graph.Node) float64 { + e := g.Edge(x, y) + if e == nil { + return 0 + } + w := e.Weight() + if w > 0 { + panic(positiveWeight) + } + return -w + } +} + +// depth returns max(1, len(weights)). It is used to ensure +// that multiplex community weights are properly initialised. +func depth(weights []float64) int { + if weights == nil { + return 1 + } + return len(weights) +} diff --git a/graph/community/louvain_directed.go b/graph/community/louvain_directed.go new file mode 100644 index 00000000..a5a15f94 --- /dev/null +++ b/graph/community/louvain_directed.go @@ -0,0 +1,633 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package community + +import ( + "math" + "math/rand" + "sort" + + "golang.org/x/tools/container/intsets" + + "github.com/gonum/graph" + "github.com/gonum/graph/internal/ordered" +) + +// qDirected returns the modularity Q score of the graph g subdivided into the +// given communities at the given resolution. If communities is nil, the +// unclustered modularity score is returned. The resolution parameter +// is γ as defined in Reichardt and Bornholdt doi:10.1103/PhysRevE.74.016110. +// qDirected will panic if g has any edge with negative edge weight. +// +// Q = 1/m \sum_{ij} [ A_{ij} - (\gamma k_i^in k_j^out)/m ] \delta(c_i,c_j) +// +func qDirected(g graph.Directed, communities [][]graph.Node, resolution float64) float64 { + nodes := g.Nodes() + weight := positiveWeightFuncFor(g) + + // Calculate the total edge weight of the graph + // and the table of penetrating edge weight sums. + var m float64 + k := make(map[int]directedWeights, len(nodes)) + for _, n := range nodes { + var wOut float64 + u := n + for _, v := range g.From(u) { + wOut += weight(u, v) + } + var wIn float64 + v := n + for _, u := range g.To(v) { + wIn += weight(u, v) + } + w := weight(n, n) + m += w + wOut // We only need to count edges once. + k[n.ID()] = directedWeights{out: w + wOut, in: w + wIn} + } + + if communities == nil { + var q float64 + for _, u := range nodes { + kU := k[u.ID()] + q += weight(u, u) - resolution*kU.out*kU.in/m + } + return q / m + } + + var q float64 + for _, c := range communities { + for _, u := range c { + kU := k[u.ID()] + for _, v := range c { + kV := k[v.ID()] + q += weight(u, v) - resolution*kU.out*kV.in/m + } + } + } + return q / m +} + +// louvainDirected returns the hierarchical modularization of g at the given +// resolution using the Louvain algorithm. If src is nil, rand.Intn is used +// as the random generator. louvainDirected will panic if g has any edge with negative +// edge weight. 
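+//
+// louvainDirected repeatedly alternates the local moving heuristic with
+// graph reduction, returning the current reduction when no further
+// improvement can be made.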
+func louvainDirected(g graph.Directed, resolution float64, src *rand.Rand) ReducedGraph { + // See louvain.tex for a detailed description + // of the algorithm used here. + + c := reduceDirected(g, nil) + rnd := rand.Intn + if src != nil { + rnd = src.Intn + } + for { + l := newDirectedLocalMover(c, c.communities, resolution) + if l == nil { + return c + } + if done := l.localMovingHeuristic(rnd); done { + return c + } + c = reduceDirected(c, l.communities) + } +} + +// ReducedDirected is a directed graph of communities derived from a +// parent graph by reduction. +type ReducedDirected struct { + // nodes is the set of nodes held + // by the graph. In a ReducedDirected + // the node ID is the index into + // nodes. + nodes []community + directedEdges + + // communities is the community + // structure of the graph. + communities [][]graph.Node + + parent *ReducedDirected +} + +var ( + _ graph.Directed = (*ReducedDirected)(nil) + _ graph.Weighter = (*ReducedDirected)(nil) + _ ReducedGraph = (*ReducedUndirected)(nil) +) + +// Communities returns the community memberships of the nodes in the +// graph used to generate the reduced graph. +func (g *ReducedDirected) Communities() [][]graph.Node { + communities := make([][]graph.Node, len(g.communities)) + if g.parent == nil { + for i, members := range g.communities { + comm := make([]graph.Node, len(members)) + for j, n := range members { + nodes := g.nodes[n.ID()].nodes + if len(nodes) != 1 { + panic("community: unexpected number of nodes in base graph community") + } + comm[j] = nodes[0] + } + communities[i] = comm + } + return communities + } + sub := g.parent.Communities() + for i, members := range g.communities { + var comm []graph.Node + for _, n := range members { + comm = append(comm, sub[n.ID()]...) + } + communities[i] = comm + } + return communities +} + +// Structure returns the community structure of the current level of +// the module clustering. The first index of the returned value +// corresponds to the index of the nodes in the next higher level if +// it exists. The returned value should not be mutated. +func (g *ReducedDirected) Structure() [][]graph.Node { + return g.communities +} + +// Expanded returns the next lower level of the module clustering or nil +// if at the lowest level. +func (g *ReducedDirected) Expanded() ReducedGraph { + return g.parent +} + +// reduceDirected returns a reduced graph constructed from g divided +// into the given communities. The communities value is mutated +// by the call to reduceDirected. If communities is nil and g is a +// ReducedDirected, it is returned unaltered. +func reduceDirected(g graph.Directed, communities [][]graph.Node) *ReducedDirected { + if communities == nil { + if r, ok := g.(*ReducedDirected); ok { + return r + } + + nodes := g.Nodes() + // TODO(kortschak) This sort is necessary really only + // for testing. In practice we would not be using the + // community provided by the user for a Q calculation. + // Probably we should use a function to map the + // communities in the test sets to the remapped order. 
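+		// The sorted nodes are wrapped one-per-community below, giving
+		// the identity partition of g as the initial reduction.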
+		sort.Sort(ordered.ByID(nodes))
+		communities = make([][]graph.Node, len(nodes))
+		for i := range nodes {
+			communities[i] = []graph.Node{node(i)}
+		}
+
+		weight := positiveWeightFuncFor(g)
+		r := ReducedDirected{
+			nodes: make([]community, len(nodes)),
+			directedEdges: directedEdges{
+				edgesFrom: make([][]int, len(nodes)),
+				edgesTo:   make([][]int, len(nodes)),
+				weights:   make(map[[2]int]float64),
+			},
+			communities: communities,
+		}
+		communityOf := make(map[int]int, len(nodes))
+		for i, n := range nodes {
+			r.nodes[i] = community{id: i, nodes: []graph.Node{n}}
+			communityOf[n.ID()] = i
+		}
+		for _, n := range nodes {
+			id := communityOf[n.ID()]
+
+			var out []int
+			u := n
+			for _, v := range g.From(u) {
+				vid := communityOf[v.ID()]
+				if vid != id {
+					out = append(out, vid)
+				}
+				r.weights[[2]int{id, vid}] = weight(u, v)
+			}
+			r.edgesFrom[id] = out
+
+			var in []int
+			v := n
+			for _, u := range g.To(v) {
+				uid := communityOf[u.ID()]
+				if uid != id {
+					in = append(in, uid)
+				}
+				r.weights[[2]int{uid, id}] = weight(u, v)
+			}
+			r.edgesTo[id] = in
+		}
+		return &r
+	}
+
+	// Remove zero length communities destructively.
+	var commNodes int
+	for i := 0; i < len(communities); {
+		comm := communities[i]
+		if len(comm) == 0 {
+			communities[i] = communities[len(communities)-1]
+			communities[len(communities)-1] = nil
+			communities = communities[:len(communities)-1]
+		} else {
+			commNodes += len(comm)
+			i++
+		}
+	}
+
+	r := ReducedDirected{
+		nodes: make([]community, len(communities)),
+		directedEdges: directedEdges{
+			edgesFrom: make([][]int, len(communities)),
+			edgesTo:   make([][]int, len(communities)),
+			weights:   make(map[[2]int]float64),
+		},
+	}
+	r.communities = make([][]graph.Node, len(communities))
+	for i := range r.communities {
+		r.communities[i] = []graph.Node{node(i)}
+	}
+	if g, ok := g.(*ReducedDirected); ok {
+		// Make sure we retain the truncated
+		// community structure.
+		g.communities = communities
+		r.parent = g
+	}
+	weight := positiveWeightFuncFor(g)
+	communityOf := make(map[int]int, commNodes)
+	for i, comm := range communities {
+		r.nodes[i] = community{id: i, nodes: comm}
+		for _, n := range comm {
+			communityOf[n.ID()] = i
+		}
+	}
+	for id, comm := range communities {
+		var out, in []int
+		for _, n := range comm {
+			u := n
+			for _, v := range comm {
+				r.nodes[id].weight += weight(u, v)
+			}
+
+			for _, v := range g.From(u) {
+				vid := communityOf[v.ID()]
+				found := false
+				for _, e := range out {
+					if e == vid {
+						found = true
+						break
+					}
+				}
+				if !found && vid != id {
+					out = append(out, vid)
+				}
+				// Add half weights because the other
+				// ends of edges are also counted.
+				r.weights[[2]int{id, vid}] += weight(u, v) / 2
+			}
+
+			v := n
+			for _, u := range g.To(v) {
+				uid := communityOf[u.ID()]
+				found := false
+				for _, e := range in {
+					if e == uid {
+						found = true
+						break
+					}
+				}
+				if !found && uid != id {
+					in = append(in, uid)
+				}
+				// Add half weights because the other
+				// ends of edges are also counted.
+				r.weights[[2]int{uid, id}] += weight(u, v) / 2
+			}
+		}
+		r.edgesFrom[id] = out
+		r.edgesTo[id] = in
+	}
+	return &r
+}
+
+// Has returns whether the node exists within the graph.
+func (g *ReducedDirected) Has(n graph.Node) bool {
+	id := n.ID()
+	return id >= 0 && id < len(g.nodes)
+}
+
+// Nodes returns all the nodes in the graph.
+func (g *ReducedDirected) Nodes() []graph.Node {
+	nodes := make([]graph.Node, len(g.nodes))
+	for i := range g.nodes {
+		nodes[i] = node(i)
+	}
+	return nodes
+}
+
+// From returns all nodes in g that can be reached directly from u.
+func (g *ReducedDirected) From(u graph.Node) []graph.Node { + out := g.edgesFrom[u.ID()] + nodes := make([]graph.Node, len(out)) + for i, vid := range out { + nodes[i] = g.nodes[vid] + } + return nodes +} + +// To returns all nodes in g that can reach directly to v. +func (g *ReducedDirected) To(v graph.Node) []graph.Node { + in := g.edgesTo[v.ID()] + nodes := make([]graph.Node, len(in)) + for i, uid := range in { + nodes[i] = g.nodes[uid] + } + return nodes +} + +// HasEdgeBetween returns whether an edge exists between nodes x and y. +func (g *ReducedDirected) HasEdgeBetween(x, y graph.Node) bool { + xid := x.ID() + yid := y.ID() + if xid == yid { + return false + } + _, ok := g.weights[[2]int{xid, yid}] + if ok { + return true + } + _, ok = g.weights[[2]int{yid, xid}] + return ok +} + +// HasEdgeFromTo returns whether an edge exists from node u to v. +func (g *ReducedDirected) HasEdgeFromTo(u, v graph.Node) bool { + uid := u.ID() + vid := v.ID() + if uid == vid { + return false + } + _, ok := g.weights[[2]int{uid, vid}] + return ok +} + +// Edge returns the edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +func (g *ReducedDirected) Edge(u, v graph.Node) graph.Edge { + uid := u.ID() + vid := v.ID() + w, ok := g.weights[[2]int{uid, vid}] + if !ok { + return nil + } + return edge{from: g.nodes[uid], to: g.nodes[vid], weight: w} +} + +// Weight returns the weight for the edge between x and y if Edge(x, y) returns a non-nil Edge. +// If x and y are the same node the internal node weight is returned. If there is no joining +// edge between the two nodes the weight value returned is zero. Weight returns true if an edge +// exists between x and y or if x and y have the same ID, false otherwise. +func (g *ReducedDirected) Weight(x, y graph.Node) (w float64, ok bool) { + xid := x.ID() + yid := y.ID() + if xid == yid { + return g.nodes[xid].weight, true + } + w, ok = g.weights[[2]int{xid, yid}] + return w, ok +} + +// directedLocalMover is a step in graph modularity optimization. +type directedLocalMover struct { + g *ReducedDirected + + // nodes is the set of working nodes. + nodes []graph.Node + // edgeWeightsOf is the weighted degree + // of each node indexed by ID. + edgeWeightsOf []directedWeights + + // m is the total sum of edge + // weights in g. + m float64 + + // weight is the weight function + // provided by g or a function + // that returns the Weight value + // of the non-nil edge between x + // and y. + weight func(x, y graph.Node) float64 + + // communities is the current + // division of g. + communities [][]graph.Node + // memberships is a mapping between + // node ID and community membership. + memberships []int + + // resolution is the Reichardt and + // Bornholdt γ parameter as defined + // in doi:10.1103/PhysRevE.74.016110. + resolution float64 + + // moved indicates that a call to + // move has been made since the last + // call to shuffle. + moved bool + + // changed indicates that a move + // has been made since the creation + // of the local mover. + changed bool +} + +type directedWeights struct { + out, in float64 +} + +// newDirectedLocalMover returns a new directedLocalMover initialized with +// the graph g, a set of communities and a modularity resolution parameter. +// The node IDs of g must be contiguous in [0,n) where n is the number of +// nodes. +// If g has a zero edge weight sum, nil is returned. 
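+//
+// Within louvainDirected the mover drives one level of the reduction,
+// roughly as in this sketch of the calling loop (not additional API):
+//
+//  l := newDirectedLocalMover(c, c.communities, resolution)
+//  if l == nil {
+//  	return c // Zero edge weight sum; no improvement is possible.
+//  }
+//  if done := l.localMovingHeuristic(rnd); done {
+//  	return c
+//  }
+//  c = reduceDirected(c, l.communities)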
+func newDirectedLocalMover(g *ReducedDirected, communities [][]graph.Node, resolution float64) *directedLocalMover { + nodes := g.Nodes() + l := directedLocalMover{ + g: g, + nodes: nodes, + edgeWeightsOf: make([]directedWeights, len(nodes)), + communities: communities, + memberships: make([]int, len(nodes)), + resolution: resolution, + weight: positiveWeightFuncFor(g), + } + + // Calculate the total edge weight of the graph + // and degree weights for each node. + for _, n := range l.nodes { + u := n + var wOut float64 + for _, v := range g.From(u) { + wOut += l.weight(u, v) + } + + v := n + var wIn float64 + for _, u := range g.To(v) { + wIn += l.weight(u, v) + } + + w := l.weight(n, n) + l.edgeWeightsOf[n.ID()] = directedWeights{out: w + wOut, in: w + wIn} + l.m += w + wOut + } + + // Assign membership mappings. + for i, c := range communities { + for _, n := range c { + l.memberships[n.ID()] = i + } + } + + return &l +} + +// localMovingHeuristic performs the Louvain local moving heuristic until +// no further moves can be made. It returns a boolean indicating that the +// directedLocalMover has not made any improvement to the community structure and +// so the Louvain algorithm is done. +func (l *directedLocalMover) localMovingHeuristic(rnd func(int) int) (done bool) { + for { + l.shuffle(rnd) + for _, n := range l.nodes { + dQ, dst, src := l.deltaQ(n) + if dQ <= 0 { + continue + } + l.move(dst, src) + } + if !l.moved { + return !l.changed + } + } +} + +// shuffle performs a Fisher-Yates shuffle on the nodes held by the +// directedLocalMover using the random source rnd which should return an +// integer in the range [0,n). +func (l *directedLocalMover) shuffle(rnd func(n int) int) { + l.moved = false + for i := range l.nodes[:len(l.nodes)-1] { + j := i + rnd(len(l.nodes)-i) + l.nodes[i], l.nodes[j] = l.nodes[j], l.nodes[i] + } +} + +// move moves the node at src to the community at dst. +func (l *directedLocalMover) move(dst int, src commIdx) { + l.moved = true + l.changed = true + + srcComm := l.communities[src.community] + n := srcComm[src.node] + + l.memberships[n.ID()] = dst + + l.communities[dst] = append(l.communities[dst], n) + srcComm[src.node], srcComm[len(srcComm)-1] = srcComm[len(srcComm)-1], nil + l.communities[src.community] = srcComm[:len(srcComm)-1] +} + +// deltaQ returns the highest gain in modularity attainable by moving +// n from its current community to another connected community and +// the index of the chosen destination. The index into the directedLocalMover's +// communities field is returned in src if n is in communities. +func (l *directedLocalMover) deltaQ(n graph.Node) (deltaQ float64, dst int, src commIdx) { + id := n.ID() + + a_aa := l.weight(n, n) + k_a := l.edgeWeightsOf[id] + m := l.m + gamma := l.resolution + + // Find communites connected to n. + var connected intsets.Sparse + // The following for loop is equivalent to: + // + // for _, v := range l.g.From(n) { + // connected.Insert(l.memberships[v.ID()]) + // } + // for _, v := range l.g.To(n) { + // connected.Insert(l.memberships[v.ID()]) + // } + // + // This is done to avoid two allocations. + for _, vid := range l.g.edgesFrom[id] { + connected.Insert(l.memberships[vid]) + } + for _, vid := range l.g.edgesTo[id] { + connected.Insert(l.memberships[vid]) + } + // Insert the node's own community. + connected.Insert(l.memberships[id]) + + // Calculate the highest modularity gain + // from moving into another community and + // keep the index of that community. 
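+	// Schematically (see louvain.tex for the derivation), with 𝛼 the
+	// current community of n and 𝛽 a candidate destination, the loop
+	// below evaluates
+	//
+	//  dQremove = (k_aC^in - a_aa) + (k_aC^out - a_aa)
+	//             - γ(k_a^in(Σ_tot^out - k_a^out) + k_a^out(Σ_tot^in - k_a^in))/m
+	//  dQadd    = k_aC^in + k_aC^out - γ(k_a^in Σ_tot^out + k_a^out Σ_tot^in)/m
+	//
+	// where the k_aC and Σ_tot terms are taken over 𝛼 for the removal and
+	// over the candidate 𝛽 for the addition, and the best (dQadd - dQremove)/m
+	// is returned.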
+	var dQremove float64
+	dQadd, dst, src := math.Inf(-1), -1, commIdx{-1, -1}
+	var i int
+	for connected.TakeMin(&i) {
+		c := l.communities[i]
+		var k_aC, sigma_totC directedWeights // C is a substitution for ^𝛼 or ^𝛽.
+		var removal bool
+		for j, u := range c {
+			uid := u.ID()
+			if uid == id {
+				if src.community != -1 {
+					panic("community: multiple sources")
+				}
+				src = commIdx{i, j}
+				removal = true
+			}
+
+			k_aC.in += l.weight(u, n)
+			k_aC.out += l.weight(n, u)
+			// sigma_totC could be kept for each community
+			// and updated for moves, changing the calculation
+			// of sigma_totC here from O(n_c) to O(1), but
+			// in practice the time savings do not appear
+			// to be compelling and do not make up for the
+			// increase in code complexity and space required.
+			w := l.edgeWeightsOf[uid]
+			sigma_totC.in += w.in
+			sigma_totC.out += w.out
+		}
+
+		// See louvain.tex for a derivation of these equations.
+		switch {
+		case removal:
+			// The community c was the current community,
+			// so calculate the change due to removal.
+			dQremove = (k_aC.in /*^𝛼*/ - a_aa) + (k_aC.out /*^𝛼*/ - a_aa) -
+				gamma*(k_a.in*(sigma_totC.out /*^𝛼*/ -k_a.out)+k_a.out*(sigma_totC.in /*^𝛼*/ -k_a.in))/m
+
+		default:
+			// Otherwise calculate the change due to an addition
+			// to c and retain if it is the current best.
+			dQ := k_aC.in /*^𝛽*/ + k_aC.out /*^𝛽*/ -
+				gamma*(k_a.in*sigma_totC.out /*^𝛽*/ +k_a.out*sigma_totC.in /*^𝛽*/)/m
+
+			if dQ > dQadd {
+				dQadd = dQ
+				dst = i
+			}
+		}
+	}
+
+	return (dQadd - dQremove) / m, dst, src
+}
diff --git a/graph/community/louvain_directed_multiplex.go b/graph/community/louvain_directed_multiplex.go
new file mode 100644
index 00000000..6210bd3e
--- /dev/null
+++ b/graph/community/louvain_directed_multiplex.go
@@ -0,0 +1,880 @@
+// Copyright ©2015 The gonum Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package community
+
+import (
+	"fmt"
+	"math"
+	"math/rand"
+	"sort"
+
+	"golang.org/x/tools/container/intsets"
+
+	"github.com/gonum/graph"
+	"github.com/gonum/graph/internal/ordered"
+)
+
+// DirectedMultiplex is a directed multiplex graph.
+type DirectedMultiplex interface {
+	Multiplex
+
+	// Layer returns the lth layer of the
+	// multiplex graph.
+	Layer(l int) graph.Directed
+}
+
+// qDirectedMultiplex returns the modularity Q score of the multiplex graph layers
+// subdivided into the given communities at the given resolutions and weights. Q is
+// returned as the vector of weighted Q scores for each layer of the multiplex graph.
+// If communities is nil, the unclustered modularity score is returned.
+// If weights is nil layers are equally weighted, otherwise the length of
+// weights must equal the number of layers. If resolutions is nil, a resolution
+// of 1.0 is used for all layers, otherwise either a single element slice may be used
+// to specify a global resolution, or the length of resolutions must equal the number
+// of layers. The resolution parameter is γ as defined in Reichardt and Bornholdt
+// doi:10.1103/PhysRevE.74.016110.
+// qDirectedMultiplex will panic if the graph has any layer weight-scaled edge with
+// negative edge weight.
+//
+// Q_{layer} = w_{layer} \sum_{ij} [ A_{layer}*_{ij} - (\gamma_{layer} k_i^in k_j^out)/m ] \delta(c_i,c_j)
+//
+// Note that Q values for multiplex graphs are not scaled by the total layer edge weight.
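+//
+// A sketch of a two-layer call (illustrative only; the layer weights and
+// single global resolution shown are arbitrary):
+//
+//  q := qDirectedMultiplex(g, communities, []float64{1, -1}, []float64{1})
+//  // q[0] and q[1] are the weighted per-layer scores; their sum is the
+//  // modularity of the multiplex graph as a whole.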
+func qDirectedMultiplex(g DirectedMultiplex, communities [][]graph.Node, weights, resolutions []float64) []float64 { + q := make([]float64, g.Depth()) + nodes := g.Nodes() + layerWeight := 1.0 + layerResolution := 1.0 + if len(resolutions) == 1 { + layerResolution = resolutions[0] + } + for l := 0; l < g.Depth(); l++ { + layer := g.Layer(l) + + if weights != nil { + layerWeight = weights[l] + } + if layerWeight == 0 { + continue + } + + if len(resolutions) > 1 { + layerResolution = resolutions[l] + } + + var weight func(x, y graph.Node) float64 + if layerWeight < 0 { + weight = negativeWeightFuncFor(layer) + } else { + weight = positiveWeightFuncFor(layer) + } + + // Calculate the total edge weight of the layer + // and the table of penetrating edge weight sums. + var m float64 + k := make(map[int]directedWeights, len(nodes)) + for _, n := range nodes { + var wOut float64 + u := n + for _, v := range layer.From(u) { + wOut += weight(u, v) + } + var wIn float64 + v := n + for _, u := range layer.To(v) { + wIn += weight(u, v) + } + w := weight(n, n) + m += w + wOut // We only need to count edges once. + k[n.ID()] = directedWeights{out: w + wOut, in: w + wIn} + } + + if communities == nil { + var qLayer float64 + for _, u := range nodes { + kU := k[u.ID()] + qLayer += weight(u, u) - layerResolution*kU.out*kU.in/m + } + q[l] = layerWeight * qLayer + continue + } + + var qLayer float64 + for _, c := range communities { + for _, u := range c { + kU := k[u.ID()] + for _, v := range c { + kV := k[v.ID()] + qLayer += weight(u, v) - layerResolution*kU.out*kV.in/m + } + } + } + q[l] = layerWeight * qLayer + } + + return q +} + +// DirectedLayers implements DirectedMultiplex. +type DirectedLayers []graph.Directed + +// NewDirectedLayers returns a DirectedLayers using the provided layers +// ensuring there is a match between IDs for each layer. +func NewDirectedLayers(layers ...graph.Directed) (DirectedLayers, error) { + if len(layers) == 0 { + return nil, nil + } + var base, next intsets.Sparse + for _, n := range layers[0].Nodes() { + base.Insert(n.ID()) + } + for i, l := range layers[1:] { + next.Clear() + for _, n := range l.Nodes() { + next.Insert(n.ID()) + } + if !next.Equals(&base) { + return nil, fmt.Errorf("community: layer ID mismatch between layers: %d", i+1) + } + } + return layers, nil +} + +// Nodes returns the nodes of the receiver. +func (g DirectedLayers) Nodes() []graph.Node { + if len(g) == 0 { + return nil + } + return g[0].Nodes() +} + +// Depth returns the depth of the multiplex graph. +func (g DirectedLayers) Depth() int { return len(g) } + +// Layer returns the lth layer of the multiplex graph. +func (g DirectedLayers) Layer(l int) graph.Directed { return g[l] } + +// louvainDirectedMultiplex returns the hierarchical modularization of g at the given resolution +// using the Louvain algorithm. If all is true and g has negatively weighted layers, all +// communities will be searched during the modularization. If src is nil, rand.Intn is +// used as the random generator. louvainDirectedMultiplex will panic if g has any edge with +// edge weight that does not sign-match the layer weight. +// +// graph.Undirect may be used as a shim to allow modularization of directed graphs. 
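+//
+// In the exported API this entry point is reached through ModularizeMultiplex.
+// A minimal sketch of typical use (illustrative only; the layer names and
+// weights are hypothetical):
+//
+//  layers, err := NewDirectedLayers(friends, enemies)
+//  if err != nil {
+//  	// handle error
+//  }
+//  r := ModularizeMultiplex(layers, []float64{1, -1}, nil, true, nil)
+//  communities := r.Communities()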
+func louvainDirectedMultiplex(g DirectedMultiplex, weights, resolutions []float64, all bool, src *rand.Rand) *ReducedDirectedMultiplex { + if weights != nil && len(weights) != g.Depth() { + panic("community: weights vector length mismatch") + } + if resolutions != nil && len(resolutions) != 1 && len(resolutions) != g.Depth() { + panic("community: resolutions vector length mismatch") + } + + // See louvain.tex for a detailed description + // of the algorithm used here. + + c := reduceDirectedMultiplex(g, nil, weights) + rnd := rand.Intn + if src != nil { + rnd = src.Intn + } + for { + l := newDirectedMultiplexLocalMover(c, c.communities, weights, resolutions, all) + if l == nil { + return c + } + if done := l.localMovingHeuristic(rnd); done { + return c + } + c = reduceDirectedMultiplex(c, l.communities, weights) + } +} + +// ReducedDirectedMultiplex is a directed graph of communities derived from a +// parent graph by reduction. +type ReducedDirectedMultiplex struct { + // nodes is the set of nodes held + // by the graph. In a ReducedDirectedMultiplex + // the node ID is the index into + // nodes. + nodes []multiplexCommunity + layers []directedEdges + + // communities is the community + // structure of the graph. + communities [][]graph.Node + + parent *ReducedDirectedMultiplex +} + +var ( + _ DirectedMultiplex = (*ReducedDirectedMultiplex)(nil) + _ graph.Directed = (*directedLayerHandle)(nil) + _ graph.Weighter = (*directedLayerHandle)(nil) +) + +// Nodes returns all the nodes in the graph. +func (g *ReducedDirectedMultiplex) Nodes() []graph.Node { + nodes := make([]graph.Node, len(g.nodes)) + for i := range g.nodes { + nodes[i] = node(i) + } + return nodes +} + +// Depth returns the number of layers in the multiplex graph. +func (g *ReducedDirectedMultiplex) Depth() int { return len(g.layers) } + +// Layer returns the lth layer of the multiplex graph. +func (g *ReducedDirectedMultiplex) Layer(l int) graph.Directed { + return directedLayerHandle{multiplex: g, layer: l} +} + +// Communities returns the community memberships of the nodes in the +// graph used to generate the reduced graph. +func (g *ReducedDirectedMultiplex) Communities() [][]graph.Node { + communities := make([][]graph.Node, len(g.communities)) + if g.parent == nil { + for i, members := range g.communities { + comm := make([]graph.Node, len(members)) + for j, n := range members { + nodes := g.nodes[n.ID()].nodes + if len(nodes) != 1 { + panic("community: unexpected number of nodes in base graph community") + } + comm[j] = nodes[0] + } + communities[i] = comm + } + return communities + } + sub := g.parent.Communities() + for i, members := range g.communities { + var comm []graph.Node + for _, n := range members { + comm = append(comm, sub[n.ID()]...) + } + communities[i] = comm + } + return communities +} + +// Structure returns the community structure of the current level of +// the module clustering. The first index of the returned value +// corresponds to the index of the nodes in the next higher level if +// it exists. The returned value should not be mutated. +func (g *ReducedDirectedMultiplex) Structure() [][]graph.Node { + return g.communities +} + +// Expanded returns the next lower level of the module clustering or nil +// if at the lowest level. +func (g *ReducedDirectedMultiplex) Expanded() ReducedMultiplex { + return g.parent +} + +// reduceDirectedMultiplex returns a reduced graph constructed from g divided +// into the given communities. 
The communities value is mutated +// by the call to reduceDirectedMultiplex. If communities is nil and g is a +// ReducedDirectedMultiplex, it is returned unaltered. +func reduceDirectedMultiplex(g DirectedMultiplex, communities [][]graph.Node, weights []float64) *ReducedDirectedMultiplex { + if communities == nil { + if r, ok := g.(*ReducedDirectedMultiplex); ok { + return r + } + + nodes := g.Nodes() + // TODO(kortschak) This sort is necessary really only + // for testing. In practice we would not be using the + // community provided by the user for a Q calculation. + // Probably we should use a function to map the + // communities in the test sets to the remapped order. + sort.Sort(ordered.ByID(nodes)) + communities = make([][]graph.Node, len(nodes)) + for i := range nodes { + communities[i] = []graph.Node{node(i)} + } + + r := ReducedDirectedMultiplex{ + nodes: make([]multiplexCommunity, len(nodes)), + layers: make([]directedEdges, g.Depth()), + communities: communities, + } + communityOf := make(map[int]int, len(nodes)) + for i, n := range nodes { + r.nodes[i] = multiplexCommunity{id: i, nodes: []graph.Node{n}, weights: make([]float64, depth(weights))} + communityOf[n.ID()] = i + } + for i := range r.layers { + r.layers[i] = directedEdges{ + edgesFrom: make([][]int, len(nodes)), + edgesTo: make([][]int, len(nodes)), + weights: make(map[[2]int]float64), + } + } + w := 1.0 + for l := 0; l < g.Depth(); l++ { + layer := g.Layer(l) + if weights != nil { + w = weights[l] + } + if w == 0 { + continue + } + var sign float64 + var weight func(x, y graph.Node) float64 + if w < 0 { + sign, weight = -1, negativeWeightFuncFor(layer) + } else { + sign, weight = 1, positiveWeightFuncFor(layer) + } + for _, n := range nodes { + id := communityOf[n.ID()] + + var out []int + u := n + for _, v := range layer.From(u) { + vid := communityOf[v.ID()] + if vid != id { + out = append(out, vid) + } + r.layers[l].weights[[2]int{id, vid}] = sign * weight(u, v) + } + r.layers[l].edgesFrom[id] = out + + var in []int + v := n + for _, u := range layer.To(v) { + uid := communityOf[u.ID()] + if uid != id { + in = append(in, uid) + } + r.layers[l].weights[[2]int{uid, id}] = sign * weight(u, v) + } + r.layers[l].edgesTo[id] = in + } + } + return &r + } + + // Remove zero length communities destructively. + var commNodes int + for i := 0; i < len(communities); { + comm := communities[i] + if len(comm) == 0 { + communities[i] = communities[len(communities)-1] + communities[len(communities)-1] = nil + communities = communities[:len(communities)-1] + } else { + commNodes += len(comm) + i++ + } + } + + r := ReducedDirectedMultiplex{ + nodes: make([]multiplexCommunity, len(communities)), + layers: make([]directedEdges, g.Depth()), + } + communityOf := make(map[int]int, commNodes) + for i, comm := range communities { + r.nodes[i] = multiplexCommunity{id: i, nodes: comm, weights: make([]float64, depth(weights))} + for _, n := range comm { + communityOf[n.ID()] = i + } + } + for i := range r.layers { + r.layers[i] = directedEdges{ + edgesFrom: make([][]int, len(communities)), + edgesTo: make([][]int, len(communities)), + weights: make(map[[2]int]float64), + } + } + r.communities = make([][]graph.Node, len(communities)) + for i := range r.communities { + r.communities[i] = []graph.Node{node(i)} + } + if g, ok := g.(*ReducedDirectedMultiplex); ok { + // Make sure we retain the truncated + // community structure. 
+		g.communities = communities
+		r.parent = g
+	}
+	w := 1.0
+	for l := 0; l < g.Depth(); l++ {
+		layer := g.Layer(l)
+		if weights != nil {
+			w = weights[l]
+		}
+		if w == 0 {
+			continue
+		}
+		var sign float64
+		var weight func(x, y graph.Node) float64
+		if w < 0 {
+			sign, weight = -1, negativeWeightFuncFor(layer)
+		} else {
+			sign, weight = 1, positiveWeightFuncFor(layer)
+		}
+		for id, comm := range communities {
+			var out, in []int
+			for _, n := range comm {
+				u := n
+				for _, v := range comm {
+					r.nodes[id].weights[l] += sign * weight(u, v)
+				}
+
+				for _, v := range layer.From(u) {
+					vid := communityOf[v.ID()]
+					found := false
+					for _, e := range out {
+						if e == vid {
+							found = true
+							break
+						}
+					}
+					if !found && vid != id {
+						out = append(out, vid)
+					}
+					// Add half weights because the other
+					// ends of edges are also counted.
+					r.layers[l].weights[[2]int{id, vid}] += sign * weight(u, v) / 2
+				}
+
+				v := n
+				for _, u := range layer.To(v) {
+					uid := communityOf[u.ID()]
+					found := false
+					for _, e := range in {
+						if e == uid {
+							found = true
+							break
+						}
+					}
+					if !found && uid != id {
+						in = append(in, uid)
+					}
+					// Add half weights because the other
+					// ends of edges are also counted.
+					r.layers[l].weights[[2]int{uid, id}] += sign * weight(u, v) / 2
+				}
+
+			}
+			r.layers[l].edgesFrom[id] = out
+			r.layers[l].edgesTo[id] = in
+		}
+	}
+	return &r
+}
+
+// directedLayerHandle is a handle to a multiplex graph layer.
+type directedLayerHandle struct {
+	// multiplex is the complete
+	// multiplex graph.
+	multiplex *ReducedDirectedMultiplex
+
+	// layer is an index into the
+	// multiplex for the current
+	// layer.
+	layer int
+}
+
+// Has returns whether the node exists within the graph.
+func (g directedLayerHandle) Has(n graph.Node) bool {
+	id := n.ID()
+	return id >= 0 && id < len(g.multiplex.nodes)
+}
+
+// Nodes returns all the nodes in the graph.
+func (g directedLayerHandle) Nodes() []graph.Node {
+	nodes := make([]graph.Node, len(g.multiplex.nodes))
+	for i := range g.multiplex.nodes {
+		nodes[i] = node(i)
+	}
+	return nodes
+}
+
+// From returns all nodes in g that can be reached directly from u.
+func (g directedLayerHandle) From(u graph.Node) []graph.Node {
+	out := g.multiplex.layers[g.layer].edgesFrom[u.ID()]
+	nodes := make([]graph.Node, len(out))
+	for i, vid := range out {
+		nodes[i] = g.multiplex.nodes[vid]
+	}
+	return nodes
+}
+
+// To returns all nodes in g that can reach directly to v.
+func (g directedLayerHandle) To(v graph.Node) []graph.Node {
+	in := g.multiplex.layers[g.layer].edgesTo[v.ID()]
+	nodes := make([]graph.Node, len(in))
+	for i, uid := range in {
+		nodes[i] = g.multiplex.nodes[uid]
+	}
+	return nodes
+}
+
+// HasEdgeBetween returns whether an edge exists between nodes x and y.
+func (g directedLayerHandle) HasEdgeBetween(x, y graph.Node) bool {
+	xid := x.ID()
+	yid := y.ID()
+	if xid == yid {
+		return false
+	}
+	_, ok := g.multiplex.layers[g.layer].weights[[2]int{xid, yid}]
+	if ok {
+		return true
+	}
+	_, ok = g.multiplex.layers[g.layer].weights[[2]int{yid, xid}]
+	return ok
+}
+
+// HasEdgeFromTo returns whether an edge exists from node u to v.
+func (g directedLayerHandle) HasEdgeFromTo(u, v graph.Node) bool {
+	uid := u.ID()
+	vid := v.ID()
+	if uid == vid {
+		return false
+	}
+	_, ok := g.multiplex.layers[g.layer].weights[[2]int{uid, vid}]
+	return ok
+}
+
+// Edge returns the edge from u to v if such an edge exists and nil otherwise.
+// The node v must be directly reachable from u as defined by the From method.
+func (g directedLayerHandle) Edge(u, v graph.Node) graph.Edge { + uid := u.ID() + vid := v.ID() + w, ok := g.multiplex.layers[g.layer].weights[[2]int{uid, vid}] + if !ok { + return nil + } + return multiplexEdge{from: g.multiplex.nodes[u.ID()], to: g.multiplex.nodes[v.ID()], weight: w} +} + +// EdgeBetween returns the edge between nodes x and y. +func (g directedLayerHandle) EdgeBetween(x, y graph.Node) graph.Edge { + return g.Edge(x, y) +} + +// Weight returns the weight for the edge between x and y if Edge(x, y) returns a non-nil Edge. +// If x and y are the same node the internal node weight is returned. If there is no joining +// edge between the two nodes the weight value returned is zero. Weight returns true if an edge +// exists between x and y or if x and y have the same ID, false otherwise. +func (g directedLayerHandle) Weight(x, y graph.Node) (w float64, ok bool) { + xid := x.ID() + yid := y.ID() + if xid == yid { + return g.multiplex.nodes[xid].weights[g.layer], true + } + w, ok = g.multiplex.layers[g.layer].weights[[2]int{xid, yid}] + return w, ok +} + +// directedMultiplexLocalMover is a step in graph modularity optimization. +type directedMultiplexLocalMover struct { + g *ReducedDirectedMultiplex + + // nodes is the set of working nodes. + nodes []graph.Node + // edgeWeightsOf is the weighted degree + // of each node indexed by ID. + edgeWeightsOf [][]directedWeights + + // m is the total sum of + // edge weights in g. + m []float64 + + // weight is the weight function + // provided by g or a function + // that returns the Weight value + // of the non-nil edge between x + // and y. + weight []func(x, y graph.Node) float64 + + // communities is the current + // division of g. + communities [][]graph.Node + // memberships is a mapping between + // node ID and community membership. + memberships []int + + // resolution is the Reichardt and + // Bornholdt γ parameter as defined + // in doi:10.1103/PhysRevE.74.016110. + resolutions []float64 + + // weights is the layer weights for + // the modularisation. + weights []float64 + + // searchAll specifies whether the local + // mover should consider non-connected + // communities during the local moving + // heuristic. + searchAll bool + + // moved indicates that a call to + // move has been made since the last + // call to shuffle. + moved bool + + // changed indicates that a move + // has been made since the creation + // of the local mover. + changed bool +} + +// newDirectedMultiplexLocalMover returns a new directedMultiplexLocalMover initialized with +// the graph g, a set of communities and a modularity resolution parameter. The +// node IDs of g must be contiguous in [0,n) where n is the number of nodes. +// If g has a zero edge weight sum, nil is returned. +func newDirectedMultiplexLocalMover(g *ReducedDirectedMultiplex, communities [][]graph.Node, weights, resolutions []float64, all bool) *directedMultiplexLocalMover { + nodes := g.Nodes() + l := directedMultiplexLocalMover{ + g: g, + nodes: nodes, + edgeWeightsOf: make([][]directedWeights, g.Depth()), + m: make([]float64, g.Depth()), + communities: communities, + memberships: make([]int, len(nodes)), + resolutions: resolutions, + weights: weights, + weight: make([]func(x, y graph.Node) float64, g.Depth()), + } + + // Calculate the total edge weight of the graph + // and degree weights for each node. 
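+	// Layers with a zero layer weight or a zero edge weight sum are
+	// skipped; if every layer is skipped no mover is returned and the
+	// caller terminates the current level.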
+ var zero int + for i := 0; i < g.Depth(); i++ { + l.edgeWeightsOf[i] = make([]directedWeights, len(nodes)) + var weight func(x, y graph.Node) float64 + + if weights != nil { + if weights[i] == 0 { + zero++ + continue + } + if weights[i] < 0 { + weight = negativeWeightFuncFor(g.Layer(i)) + l.searchAll = all + } else { + weight = positiveWeightFuncFor(g.Layer(i)) + } + } else { + weight = positiveWeightFuncFor(g.Layer(i)) + } + + l.weight[i] = weight + layer := g.Layer(i) + for _, n := range l.nodes { + u := n + var wOut float64 + for _, v := range layer.From(u) { + wOut += weight(u, v) + } + + v := n + var wIn float64 + for _, u := range layer.To(v) { + wIn += weight(u, v) + } + + w := weight(n, n) + l.edgeWeightsOf[i][u.ID()] = directedWeights{out: w + wOut, in: w + wIn} + l.m[i] += w + wOut + } + if l.m[i] == 0 { + zero++ + } + } + if zero == g.Depth() { + return nil + } + + // Assign membership mappings. + for i, c := range communities { + for _, n := range c { + l.memberships[n.ID()] = i + } + } + + return &l +} + +// localMovingHeuristic performs the Louvain local moving heuristic until +// no further moves can be made. It returns a boolean indicating that the +// directedMultiplexLocalMover has not made any improvement to the community +// structure and so the Louvain algorithm is done. +func (l *directedMultiplexLocalMover) localMovingHeuristic(rnd func(int) int) (done bool) { + for { + l.shuffle(rnd) + for _, n := range l.nodes { + dQ, dst, src := l.deltaQ(n) + if dQ <= 0 { + continue + } + l.move(dst, src) + } + if !l.moved { + return !l.changed + } + } +} + +// shuffle performs a Fisher-Yates shuffle on the nodes held by the +// directedMultiplexLocalMover using the random source rnd which should return +// an integer in the range [0,n). +func (l *directedMultiplexLocalMover) shuffle(rnd func(n int) int) { + l.moved = false + for i := range l.nodes[:len(l.nodes)-1] { + j := i + rnd(len(l.nodes)-i) + l.nodes[i], l.nodes[j] = l.nodes[j], l.nodes[i] + } +} + +// move moves the node at src to the community at dst. +func (l *directedMultiplexLocalMover) move(dst int, src commIdx) { + l.moved = true + l.changed = true + + srcComm := l.communities[src.community] + n := srcComm[src.node] + + l.memberships[n.ID()] = dst + + l.communities[dst] = append(l.communities[dst], n) + srcComm[src.node], srcComm[len(srcComm)-1] = srcComm[len(srcComm)-1], nil + l.communities[src.community] = srcComm[:len(srcComm)-1] +} + +// deltaQ returns the highest gain in modularity attainable by moving +// n from its current community to another connected community and +// the index of the chosen destination. The index into the +// directedMultiplexLocalMover's communities field is returned in src if n +// is in communities. +func (l *directedMultiplexLocalMover) deltaQ(n graph.Node) (deltaQ float64, dst int, src commIdx) { + id := n.ID() + + var iterator minTaker + if l.searchAll { + iterator = &dense{n: len(l.communities)} + } else { + // Find communities connected to n. + var connected intsets.Sparse + // The following for loop is equivalent to: + // + // for i := 0; i < l.g.Depth(); i++ { + // for _, v := range l.g.Layer(i).From(n) { + // connected.Insert(l.memberships[v.ID()]) + // } + // for _, v := range l.g.Layer(i).To(n) { + // connected.Insert(l.memberships[v.ID()]) + // } + // } + // + // This is done to avoid an allocation for + // each layer. 
+ for _, layer := range l.g.layers { + for _, vid := range layer.edgesFrom[id] { + connected.Insert(l.memberships[vid]) + } + for _, vid := range layer.edgesTo[id] { + connected.Insert(l.memberships[vid]) + } + } + // Insert the node's own community. + connected.Insert(l.memberships[id]) + iterator = &connected + } + + // Calculate the highest modularity gain + // from moving into another community and + // keep the index of that community. + var dQremove float64 + dQadd, dst, src := math.Inf(-1), -1, commIdx{-1, -1} + var i int + for iterator.TakeMin(&i) { + c := l.communities[i] + var removal bool + var _dQadd float64 + for layer := 0; layer < l.g.Depth(); layer++ { + m := l.m[layer] + if m == 0 { + // Do not consider layers with zero sum edge weight. + continue + } + w := 1.0 + if l.weights != nil { + w = l.weights[layer] + } + if w == 0 { + // Do not consider layers with zero weighting. + continue + } + + var k_aC, sigma_totC directedWeights // C is a substitution for ^𝛼 or ^𝛽. + removal = false + for j, u := range c { + uid := u.ID() + if uid == id { + // Only mark and check src community on the first layer. + if layer == 0 { + if src.community != -1 { + panic("community: multiple sources") + } + src = commIdx{i, j} + } + removal = true + } + + k_aC.in += l.weight[layer](n, u) + k_aC.out += l.weight[layer](u, n) + // sigma_totC could be kept for each community + // and updated for moves, changing the calculation + // of sigma_totC here from O(n_c) to O(1), but + // in practice the time savings do not appear + // to be compelling and do not make up for the + // increase in code complexity and space required. + w := l.edgeWeightsOf[layer][uid] + sigma_totC.in += w.in + sigma_totC.out += w.out + } + + a_aa := l.weight[layer](n, n) + k_a := l.edgeWeightsOf[layer][id] + gamma := 1.0 + if l.resolutions != nil { + if len(l.resolutions) == 1 { + gamma = l.resolutions[0] + } else { + gamma = l.resolutions[layer] + } + } + + // See louvain.tex for a derivation of these equations. + // The weighting term, w, is described in V Traag, + // "Algorithms and dynamical models for communities and + // reputation in social networks", chapter 5. + // http://www.traag.net/wp/wp-content/papercite-data/pdf/traag_algorithms_2013.pdf + switch { + case removal: + // The community c was the current community, + // so calculate the change due to removal. + dQremove += w * ((k_aC.in /*^𝛼*/ - a_aa) + (k_aC.out /*^𝛼*/ - a_aa) - + gamma*(k_a.in*(sigma_totC.out /*^𝛼*/ -k_a.out)+k_a.out*(sigma_totC.in /*^𝛼*/ -k_a.in))/m) + + default: + // Otherwise calculate the change due to an addition + // to c. + _dQadd += w * (k_aC.in /*^𝛽*/ + k_aC.out /*^𝛽*/ - + gamma*(k_a.in*sigma_totC.out /*^𝛽*/ +k_a.out*sigma_totC.in /*^𝛽*/)/m) + } + } + if !removal && _dQadd > dQadd { + dQadd = _dQadd + dst = i + } + } + + return dQadd - dQremove, dst, src +} diff --git a/graph/community/louvain_directed_multiplex_test.go b/graph/community/louvain_directed_multiplex_test.go new file mode 100644 index 00000000..1e4e4fe5 --- /dev/null +++ b/graph/community/louvain_directed_multiplex_test.go @@ -0,0 +1,700 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package community + +import ( + "math" + "math/rand" + "reflect" + "sort" + "testing" + + "github.com/gonum/floats" + "github.com/gonum/graph" + "github.com/gonum/graph/internal/ordered" + "github.com/gonum/graph/simple" +) + +var communityDirectedMultiplexQTests = []struct { + name string + layers []layer + structures []structure + + wantLevels []level +}{ + { + name: "unconnected", + layers: []layer{{g: unconnected, weight: 1}}, + structures: []structure{ + { + resolution: 1, + memberships: []set{ + 0: linksTo(0), + 1: linksTo(1), + 2: linksTo(2), + 3: linksTo(3), + 4: linksTo(4), + 5: linksTo(5), + }, + want: math.NaN(), + }, + }, + wantLevels: []level{ + { + q: math.Inf(-1), // Here math.Inf(-1) is used as a place holder for NaN to allow use of reflect.DeepEqual. + communities: [][]graph.Node{ + {simple.Node(0)}, + {simple.Node(1)}, + {simple.Node(2)}, + {simple.Node(3)}, + {simple.Node(4)}, + {simple.Node(5)}, + }, + }, + }, + }, + { + name: "simple_directed", + layers: []layer{{g: simpleDirected, weight: 1}}, + // community structure and modularity calculated by C++ implementation: louvain igraph. + // Note that louvain igraph returns Q as an unscaled value. + structures: []structure{ + { + resolution: 1, + memberships: []set{ + 0: linksTo(0, 1), + 1: linksTo(2, 3, 4), + }, + want: 0.5714285714285716, + tol: 1e-10, + }, + }, + wantLevels: []level{ + { + communities: [][]graph.Node{ + {simple.Node(0), simple.Node(1)}, + {simple.Node(2), simple.Node(3), simple.Node(4)}, + }, + q: 0.5714285714285716, + }, + { + communities: [][]graph.Node{ + {simple.Node(0)}, + {simple.Node(1)}, + {simple.Node(2)}, + {simple.Node(3)}, + {simple.Node(4)}, + }, + q: -1.2857142857142856, + }, + }, + }, + { + name: "simple_directed_twice", + layers: []layer{ + {g: simpleDirected, weight: 0.5}, + {g: simpleDirected, weight: 0.5}, + }, + // community structure and modularity calculated by C++ implementation: louvain igraph. + // Note that louvain igraph returns Q as an unscaled value. 
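+			// Splitting the simple_directed layer into two half-weighted
+			// copies should leave the expected communities and Q unchanged
+			// from the single-layer case above.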
+ structures: []structure{ + { + resolution: 1, + memberships: []set{ + 0: linksTo(0, 1), + 1: linksTo(2, 3, 4), + }, + want: 0.5714285714285716, + tol: 1e-10, + }, + }, + wantLevels: []level{ + { + q: 0.5714285714285716, + communities: [][]graph.Node{ + {simple.Node(0), simple.Node(1)}, + {simple.Node(2), simple.Node(3), simple.Node(4)}, + }, + }, + { + q: -1.2857142857142856, + communities: [][]graph.Node{ + {simple.Node(0)}, + {simple.Node(1)}, + {simple.Node(2)}, + {simple.Node(3)}, + {simple.Node(4)}, + }, + }, + }, + }, + { + name: "small_dumbell", + layers: []layer{ + {g: smallDumbell, edgeWeight: 1, weight: 1}, + {g: dumbellRepulsion, edgeWeight: -1, weight: -1}, + }, + structures: []structure{ + { + resolution: 1, + memberships: []set{ + 0: linksTo(0, 1, 2), + 1: linksTo(3, 4, 5), + }, + want: 2.5714285714285716, tol: 1e-10, + }, + { + resolution: 1, + memberships: []set{ + 0: linksTo(0, 1, 2, 3, 4, 5), + }, + want: 0, tol: 1e-14, + }, + }, + wantLevels: []level{ + { + q: 2.5714285714285716, + communities: [][]graph.Node{ + {simple.Node(0), simple.Node(1), simple.Node(2)}, + {simple.Node(3), simple.Node(4), simple.Node(5)}, + }, + }, + { + q: -0.857142857142857, + communities: [][]graph.Node{ + {simple.Node(0)}, + {simple.Node(1)}, + {simple.Node(2)}, + {simple.Node(3)}, + {simple.Node(4)}, + {simple.Node(5)}, + }, + }, + }, + }, + { + name: "repulsion", + layers: []layer{{g: repulsion, edgeWeight: -1, weight: -1}}, + structures: []structure{ + { + resolution: 1, + memberships: []set{ + 0: linksTo(0, 1, 2), + 1: linksTo(3, 4, 5), + }, + want: 9.0, tol: 1e-10, + }, + { + resolution: 1, + memberships: []set{ + 0: linksTo(0), + 1: linksTo(1), + 2: linksTo(2), + 3: linksTo(3), + 4: linksTo(4), + 5: linksTo(5), + }, + want: 3, tol: 1e-14, + }, + }, + wantLevels: []level{ + { + q: 9.0, + communities: [][]graph.Node{ + {simple.Node(0), simple.Node(1), simple.Node(2)}, + {simple.Node(3), simple.Node(4), simple.Node(5)}, + }, + }, + { + q: 3.0, + communities: [][]graph.Node{ + {simple.Node(0)}, + {simple.Node(1)}, + {simple.Node(2)}, + {simple.Node(3)}, + {simple.Node(4)}, + {simple.Node(5)}, + }, + }, + }, + }, + { + name: "middle_east", + layers: []layer{ + {g: middleEast.friends, edgeWeight: 1, weight: 1}, + {g: middleEast.enemies, edgeWeight: -1, weight: -1}, + }, + structures: []structure{ + { + resolution: 1, + memberships: []set{ + 0: linksTo(0, 6), + 1: linksTo(1, 7, 9, 12), + 2: linksTo(2, 8, 11), + 3: linksTo(3, 4, 5, 10), + }, + want: 33.818057455540355, tol: 1e-9, + }, + { + resolution: 1, + memberships: []set{ + 0: linksTo(0, 2, 3, 4, 5, 10), + 1: linksTo(1, 7, 9, 12), + 2: linksTo(6), + 3: linksTo(8, 11), + }, + want: 30.92749658, tol: 1e-7, + }, + { + resolution: 1, + memberships: []set{ + 0: linksTo(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12), + }, + want: 0, tol: 1e-14, + }, + }, + wantLevels: []level{ + { + q: 33.818057455540355, + communities: [][]graph.Node{ + {simple.Node(0), simple.Node(6)}, + {simple.Node(1), simple.Node(7), simple.Node(9), simple.Node(12)}, + {simple.Node(2), simple.Node(8), simple.Node(11)}, + {simple.Node(3), simple.Node(4), simple.Node(5), simple.Node(10)}, + }, + }, + { + q: 3.8071135430916545, + communities: [][]graph.Node{ + {simple.Node(0)}, + {simple.Node(1)}, + {simple.Node(2)}, + {simple.Node(3)}, + {simple.Node(4)}, + {simple.Node(5)}, + {simple.Node(6)}, + {simple.Node(7)}, + {simple.Node(8)}, + {simple.Node(9)}, + {simple.Node(10)}, + {simple.Node(11)}, + {simple.Node(12)}, + }, + }, + }, + }, +} + +func TestCommunityQDirectedMultiplex(t 
*testing.T) { + for _, test := range communityDirectedMultiplexQTests { + g, weights, err := directedMultiplexFrom(test.layers) + if err != nil { + t.Errorf("unexpected error creating multiplex: %v", err) + continue + } + + for _, structure := range test.structures { + communities := make([][]graph.Node, len(structure.memberships)) + for i, c := range structure.memberships { + for n := range c { + communities[i] = append(communities[i], simple.Node(n)) + } + } + q := QMultiplex(g, communities, weights, []float64{structure.resolution}) + got := floats.Sum(q) + if !floats.EqualWithinAbsOrRel(got, structure.want, structure.tol, structure.tol) && !math.IsNaN(structure.want) { + for _, c := range communities { + sort.Sort(ordered.ByID(c)) + } + t.Errorf("unexpected Q value for %q %v: got: %v %.3v want: %v", + test.name, communities, got, q, structure.want) + } + } + } +} + +func TestCommunityDeltaQDirectedMultiplex(t *testing.T) { +tests: + for _, test := range communityDirectedMultiplexQTests { + g, weights, err := directedMultiplexFrom(test.layers) + if err != nil { + t.Errorf("unexpected error creating multiplex: %v", err) + continue + } + + rnd := rand.New(rand.NewSource(1)).Intn + for _, structure := range test.structures { + communityOf := make(map[int]int) + communities := make([][]graph.Node, len(structure.memberships)) + for i, c := range structure.memberships { + for n := range c { + communityOf[n] = i + communities[i] = append(communities[i], simple.Node(n)) + } + sort.Sort(ordered.ByID(communities[i])) + } + resolution := []float64{structure.resolution} + + before := QMultiplex(g, communities, weights, resolution) + + // We test exhaustively. + const all = true + + l := newDirectedMultiplexLocalMover( + reduceDirectedMultiplex(g, nil, weights), + communities, weights, resolution, all) + if l == nil { + if !math.IsNaN(floats.Sum(before)) { + t.Errorf("unexpected nil localMover with non-NaN Q graph: Q=%.4v", before) + } + continue tests + } + + // This is done to avoid run-to-run + // variation due to map iteration order. 
+ sort.Sort(ordered.ByID(l.nodes)) + + l.shuffle(rnd) + + for _, target := range l.nodes { + got, gotDst, gotSrc := l.deltaQ(target) + + want, wantDst := math.Inf(-1), -1 + migrated := make([][]graph.Node, len(structure.memberships)) + for i, c := range structure.memberships { + for n := range c { + if n == target.ID() { + continue + } + migrated[i] = append(migrated[i], simple.Node(n)) + } + sort.Sort(ordered.ByID(migrated[i])) + } + + for i, c := range structure.memberships { + if i == communityOf[target.ID()] { + continue + } + if !(all && hasNegative(weights)) { + connected := false + search: + for l := 0; l < g.Depth(); l++ { + if weights[l] < 0 { + connected = true + break search + } + layer := g.Layer(l) + for n := range c { + if layer.HasEdgeBetween(simple.Node(n), target) { + connected = true + break search + } + } + } + if !connected { + continue + } + } + migrated[i] = append(migrated[i], target) + after := QMultiplex(g, migrated, weights, resolution) + migrated[i] = migrated[i][:len(migrated[i])-1] + if delta := floats.Sum(after) - floats.Sum(before); delta > want { + want = delta + wantDst = i + } + } + + if !floats.EqualWithinAbsOrRel(got, want, structure.tol, structure.tol) || gotDst != wantDst { + t.Errorf("unexpected result moving n=%d in c=%d of %s/%.4v: got: %.4v,%d want: %.4v,%d"+ + "\n\t%v\n\t%v", + target.ID(), communityOf[target.ID()], test.name, structure.resolution, got, gotDst, want, wantDst, + communities, migrated) + } + if gotSrc.community != communityOf[target.ID()] { + t.Errorf("unexpected source community index: got: %d want: %d", gotSrc, communityOf[target.ID()]) + } else if communities[gotSrc.community][gotSrc.node].ID() != target.ID() { + wantNodeIdx := -1 + for i, n := range communities[gotSrc.community] { + if n.ID() == target.ID() { + wantNodeIdx = i + break + } + } + t.Errorf("unexpected source node index: got: %d want: %d", gotSrc.node, wantNodeIdx) + } + } + } + } +} + +func TestReduceQConsistencyDirectedMultiplex(t *testing.T) { +tests: + for _, test := range communityDirectedMultiplexQTests { + g, weights, err := directedMultiplexFrom(test.layers) + if err != nil { + t.Errorf("unexpected error creating multiplex: %v", err) + continue + } + + for _, structure := range test.structures { + if math.IsNaN(structure.want) { + continue tests + } + + communities := make([][]graph.Node, len(structure.memberships)) + for i, c := range structure.memberships { + for n := range c { + communities[i] = append(communities[i], simple.Node(n)) + } + sort.Sort(ordered.ByID(communities[i])) + } + + gQ := QMultiplex(g, communities, weights, []float64{structure.resolution}) + gQnull := QMultiplex(g, nil, weights, nil) + + cg0 := reduceDirectedMultiplex(g, nil, weights) + cg0Qnull := QMultiplex(cg0, cg0.Structure(), weights, nil) + if !floats.EqualWithinAbsOrRel(floats.Sum(gQnull), floats.Sum(cg0Qnull), structure.tol, structure.tol) { + t.Errorf("disagreement between null Q from method: %v and function: %v", cg0Qnull, gQnull) + } + cg0Q := QMultiplex(cg0, communities, weights, []float64{structure.resolution}) + if !floats.EqualWithinAbsOrRel(floats.Sum(gQ), floats.Sum(cg0Q), structure.tol, structure.tol) { + t.Errorf("unexpected Q result after initial reduction: got: %v want :%v", cg0Q, gQ) + } + + cg1 := reduceDirectedMultiplex(cg0, communities, weights) + cg1Q := QMultiplex(cg1, cg1.Structure(), weights, []float64{structure.resolution}) + if !floats.EqualWithinAbsOrRel(floats.Sum(gQ), floats.Sum(cg1Q), structure.tol, structure.tol) { + t.Errorf("unexpected Q result 
after second reduction: got: %v want :%v", cg1Q, gQ) + } + } + } +} + +var localDirectedMultiplexMoveTests = []struct { + name string + layers []layer + structures []moveStructures +}{ + { + name: "blondel", + layers: []layer{{g: blondel, weight: 1}, {g: blondel, weight: 0.5}}, + structures: []moveStructures{ + { + memberships: []set{ + 0: linksTo(0, 1, 2, 4, 5), + 1: linksTo(3, 6, 7), + 2: linksTo(8, 9, 10, 12, 14, 15), + 3: linksTo(11, 13), + }, + targetNodes: []graph.Node{simple.Node(0)}, + resolution: 1, + tol: 1e-14, + }, + { + memberships: []set{ + 0: linksTo(0, 1, 2, 4, 5), + 1: linksTo(3, 6, 7), + 2: linksTo(8, 9, 10, 12, 14, 15), + 3: linksTo(11, 13), + }, + targetNodes: []graph.Node{simple.Node(3)}, + resolution: 1, + tol: 1e-14, + }, + { + memberships: []set{ + 0: linksTo(0, 1, 2, 4, 5), + 1: linksTo(3, 6, 7), + 2: linksTo(8, 9, 10, 12, 14, 15), + 3: linksTo(11, 13), + }, + // Case to demonstrate when A_aa != k_a^𝛼. + targetNodes: []graph.Node{simple.Node(3), simple.Node(2)}, + resolution: 1, + tol: 1e-14, + }, + }, + }, +} + +func TestMoveLocalDirectedMultiplex(t *testing.T) { + for _, test := range localDirectedMultiplexMoveTests { + g, weights, err := directedMultiplexFrom(test.layers) + if err != nil { + t.Errorf("unexpected error creating multiplex: %v", err) + continue + } + + for _, structure := range test.structures { + communities := make([][]graph.Node, len(structure.memberships)) + for i, c := range structure.memberships { + for n := range c { + communities[i] = append(communities[i], simple.Node(n)) + } + sort.Sort(ordered.ByID(communities[i])) + } + + r := reduceDirectedMultiplex(reduceDirectedMultiplex(g, nil, weights), communities, weights) + + l := newDirectedMultiplexLocalMover(r, r.communities, weights, []float64{structure.resolution}, true) + for _, n := range structure.targetNodes { + dQ, dst, src := l.deltaQ(n) + if dQ > 0 { + before := floats.Sum(QMultiplex(r, l.communities, weights, []float64{structure.resolution})) + l.move(dst, src) + after := floats.Sum(QMultiplex(r, l.communities, weights, []float64{structure.resolution})) + want := after - before + if !floats.EqualWithinAbsOrRel(dQ, want, structure.tol, structure.tol) { + t.Errorf("unexpected deltaQ: got: %v want: %v", dQ, want) + } + } + } + } + } +} + +func TestLouvainDirectedMultiplex(t *testing.T) { + const louvainIterations = 20 + + for _, test := range communityDirectedMultiplexQTests { + g, weights, err := directedMultiplexFrom(test.layers) + if err != nil { + t.Errorf("unexpected error creating multiplex: %v", err) + continue + } + + if test.structures[0].resolution != 1 { + panic("bad test: expect resolution=1") + } + want := make([][]graph.Node, len(test.structures[0].memberships)) + for i, c := range test.structures[0].memberships { + for n := range c { + want[i] = append(want[i], simple.Node(n)) + } + sort.Sort(ordered.ByID(want[i])) + } + sort.Sort(ordered.BySliceIDs(want)) + + var ( + got *ReducedDirectedMultiplex + bestQ = math.Inf(-1) + ) + // Modularize is randomised so we do this to + // ensure the level tests are consistent. + src := rand.New(rand.NewSource(1)) + for i := 0; i < louvainIterations; i++ { + r := ModularizeMultiplex(g, weights, nil, true, src).(*ReducedDirectedMultiplex) + if q := floats.Sum(QMultiplex(r, nil, weights, nil)); q > bestQ || math.IsNaN(q) { + bestQ = q + got = r + + if math.IsNaN(q) { + // Don't try again for non-connected case. 
+ break + } + } + + var qs []float64 + for p := r; p != nil; p = p.Expanded().(*ReducedDirectedMultiplex) { + qs = append(qs, floats.Sum(QMultiplex(p, nil, weights, nil))) + } + + // Recovery of Q values is reversed. + if reverse(qs); !sort.Float64sAreSorted(qs) { + t.Errorf("Q values not monotonically increasing: %.5v", qs) + } + } + + gotCommunities := got.Communities() + for _, c := range gotCommunities { + sort.Sort(ordered.ByID(c)) + } + sort.Sort(ordered.BySliceIDs(gotCommunities)) + if !reflect.DeepEqual(gotCommunities, want) { + t.Errorf("unexpected community membership for %s Q=%.4v:\n\tgot: %v\n\twant:%v", + test.name, bestQ, gotCommunities, want) + continue + } + + var levels []level + for p := got; p != nil; p = p.Expanded().(*ReducedDirectedMultiplex) { + var communities [][]graph.Node + if p.parent != nil { + communities = p.parent.Communities() + for _, c := range communities { + sort.Sort(ordered.ByID(c)) + } + sort.Sort(ordered.BySliceIDs(communities)) + } else { + communities = reduceDirectedMultiplex(g, nil, weights).Communities() + } + q := floats.Sum(QMultiplex(p, nil, weights, nil)) + if math.IsNaN(q) { + // Use an equalable flag value in place of NaN. + q = math.Inf(-1) + } + levels = append(levels, level{q: q, communities: communities}) + } + if !reflect.DeepEqual(levels, test.wantLevels) { + t.Errorf("unexpected level structure:\n\tgot: %v\n\twant:%v", levels, test.wantLevels) + } + } +} + +func TestNonContiguousDirectedMultiplex(t *testing.T) { + g := simple.NewDirectedGraph(0, 0) + for _, e := range []simple.Edge{ + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(4), T: simple.Node(5), W: 1}, + } { + g.SetEdge(e) + } + + func() { + defer func() { + r := recover() + if r != nil { + t.Error("unexpected panic with non-contiguous ID range") + } + }() + ModularizeMultiplex(DirectedLayers{g}, nil, nil, true, nil) + }() +} + +func BenchmarkLouvainDirectedMultiplex(b *testing.B) { + src := rand.New(rand.NewSource(1)) + for i := 0; i < b.N; i++ { + ModularizeMultiplex(DirectedLayers{dupGraphDirected}, nil, nil, true, src) + } +} + +func directedMultiplexFrom(raw []layer) (DirectedLayers, []float64, error) { + var layers []graph.Directed + var weights []float64 + for _, l := range raw { + g := simple.NewDirectedGraph(0, 0) + for u, e := range l.g { + // Add nodes that are not defined by an edge. + if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + w := 1.0 + if l.edgeWeight != 0 { + w = l.edgeWeight + } + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: w}) + } + } + layers = append(layers, g) + weights = append(weights, l.weight) + } + g, err := NewDirectedLayers(layers...) + if err != nil { + return nil, nil, err + } + return g, weights, nil +} diff --git a/graph/community/louvain_directed_test.go b/graph/community/louvain_directed_test.go new file mode 100644 index 00000000..0b54618f --- /dev/null +++ b/graph/community/louvain_directed_test.go @@ -0,0 +1,589 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package community + +import ( + "math" + "math/rand" + "reflect" + "sort" + "testing" + + "github.com/gonum/floats" + "github.com/gonum/graph" + "github.com/gonum/graph/internal/ordered" + "github.com/gonum/graph/simple" +) + +var communityDirectedQTests = []struct { + name string + g []set + structures []structure + + wantLevels []level +}{ + { + name: "simple_directed", + g: simpleDirected, + // community structure and modularity calculated by C++ implementation: louvain igraph. + // Note that louvain igraph returns Q as an unscaled value. + structures: []structure{ + { + resolution: 1, + memberships: []set{ + 0: linksTo(0, 1), + 1: linksTo(2, 3, 4), + }, + want: 0.5714285714285716 / 7, + tol: 1e-10, + }, + }, + wantLevels: []level{ + { + communities: [][]graph.Node{ + {simple.Node(0), simple.Node(1)}, + {simple.Node(2), simple.Node(3), simple.Node(4)}, + }, + q: 0.5714285714285716 / 7, + }, + { + communities: [][]graph.Node{ + {simple.Node(0)}, + {simple.Node(1)}, + {simple.Node(2)}, + {simple.Node(3)}, + {simple.Node(4)}, + }, + q: -1.2857142857142856 / 7, + }, + }, + }, + { + name: "zachary", + g: zachary, + // community structure and modularity calculated by C++ implementation: louvain igraph. + // Note that louvain igraph returns Q as an unscaled value. + structures: []structure{ + { + resolution: 1, + memberships: []set{ + 0: linksTo(0, 1, 2, 3, 7, 11, 12, 13, 17, 19, 21), + 1: linksTo(4, 5, 6, 10, 16), + 2: linksTo(8, 9, 14, 15, 18, 20, 22, 26, 29, 30, 32, 33), + 3: linksTo(23, 24, 25, 27, 28, 31), + }, + want: 34.3417721519 / 79 /* 5->6 and 6->5 because of co-equal rank */, tol: 1e-4, + }, + }, + wantLevels: []level{ + { + q: 0.43470597660631316, + communities: [][]graph.Node{ + {simple.Node(0), simple.Node(1), simple.Node(2), simple.Node(3), simple.Node(7), simple.Node(11), simple.Node(12), simple.Node(13), simple.Node(17), simple.Node(19), simple.Node(21)}, + {simple.Node(4), simple.Node(5), simple.Node(6), simple.Node(10), simple.Node(16)}, + {simple.Node(8), simple.Node(9), simple.Node(14), simple.Node(15), simple.Node(18), simple.Node(20), simple.Node(22), simple.Node(26), simple.Node(29), simple.Node(30), simple.Node(32), simple.Node(33)}, + {simple.Node(23), simple.Node(24), simple.Node(25), simple.Node(27), simple.Node(28), simple.Node(31)}, + }, + }, + { + q: 0.3911232174331037, + communities: [][]graph.Node{ + {simple.Node(0), simple.Node(1), simple.Node(2), simple.Node(3), simple.Node(7), simple.Node(11), simple.Node(12), simple.Node(13), simple.Node(17), simple.Node(19), simple.Node(21)}, + {simple.Node(4), simple.Node(10)}, + {simple.Node(5), simple.Node(6), simple.Node(16)}, + {simple.Node(8), simple.Node(30)}, + {simple.Node(9), simple.Node(14), simple.Node(15), simple.Node(18), simple.Node(20), simple.Node(22), simple.Node(32), simple.Node(33)}, + {simple.Node(23), simple.Node(24), simple.Node(25), simple.Node(27), simple.Node(28), simple.Node(31)}, + {simple.Node(26), simple.Node(29)}, + }, + }, + { + q: -0.014580996635154624, + communities: [][]graph.Node{ + {simple.Node(0)}, + {simple.Node(1)}, + {simple.Node(2)}, + {simple.Node(3)}, + {simple.Node(4)}, + {simple.Node(5)}, + {simple.Node(6)}, + {simple.Node(7)}, + {simple.Node(8)}, + {simple.Node(9)}, + {simple.Node(10)}, + {simple.Node(11)}, + {simple.Node(12)}, + {simple.Node(13)}, + {simple.Node(14)}, + {simple.Node(15)}, + {simple.Node(16)}, + {simple.Node(17)}, + {simple.Node(18)}, + {simple.Node(19)}, + {simple.Node(20)}, + {simple.Node(21)}, + {simple.Node(22)}, + {simple.Node(23)}, + 
{simple.Node(24)}, + {simple.Node(25)}, + {simple.Node(26)}, + {simple.Node(27)}, + {simple.Node(28)}, + {simple.Node(29)}, + {simple.Node(30)}, + {simple.Node(31)}, + {simple.Node(32)}, + {simple.Node(33)}, + }, + }, + }, + }, + { + name: "blondel", + g: blondel, + // community structure and modularity calculated by C++ implementation: louvain igraph. + // Note that louvain igraph returns Q as an unscaled value. + structures: []structure{ + { + resolution: 1, + memberships: []set{ + 0: linksTo(0, 1, 2, 3, 4, 5, 6, 7), + 1: linksTo(8, 9, 10, 11, 12, 13, 14, 15), + }, + want: 11.1428571429 / 28, tol: 1e-4, + }, + }, + wantLevels: []level{ + { + q: 0.3979591836734694, + communities: [][]graph.Node{ + {simple.Node(0), simple.Node(1), simple.Node(2), simple.Node(3), simple.Node(4), simple.Node(5), simple.Node(6), simple.Node(7)}, + {simple.Node(8), simple.Node(9), simple.Node(10), simple.Node(11), simple.Node(12), simple.Node(13), simple.Node(14), simple.Node(15)}, + }, + }, + { + q: 0.32525510204081637, + communities: [][]graph.Node{ + {simple.Node(0), simple.Node(3), simple.Node(5), simple.Node(7)}, + {simple.Node(1), simple.Node(2), simple.Node(4), simple.Node(6)}, + {simple.Node(8), simple.Node(10), simple.Node(11), simple.Node(13), simple.Node(15)}, + {simple.Node(9), simple.Node(12), simple.Node(14)}, + }, + }, + { + q: -0.022959183673469385, + communities: [][]graph.Node{ + {simple.Node(0)}, + {simple.Node(1)}, + {simple.Node(2)}, + {simple.Node(3)}, + {simple.Node(4)}, + {simple.Node(5)}, + {simple.Node(6)}, + {simple.Node(7)}, + {simple.Node(8)}, + {simple.Node(9)}, + {simple.Node(10)}, + {simple.Node(11)}, + {simple.Node(12)}, + {simple.Node(13)}, + {simple.Node(14)}, + {simple.Node(15)}, + }, + }, + }, + }, +} + +func TestCommunityQDirected(t *testing.T) { + for _, test := range communityDirectedQTests { + g := simple.NewDirectedGraph(0, 0) + for u, e := range test.g { + // Add nodes that are not defined by an edge. + if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1}) + } + } + for _, structure := range test.structures { + communities := make([][]graph.Node, len(structure.memberships)) + for i, c := range structure.memberships { + for n := range c { + communities[i] = append(communities[i], simple.Node(n)) + } + } + got := Q(g, communities, structure.resolution) + if !floats.EqualWithinAbsOrRel(got, structure.want, structure.tol, structure.tol) && !math.IsNaN(structure.want) { + for _, c := range communities { + sort.Sort(ordered.ByID(c)) + } + t.Errorf("unexpected Q value for %q %v: got: %v want: %v", + test.name, communities, got, structure.want) + } + } + } +} + +func TestCommunityDeltaQDirected(t *testing.T) { +tests: + for _, test := range communityDirectedQTests { + g := simple.NewDirectedGraph(0, 0) + for u, e := range test.g { + // Add nodes that are not defined by an edge. 
+ if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1}) + } + } + + rnd := rand.New(rand.NewSource(1)).Intn + for _, structure := range test.structures { + communityOf := make(map[int]int) + communities := make([][]graph.Node, len(structure.memberships)) + for i, c := range structure.memberships { + for n := range c { + communityOf[n] = i + communities[i] = append(communities[i], simple.Node(n)) + } + sort.Sort(ordered.ByID(communities[i])) + } + + before := Q(g, communities, structure.resolution) + + l := newDirectedLocalMover(reduceDirected(g, nil), communities, structure.resolution) + if l == nil { + if !math.IsNaN(before) { + t.Errorf("unexpected nil localMover with non-NaN Q graph: Q=%.4v", before) + } + continue tests + } + + // This is done to avoid run-to-run + // variation due to map iteration order. + sort.Sort(ordered.ByID(l.nodes)) + + l.shuffle(rnd) + + for _, target := range l.nodes { + got, gotDst, gotSrc := l.deltaQ(target) + + want, wantDst := math.Inf(-1), -1 + migrated := make([][]graph.Node, len(structure.memberships)) + for i, c := range structure.memberships { + for n := range c { + if n == target.ID() { + continue + } + migrated[i] = append(migrated[i], simple.Node(n)) + } + sort.Sort(ordered.ByID(migrated[i])) + } + + for i, c := range structure.memberships { + if i == communityOf[target.ID()] { + continue + } + connected := false + for n := range c { + if g.HasEdgeBetween(simple.Node(n), target) { + connected = true + break + } + } + if !connected { + continue + } + migrated[i] = append(migrated[i], target) + after := Q(g, migrated, structure.resolution) + migrated[i] = migrated[i][:len(migrated[i])-1] + if after-before > want { + want = after - before + wantDst = i + } + } + + if !floats.EqualWithinAbsOrRel(got, want, structure.tol, structure.tol) || gotDst != wantDst { + t.Errorf("unexpected result moving n=%d in c=%d of %s/%.4v: got: %.4v,%d want: %.4v,%d"+ + "\n\t%v\n\t%v", + target.ID(), communityOf[target.ID()], test.name, structure.resolution, got, gotDst, want, wantDst, + communities, migrated) + } + if gotSrc.community != communityOf[target.ID()] { + t.Errorf("unexpected source community index: got: %d want: %d", gotSrc, communityOf[target.ID()]) + } else if communities[gotSrc.community][gotSrc.node].ID() != target.ID() { + wantNodeIdx := -1 + for i, n := range communities[gotSrc.community] { + if n.ID() == target.ID() { + wantNodeIdx = i + break + } + } + t.Errorf("unexpected source node index: got: %d want: %d", gotSrc.node, wantNodeIdx) + } + } + } + } +} + +func TestReduceQConsistencyDirected(t *testing.T) { +tests: + for _, test := range communityDirectedQTests { + g := simple.NewDirectedGraph(0, 0) + for u, e := range test.g { + // Add nodes that are not defined by an edge. 
+ if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1}) + } + } + + for _, structure := range test.structures { + if math.IsNaN(structure.want) { + continue tests + } + + communities := make([][]graph.Node, len(structure.memberships)) + for i, c := range structure.memberships { + for n := range c { + communities[i] = append(communities[i], simple.Node(n)) + } + sort.Sort(ordered.ByID(communities[i])) + } + + gQ := Q(g, communities, structure.resolution) + gQnull := Q(g, nil, 1) + + cg0 := reduceDirected(g, nil) + cg0Qnull := Q(cg0, cg0.Structure(), 1) + if !floats.EqualWithinAbsOrRel(gQnull, cg0Qnull, structure.tol, structure.tol) { + t.Errorf("disagreement between null Q from method: %v and function: %v", cg0Qnull, gQnull) + } + cg0Q := Q(cg0, communities, structure.resolution) + if !floats.EqualWithinAbsOrRel(gQ, cg0Q, structure.tol, structure.tol) { + t.Errorf("unexpected Q result after initial reduction: got: %v want :%v", cg0Q, gQ) + } + + cg1 := reduceDirected(cg0, communities) + cg1Q := Q(cg1, cg1.Structure(), structure.resolution) + if !floats.EqualWithinAbsOrRel(gQ, cg1Q, structure.tol, structure.tol) { + t.Errorf("unexpected Q result after second reduction: got: %v want :%v", cg1Q, gQ) + } + } + } +} + +var localDirectedMoveTests = []struct { + name string + g []set + structures []moveStructures +}{ + { + name: "blondel", + g: blondel, + structures: []moveStructures{ + { + memberships: []set{ + 0: linksTo(0, 1, 2, 4, 5), + 1: linksTo(3, 6, 7), + 2: linksTo(8, 9, 10, 12, 14, 15), + 3: linksTo(11, 13), + }, + targetNodes: []graph.Node{simple.Node(0)}, + resolution: 1, + tol: 1e-14, + }, + { + memberships: []set{ + 0: linksTo(0, 1, 2, 4, 5), + 1: linksTo(3, 6, 7), + 2: linksTo(8, 9, 10, 12, 14, 15), + 3: linksTo(11, 13), + }, + targetNodes: []graph.Node{simple.Node(3)}, + resolution: 1, + tol: 1e-14, + }, + { + memberships: []set{ + 0: linksTo(0, 1, 2, 4, 5), + 1: linksTo(3, 6, 7), + 2: linksTo(8, 9, 10, 12, 14, 15), + 3: linksTo(11, 13), + }, + // Case to demonstrate when A_aa != k_a^𝛼. + targetNodes: []graph.Node{simple.Node(3), simple.Node(2)}, + resolution: 1, + tol: 1e-14, + }, + }, + }, +} + +func TestMoveLocalDirected(t *testing.T) { + for _, test := range localDirectedMoveTests { + g := simple.NewDirectedGraph(0, 0) + for u, e := range test.g { + // Add nodes that are not defined by an edge. 
+ if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1}) + } + } + + for _, structure := range test.structures { + communities := make([][]graph.Node, len(structure.memberships)) + for i, c := range structure.memberships { + for n := range c { + communities[i] = append(communities[i], simple.Node(n)) + } + sort.Sort(ordered.ByID(communities[i])) + } + + r := reduceDirected(reduceDirected(g, nil), communities) + + l := newDirectedLocalMover(r, r.communities, structure.resolution) + for _, n := range structure.targetNodes { + dQ, dst, src := l.deltaQ(n) + if dQ > 0 { + before := Q(r, l.communities, structure.resolution) + l.move(dst, src) + after := Q(r, l.communities, structure.resolution) + want := after - before + if !floats.EqualWithinAbsOrRel(dQ, want, structure.tol, structure.tol) { + t.Errorf("unexpected deltaQ: got: %v want: %v", dQ, want) + } + } + } + } + } +} + +func TestModularizeDirected(t *testing.T) { + const louvainIterations = 20 + + for _, test := range communityDirectedQTests { + g := simple.NewDirectedGraph(0, 0) + for u, e := range test.g { + // Add nodes that are not defined by an edge. + if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1}) + } + } + + if test.structures[0].resolution != 1 { + panic("bad test: expect resolution=1") + } + want := make([][]graph.Node, len(test.structures[0].memberships)) + for i, c := range test.structures[0].memberships { + for n := range c { + want[i] = append(want[i], simple.Node(n)) + } + sort.Sort(ordered.ByID(want[i])) + } + sort.Sort(ordered.BySliceIDs(want)) + + var ( + got *ReducedDirected + bestQ = math.Inf(-1) + ) + // Modularize is randomised so we do this to + // ensure the level tests are consistent. + src := rand.New(rand.NewSource(1)) + for i := 0; i < louvainIterations; i++ { + r := Modularize(g, 1, src).(*ReducedDirected) + if q := Q(r, nil, 1); q > bestQ || math.IsNaN(q) { + bestQ = q + got = r + + if math.IsNaN(q) { + // Don't try again for non-connected case. + break + } + } + + var qs []float64 + for p := r; p != nil; p = p.Expanded().(*ReducedDirected) { + qs = append(qs, Q(p, nil, 1)) + } + + // Recovery of Q values is reversed. + if reverse(qs); !sort.Float64sAreSorted(qs) { + t.Errorf("Q values not monotonically increasing: %.5v", qs) + } + } + + gotCommunities := got.Communities() + for _, c := range gotCommunities { + sort.Sort(ordered.ByID(c)) + } + sort.Sort(ordered.BySliceIDs(gotCommunities)) + if !reflect.DeepEqual(gotCommunities, want) { + t.Errorf("unexpected community membership for %s Q=%.4v:\n\tgot: %v\n\twant:%v", + test.name, bestQ, gotCommunities, want) + continue + } + + var levels []level + for p := got; p != nil; p = p.Expanded().(*ReducedDirected) { + var communities [][]graph.Node + if p.parent != nil { + communities = p.parent.Communities() + for _, c := range communities { + sort.Sort(ordered.ByID(c)) + } + sort.Sort(ordered.BySliceIDs(communities)) + } else { + communities = reduceDirected(g, nil).Communities() + } + q := Q(p, nil, 1) + if math.IsNaN(q) { + // Use an equalable flag value in place of NaN. 
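+				// reflect.DeepEqual never reports NaN as equal to
+				// itself, so retaining NaN here would make the
+				// levels comparison below fail unconditionally.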
+ q = math.Inf(-1) + } + levels = append(levels, level{q: q, communities: communities}) + } + if !reflect.DeepEqual(levels, test.wantLevels) { + t.Errorf("unexpected level structure:\n\tgot: %v\n\twant:%v", levels, test.wantLevels) + } + } +} + +func TestNonContiguousDirected(t *testing.T) { + g := simple.NewDirectedGraph(0, 0) + for _, e := range []simple.Edge{ + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(4), T: simple.Node(5), W: 1}, + } { + g.SetEdge(e) + } + + func() { + defer func() { + r := recover() + if r != nil { + t.Error("unexpected panic with non-contiguous ID range") + } + }() + Modularize(g, 1, nil) + }() +} + +func BenchmarkLouvainDirected(b *testing.B) { + src := rand.New(rand.NewSource(1)) + for i := 0; i < b.N; i++ { + Modularize(dupGraphDirected, 1, src) + } +} diff --git a/graph/community/louvain_test.go b/graph/community/louvain_test.go new file mode 100644 index 00000000..6abf3e91 --- /dev/null +++ b/graph/community/louvain_test.go @@ -0,0 +1,277 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package community + +import ( + "fmt" + "math/rand" + + "github.com/gonum/graph" + "github.com/gonum/graph/graphs/gen" + "github.com/gonum/graph/simple" +) + +// set is an integer set. +type set map[int]struct{} + +func linksTo(i ...int) set { + if len(i) == 0 { + return nil + } + s := make(set) + for _, v := range i { + s[v] = struct{}{} + } + return s +} + +type layer struct { + g []set + edgeWeight float64 // Zero edge weight is interpreted as 1.0. + weight float64 +} + +var ( + unconnected = []set{ /* Nodes 0-4 are implicit .*/ 5: nil} + + smallDumbell = []set{ + 0: linksTo(1, 2), + 1: linksTo(2), + 2: linksTo(3), + 3: linksTo(4, 5), + 4: linksTo(5), + 5: nil, + } + dumbellRepulsion = []set{ + 0: linksTo(4), + 1: linksTo(5), + 2: nil, + 3: nil, + 4: nil, + 5: nil, + } + + repulsion = []set{ + 0: linksTo(3, 4, 5), + 1: linksTo(3, 4, 5), + 2: linksTo(3, 4, 5), + 3: linksTo(0, 1, 2), + 4: linksTo(0, 1, 2), + 5: linksTo(0, 1, 2), + } + + simpleDirected = []set{ + 0: linksTo(1), + 1: linksTo(0, 4), + 2: linksTo(1), + 3: linksTo(0, 4), + 4: linksTo(2), + } + + // http://www.slate.com/blogs/the_world_/2014/07/17/the_middle_east_friendship_chart.html + middleEast = struct{ friends, complicated, enemies []set }{ + // green cells + friends: []set{ + 0: nil, + 1: linksTo(5, 7, 9, 12), + 2: linksTo(11), + 3: linksTo(4, 5, 10), + 4: linksTo(3, 5, 10), + 5: linksTo(1, 3, 4, 8, 10, 12), + 6: nil, + 7: linksTo(1, 12), + 8: linksTo(5, 9, 11), + 9: linksTo(1, 8, 12), + 10: linksTo(3, 4, 5), + 11: linksTo(2, 8), + 12: linksTo(1, 5, 7, 9), + }, + + // yellow cells + complicated: []set{ + 0: linksTo(2, 4), + 1: linksTo(4, 8), + 2: linksTo(0, 3, 4, 5, 8, 9), + 3: linksTo(2, 8, 11), + 4: linksTo(0, 1, 2, 8), + 5: linksTo(2), + 6: nil, + 7: linksTo(9, 11), + 8: linksTo(1, 2, 3, 4, 10, 12), + 9: linksTo(2, 7, 11), + 10: linksTo(8), + 11: linksTo(3, 7, 9, 12), + 12: linksTo(8, 11), + }, + + // red cells + enemies: []set{ + 0: linksTo(1, 3, 5, 6, 7, 8, 9, 10, 11, 12), + 1: linksTo(0, 2, 3, 6, 10, 11), + 2: linksTo(1, 6, 7, 10, 12), + 3: linksTo(0, 1, 6, 7, 9, 12), + 4: linksTo(6, 7, 9, 11, 12), + 5: linksTo(0, 6, 7, 9, 11), + 6: linksTo(0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12), + 7: linksTo(0, 2, 3, 4, 5, 6, 8, 10), + 8: linksTo(0, 6, 7), + 9: linksTo(0, 3, 4, 5, 6, 10), + 10: linksTo(0, 1, 2, 6, 7, 9, 11, 12), + 11: linksTo(0, 1, 4, 5, 6, 10), + 12: linksTo(0, 
2, 3, 4, 6, 10), + }, + } + + // W. W. Zachary, An information flow model for conflict and fission in small groups, + // Journal of Anthropological Research 33, 452-473 (1977). + // + // The edge list here is constructed such that all link descriptions + // head from a node with lower Page Rank to a node with higher Page + // Rank. This has no impact on undirected tests, but allows a sensible + // view for directed tests. + zachary = []set{ + 0: nil, // rank=0.097 + 1: linksTo(0, 2), // rank=0.05288 + 2: linksTo(0, 32), // rank=0.05708 + 3: linksTo(0, 1, 2), // rank=0.03586 + 4: linksTo(0, 6, 10), // rank=0.02198 + 5: linksTo(0, 6), // rank=0.02911 + 6: linksTo(0, 5), // rank=0.02911 + 7: linksTo(0, 1, 2, 3), // rank=0.02449 + 8: linksTo(0, 2, 32, 33), // rank=0.02977 + 9: linksTo(2, 33), // rank=0.01431 + 10: linksTo(0, 5), // rank=0.02198 + 11: linksTo(0), // rank=0.009565 + 12: linksTo(0, 3), // rank=0.01464 + 13: linksTo(0, 1, 2, 3, 33), // rank=0.02954 + 14: linksTo(32, 33), // rank=0.01454 + 15: linksTo(32, 33), // rank=0.01454 + 16: linksTo(5, 6), // rank=0.01678 + 17: linksTo(0, 1), // rank=0.01456 + 18: linksTo(32, 33), // rank=0.01454 + 19: linksTo(0, 1, 33), // rank=0.0196 + 20: linksTo(32, 33), // rank=0.01454 + 21: linksTo(0, 1), // rank=0.01456 + 22: linksTo(32, 33), // rank=0.01454 + 23: linksTo(32, 33), // rank=0.03152 + 24: linksTo(27, 31), // rank=0.02108 + 25: linksTo(23, 24, 31), // rank=0.02101 + 26: linksTo(29, 33), // rank=0.01504 + 27: linksTo(2, 23, 33), // rank=0.02564 + 28: linksTo(2, 31, 33), // rank=0.01957 + 29: linksTo(23, 32, 33), // rank=0.02629 + 30: linksTo(1, 8, 32, 33), // rank=0.02459 + 31: linksTo(0, 32, 33), // rank=0.03716 + 32: linksTo(33), // rank=0.07169 + 33: nil, // rank=0.1009 + } + + // doi:10.1088/1742-5468/2008/10/P10008 figure 1 + // + // The edge list here is constructed such that all link descriptions + // head from a node with lower Page Rank to a node with higher Page + // Rank. This has no impact on undirected tests, but allows a sensible + // view for directed tests. 
+ blondel = []set{ + 0: linksTo(2), // rank=0.06858 + 1: linksTo(2, 4, 7), // rank=0.05264 + 2: nil, // rank=0.08249 + 3: linksTo(0, 7), // rank=0.03884 + 4: linksTo(0, 2, 10), // rank=0.06754 + 5: linksTo(0, 2, 7, 11), // rank=0.06738 + 6: linksTo(2, 7, 11), // rank=0.0528 + 7: nil, // rank=0.07008 + 8: linksTo(10), // rank=0.09226 + 9: linksTo(8), // rank=0.05821 + 10: nil, // rank=0.1035 + 11: linksTo(8, 10), // rank=0.08538 + 12: linksTo(9, 10), // rank=0.04052 + 13: linksTo(10, 11), // rank=0.03855 + 14: linksTo(8, 9, 10), // rank=0.05621 + 15: linksTo(8), // rank=0.02506 + } +) + +type structure struct { + resolution float64 + memberships []set + want, tol float64 +} + +type level struct { + q float64 + communities [][]graph.Node +} + +type moveStructures struct { + memberships []set + targetNodes []graph.Node + + resolution float64 + tol float64 +} + +func reverse(f []float64) { + for i, j := 0, len(f)-1; i < j; i, j = i+1, j-1 { + f[i], f[j] = f[j], f[i] + } +} + +func hasNegative(f []float64) bool { + for _, v := range f { + if v < 0 { + return true + } + } + return false +} + +var ( + dupGraph = simple.NewUndirectedGraph(0, 0) + dupGraphDirected = simple.NewDirectedGraph(0, 0) +) + +func init() { + err := gen.Duplication(dupGraph, 1000, 0.8, 0.1, 0.5, rand.New(rand.NewSource(1))) + if err != nil { + panic(err) + } + + // Construct a directed graph from dupGraph + // such that every edge dupGraph is replaced + // with an edge that flows from the low node + // ID to the high node ID. + for _, e := range dupGraph.Edges() { + if e.To().ID() < e.From().ID() { + se := e.(simple.Edge) + se.F, se.T = se.T, se.F + e = se + } + dupGraphDirected.SetEdge(e) + } +} + +// This init function checks the Middle East relationship data. +func init() { + world := make([]set, len(middleEast.friends)) + for i := range world { + world[i] = make(set) + } + for _, relationships := range [][]set{middleEast.friends, middleEast.complicated, middleEast.enemies} { + for i, rel := range relationships { + for inter := range rel { + if _, ok := world[i][inter]; ok { + panic(fmt.Sprintf("unexpected relationship: %v--%v", i, inter)) + } + world[i][inter] = struct{}{} + } + } + } + for i := range world { + if len(world[i]) != len(middleEast.friends)-1 { + panic(fmt.Sprintf("missing relationship in %v: %v", i, world[i])) + } + } +} diff --git a/graph/community/louvain_undirected.go b/graph/community/louvain_undirected.go new file mode 100644 index 00000000..526cadaf --- /dev/null +++ b/graph/community/louvain_undirected.go @@ -0,0 +1,568 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package community + +import ( + "math" + "math/rand" + "sort" + + "golang.org/x/tools/container/intsets" + + "github.com/gonum/graph" + "github.com/gonum/graph/internal/ordered" +) + +// qUndirected returns the modularity Q score of the graph g subdivided into the +// given communities at the given resolution. If communities is nil, the +// unclustered modularity score is returned. The resolution parameter +// is γ as defined in Reichardt and Bornholdt doi:10.1103/PhysRevE.74.016110. +// qUndirected will panic if g has any edge with negative edge weight. +// +// Q = 1/2m \sum_{ij} [ A_{ij} - (\gamma k_i k_j)/2m ] \delta(c_i,c_j) +// +// graph.Undirect may be used as a shim to allow calculation of Q for +// directed graphs. 
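+//
+// For illustration only — a minimal sketch assuming the exported Q
+// entry point dispatches here for undirected graphs, with g and a
+// candidate division already constructed as in the package tests:
+//
+//	communities := [][]graph.Node{
+//		{simple.Node(0), simple.Node(1)},
+//		{simple.Node(2), simple.Node(3), simple.Node(4)},
+//	}
+//	score := Q(g, communities, 1)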
+func qUndirected(g graph.Undirected, communities [][]graph.Node, resolution float64) float64 { + nodes := g.Nodes() + weight := positiveWeightFuncFor(g) + + // Calculate the total edge weight of the graph + // and the table of penetrating edge weight sums. + var m2 float64 + k := make(map[int]float64, len(nodes)) + for _, u := range nodes { + w := weight(u, u) + for _, v := range g.From(u) { + w += weight(u, v) + } + m2 += w + k[u.ID()] = w + } + + if communities == nil { + var q float64 + for _, u := range nodes { + kU := k[u.ID()] + q += weight(u, u) - resolution*kU*kU/m2 + } + return q / m2 + } + + // Iterate over the communities, calculating + // the non-self edge weights for the upper + // triangle and adjust the diagonal. + var q float64 + for _, c := range communities { + for i, u := range c { + kU := k[u.ID()] + q += weight(u, u) - resolution*kU*kU/m2 + for _, v := range c[i+1:] { + q += 2 * (weight(u, v) - resolution*kU*k[v.ID()]/m2) + } + } + } + return q / m2 +} + +// louvainUndirected returns the hierarchical modularization of g at the given +// resolution using the Louvain algorithm. If src is nil, rand.Intn is used as +// the random generator. louvainUndirected will panic if g has any edge with negative edge +// weight. +// +// graph.Undirect may be used as a shim to allow modularization of directed graphs. +func louvainUndirected(g graph.Undirected, resolution float64, src *rand.Rand) *ReducedUndirected { + // See louvain.tex for a detailed description + // of the algorithm used here. + + c := reduceUndirected(g, nil) + rnd := rand.Intn + if src != nil { + rnd = src.Intn + } + for { + l := newUndirectedLocalMover(c, c.communities, resolution) + if l == nil { + return c + } + if done := l.localMovingHeuristic(rnd); done { + return c + } + c = reduceUndirected(c, l.communities) + } +} + +// ReducedUndirected is an undirected graph of communities derived from a +// parent graph by reduction. +type ReducedUndirected struct { + // nodes is the set of nodes held + // by the graph. In a ReducedUndirected + // the node ID is the index into + // nodes. + nodes []community + undirectedEdges + + // communities is the community + // structure of the graph. + communities [][]graph.Node + + parent *ReducedUndirected +} + +var ( + _ graph.Undirected = (*ReducedUndirected)(nil) + _ graph.Weighter = (*ReducedUndirected)(nil) + _ ReducedGraph = (*ReducedUndirected)(nil) +) + +// Communities returns the community memberships of the nodes in the +// graph used to generate the reduced graph. +func (g *ReducedUndirected) Communities() [][]graph.Node { + communities := make([][]graph.Node, len(g.communities)) + if g.parent == nil { + for i, members := range g.communities { + comm := make([]graph.Node, len(members)) + for j, n := range members { + nodes := g.nodes[n.ID()].nodes + if len(nodes) != 1 { + panic("community: unexpected number of nodes in base graph community") + } + comm[j] = nodes[0] + } + communities[i] = comm + } + return communities + } + sub := g.parent.Communities() + for i, members := range g.communities { + var comm []graph.Node + for _, n := range members { + comm = append(comm, sub[n.ID()]...) + } + communities[i] = comm + } + return communities +} + +// Structure returns the community structure of the current level of +// the module clustering. The first index of the returned value +// corresponds to the index of the nodes in the next higher level if +// it exists. The returned value should not be mutated. 
+func (g *ReducedUndirected) Structure() [][]graph.Node { + return g.communities +} + +// Expanded returns the next lower level of the module clustering or nil +// if at the lowest level. +func (g *ReducedUndirected) Expanded() ReducedGraph { + return g.parent +} + +// reduceUndirected returns a reduced graph constructed from g divided +// into the given communities. The communities value is mutated +// by the call to reduceUndirected. If communities is nil and g is a +// ReducedUndirected, it is returned unaltered. +func reduceUndirected(g graph.Undirected, communities [][]graph.Node) *ReducedUndirected { + if communities == nil { + if r, ok := g.(*ReducedUndirected); ok { + return r + } + + nodes := g.Nodes() + // TODO(kortschak) This sort is necessary really only + // for testing. In practice we would not be using the + // community provided by the user for a Q calculation. + // Probably we should use a function to map the + // communities in the test sets to the remapped order. + sort.Sort(ordered.ByID(nodes)) + communities = make([][]graph.Node, len(nodes)) + for i := range nodes { + communities[i] = []graph.Node{node(i)} + } + + weight := positiveWeightFuncFor(g) + r := ReducedUndirected{ + nodes: make([]community, len(nodes)), + undirectedEdges: undirectedEdges{ + edges: make([][]int, len(nodes)), + weights: make(map[[2]int]float64), + }, + communities: communities, + } + communityOf := make(map[int]int, len(nodes)) + for i, n := range nodes { + r.nodes[i] = community{id: i, nodes: []graph.Node{n}} + communityOf[n.ID()] = i + } + for _, u := range nodes { + var out []int + uid := communityOf[u.ID()] + for _, v := range g.From(u) { + vid := communityOf[v.ID()] + if vid != uid { + out = append(out, vid) + } + if uid < vid { + // Only store the weight once. + r.weights[[2]int{uid, vid}] = weight(u, v) + } + } + r.edges[uid] = out + } + return &r + } + + // Remove zero length communities destructively. + var commNodes int + for i := 0; i < len(communities); { + comm := communities[i] + if len(comm) == 0 { + communities[i] = communities[len(communities)-1] + communities[len(communities)-1] = nil + communities = communities[:len(communities)-1] + } else { + commNodes += len(comm) + i++ + } + } + + r := ReducedUndirected{ + nodes: make([]community, len(communities)), + undirectedEdges: undirectedEdges{ + edges: make([][]int, len(communities)), + weights: make(map[[2]int]float64), + }, + } + r.communities = make([][]graph.Node, len(communities)) + for i := range r.communities { + r.communities[i] = []graph.Node{node(i)} + } + if g, ok := g.(*ReducedUndirected); ok { + // Make sure we retain the truncated + // community structure. + g.communities = communities + r.parent = g + } + weight := positiveWeightFuncFor(g) + communityOf := make(map[int]int, commNodes) + for i, comm := range communities { + r.nodes[i] = community{id: i, nodes: comm} + for _, n := range comm { + communityOf[n.ID()] = i + } + } + for uid, comm := range communities { + var out []int + for i, u := range comm { + r.nodes[uid].weight += weight(u, u) + for _, v := range comm[i+1:] { + r.nodes[uid].weight += 2 * weight(u, v) + } + for _, v := range g.From(u) { + vid := communityOf[v.ID()] + found := false + for _, e := range out { + if e == vid { + found = true + break + } + } + if !found && vid != uid { + out = append(out, vid) + } + if uid < vid { + // Only store the weight once. 
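+					// Weights are keyed on the ordered pair
+					// {uid, vid} with uid < vid, so each
+					// undirected edge is accumulated from one
+					// endpoint only; lookups re-order the pair.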
+ r.weights[[2]int{uid, vid}] += weight(u, v) + } + } + } + r.edges[uid] = out + } + return &r +} + +// Has returns whether the node exists within the graph. +func (g *ReducedUndirected) Has(n graph.Node) bool { + id := n.ID() + return id >= 0 || id < len(g.nodes) +} + +// Nodes returns all the nodes in the graph. +func (g *ReducedUndirected) Nodes() []graph.Node { + nodes := make([]graph.Node, len(g.nodes)) + for i := range g.nodes { + nodes[i] = node(i) + } + return nodes +} + +// From returns all nodes in g that can be reached directly from u. +func (g *ReducedUndirected) From(u graph.Node) []graph.Node { + out := g.edges[u.ID()] + nodes := make([]graph.Node, len(out)) + for i, vid := range out { + nodes[i] = g.nodes[vid] + } + return nodes +} + +// HasEdgeBetween returns whether an edge exists between nodes x and y. +func (g *ReducedUndirected) HasEdgeBetween(x, y graph.Node) bool { + xid := x.ID() + yid := y.ID() + if xid == yid { + return false + } + if xid > yid { + xid, yid = yid, xid + } + _, ok := g.weights[[2]int{xid, yid}] + return ok +} + +// Edge returns the edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +func (g *ReducedUndirected) Edge(u, v graph.Node) graph.Edge { + uid := u.ID() + vid := v.ID() + if vid < uid { + uid, vid = vid, uid + } + w, ok := g.weights[[2]int{uid, vid}] + if !ok { + return nil + } + return edge{from: g.nodes[u.ID()], to: g.nodes[v.ID()], weight: w} +} + +// EdgeBetween returns the edge between nodes x and y. +func (g *ReducedUndirected) EdgeBetween(x, y graph.Node) graph.Edge { + return g.Edge(x, y) +} + +// Weight returns the weight for the edge between x and y if Edge(x, y) returns a non-nil Edge. +// If x and y are the same node the internal node weight is returned. If there is no joining +// edge between the two nodes the weight value returned is zero. Weight returns true if an edge +// exists between x and y or if x and y have the same ID, false otherwise. +func (g *ReducedUndirected) Weight(x, y graph.Node) (w float64, ok bool) { + xid := x.ID() + yid := y.ID() + if xid == yid { + return g.nodes[xid].weight, true + } + if xid > yid { + xid, yid = yid, xid + } + w, ok = g.weights[[2]int{xid, yid}] + return w, ok +} + +// undirectedLocalMover is a step in graph modularity optimization. +type undirectedLocalMover struct { + g *ReducedUndirected + + // nodes is the set of working nodes. + nodes []graph.Node + // edgeWeightOf is the weighted degree + // of each node indexed by ID. + edgeWeightOf []float64 + + // m2 is the total sum of + // edge weights in g. + m2 float64 + + // weight is the weight function + // provided by g or a function + // that returns the Weight value + // of the non-nil edge between x + // and y. + weight func(x, y graph.Node) float64 + + // communities is the current + // division of g. + communities [][]graph.Node + // memberships is a mapping between + // node ID and community membership. + memberships []int + + // resolution is the Reichardt and + // Bornholdt γ parameter as defined + // in doi:10.1103/PhysRevE.74.016110. + resolution float64 + + // moved indicates that a call to + // move has been made since the last + // call to shuffle. + moved bool + + // changed indicates that a move + // has been made since the creation + // of the local mover. + changed bool +} + +// newUndirectedLocalMover returns a new undirectedLocalMover initialized with +// the graph g, a set of communities and a modularity resolution parameter. 
The +// node IDs of g must be contiguous in [0,n) where n is the number of nodes. +// If g has a zero edge weight sum, nil is returned. +func newUndirectedLocalMover(g *ReducedUndirected, communities [][]graph.Node, resolution float64) *undirectedLocalMover { + nodes := g.Nodes() + l := undirectedLocalMover{ + g: g, + nodes: nodes, + edgeWeightOf: make([]float64, len(nodes)), + communities: communities, + memberships: make([]int, len(nodes)), + resolution: resolution, + weight: positiveWeightFuncFor(g), + } + + // Calculate the total edge weight of the graph + // and degree weights for each node. + for _, u := range l.nodes { + w := l.weight(u, u) + for _, v := range g.From(u) { + w += l.weight(u, v) + } + l.edgeWeightOf[u.ID()] = w + l.m2 += w + } + if l.m2 == 0 { + return nil + } + + // Assign membership mappings. + for i, c := range communities { + for _, u := range c { + l.memberships[u.ID()] = i + } + } + + return &l +} + +// localMovingHeuristic performs the Louvain local moving heuristic until +// no further moves can be made. It returns a boolean indicating that the +// undirectedLocalMover has not made any improvement to the community +// structure and so the Louvain algorithm is done. +func (l *undirectedLocalMover) localMovingHeuristic(rnd func(int) int) (done bool) { + for { + l.shuffle(rnd) + for _, n := range l.nodes { + dQ, dst, src := l.deltaQ(n) + if dQ <= 0 { + continue + } + l.move(dst, src) + } + if !l.moved { + return !l.changed + } + } +} + +// shuffle performs a Fisher-Yates shuffle on the nodes held by the +// undirectedLocalMover using the random source rnd which should return +// an integer in the range [0,n). +func (l *undirectedLocalMover) shuffle(rnd func(n int) int) { + l.moved = false + for i := range l.nodes[:len(l.nodes)-1] { + j := i + rnd(len(l.nodes)-i) + l.nodes[i], l.nodes[j] = l.nodes[j], l.nodes[i] + } +} + +// move moves the node at src to the community at dst. +func (l *undirectedLocalMover) move(dst int, src commIdx) { + l.moved = true + l.changed = true + + srcComm := l.communities[src.community] + n := srcComm[src.node] + + l.memberships[n.ID()] = dst + + l.communities[dst] = append(l.communities[dst], n) + srcComm[src.node], srcComm[len(srcComm)-1] = srcComm[len(srcComm)-1], nil + l.communities[src.community] = srcComm[:len(srcComm)-1] +} + +// deltaQ returns the highest gain in modularity attainable by moving +// n from its current community to another connected community and +// the index of the chosen destination. The index into the +// undirectedLocalMover's communities field is returned in src if n +// is in communities. +func (l *undirectedLocalMover) deltaQ(n graph.Node) (deltaQ float64, dst int, src commIdx) { + id := n.ID() + a_aa := l.weight(n, n) + k_a := l.edgeWeightOf[id] + m2 := l.m2 + gamma := l.resolution + + // Find communites connected to n. + var connected intsets.Sparse + // The following for loop is equivalent to: + // + // for _, v := range l.g.From(n) { + // connected.Insert(l.memberships[v.ID()]) + // } + // + // This is done to avoid an allocation. + for _, vid := range l.g.edges[id] { + connected.Insert(l.memberships[vid]) + } + // Insert the node's own community. + connected.Insert(l.memberships[id]) + + // Calculate the highest modularity gain + // from moving into another community and + // keep the index of that community. 
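+	//
+	// In outline (louvain.tex has the full derivation): with α the
+	// current community of n and β a candidate destination,
+	//
+	//	dQremove = k_a^α - a_aa - γ k_a (Σ_tot^α - k_a)/m2
+	//	dQadd    = k_a^β - γ k_a Σ_tot^β/m2
+	//
+	// and the value returned is 2(dQadd - dQremove)/m2, where m2 is
+	// the total edge weight sum of the graph.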
+ var dQremove float64 + dQadd, dst, src := math.Inf(-1), -1, commIdx{-1, -1} + var i int + for connected.TakeMin(&i) { + c := l.communities[i] + var k_aC, sigma_totC float64 // C is a substitution for ^𝛼 or ^𝛽. + var removal bool + for j, u := range c { + uid := u.ID() + if uid == id { + if src.community != -1 { + panic("community: multiple sources") + } + src = commIdx{i, j} + removal = true + } + + k_aC += l.weight(n, u) + // sigma_totC could be kept for each community + // and updated for moves, changing the calculation + // of sigma_totC here from O(n_c) to O(1), but + // in practice the time savings do not appear + // to be compelling and do not make up for the + // increase in code complexity and space required. + sigma_totC += l.edgeWeightOf[uid] + } + + // See louvain.tex for a derivation of these equations. + switch { + case removal: + // The community c was the current community, + // so calculate the change due to removal. + dQremove = k_aC /*^𝛼*/ - a_aa - gamma*k_a*(sigma_totC /*^𝛼*/ -k_a)/m2 + + default: + // Otherwise calculate the change due to an addition + // to c and retain if it is the current best. + dQ := k_aC /*^𝛽*/ - gamma*k_a*sigma_totC /*^𝛽*/ /m2 + if dQ > dQadd { + dQadd = dQ + dst = i + } + } + } + + return 2 * (dQadd - dQremove) / m2, dst, src +} diff --git a/graph/community/louvain_undirected_multiplex.go b/graph/community/louvain_undirected_multiplex.go new file mode 100644 index 00000000..355635e5 --- /dev/null +++ b/graph/community/louvain_undirected_multiplex.go @@ -0,0 +1,811 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package community + +import ( + "fmt" + "math" + "math/rand" + "sort" + + "golang.org/x/tools/container/intsets" + + "github.com/gonum/graph" + "github.com/gonum/graph/internal/ordered" +) + +// UndirectedMultiplex is an undirected multiplex graph. +type UndirectedMultiplex interface { + Multiplex + + // Layer returns the lth layer of the + // multiplex graph. + Layer(l int) graph.Undirected +} + +// qUndirectedMultiplex returns the modularity Q score of the multiplex graph layers +// subdivided into the given communities at the given resolutions and weights. Q is +// returned as the vector of weighted Q scores for each layer of the multiplex graph. +// If communities is nil, the unclustered modularity score is returned. +// If weights is nil layers are equally weighted, otherwise the length of +// weights must equal the number of layers. If resolutions is nil, a resolution +// of 1.0 is used for all layers, otherwise either a single element slice may be used +// to specify a global resolution, or the length of resolutions must equal the number +// of layers. The resolution parameter is γ as defined in Reichardt and Bornholdt +// doi:10.1103/PhysRevE.74.016110. +// qUndirectedMultiplex will panic if the graph has any layer weight-scaled edge with +// negative edge weight. +// +// Q_{layer} = w_{layer} \sum_{ij} [ A_{layer}*_{ij} - (\gamma_{layer} k_i k_j)/2m ] \delta(c_i,c_j) +// +// Note that Q values for multiplex graphs are not scaled by the total layer edge weight. +// +// graph.Undirect may be used as a shim to allow calculation of Q for +// directed graphs. 
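+//
+// For illustration only — a minimal sketch assuming the exported
+// QMultiplex entry point dispatches here for undirected multiplex
+// graphs, and that layers a and b share a common node ID set:
+//
+//	g, err := NewUndirectedLayers(a, b)
+//	if err != nil {
+//		// handle error
+//	}
+//	scores := QMultiplex(g, communities, []float64{1, 0.5}, nil)
+//	total := floats.Sum(scores)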
+func qUndirectedMultiplex(g UndirectedMultiplex, communities [][]graph.Node, weights, resolutions []float64) []float64 { + q := make([]float64, g.Depth()) + nodes := g.Nodes() + layerWeight := 1.0 + layerResolution := 1.0 + if len(resolutions) == 1 { + layerResolution = resolutions[0] + } + for l := 0; l < g.Depth(); l++ { + layer := g.Layer(l) + + if weights != nil { + layerWeight = weights[l] + } + if layerWeight == 0 { + continue + } + + if len(resolutions) > 1 { + layerResolution = resolutions[l] + } + + var weight func(x, y graph.Node) float64 + if layerWeight < 0 { + weight = negativeWeightFuncFor(layer) + } else { + weight = positiveWeightFuncFor(layer) + } + + // Calculate the total edge weight of the layer + // and the table of penetrating edge weight sums. + var m2 float64 + k := make(map[int]float64, len(nodes)) + for _, u := range nodes { + w := weight(u, u) + for _, v := range layer.From(u) { + w += weight(u, v) + } + m2 += w + k[u.ID()] = w + } + + if communities == nil { + var qLayer float64 + for _, u := range nodes { + kU := k[u.ID()] + qLayer += weight(u, u) - layerResolution*kU*kU/m2 + } + q[l] = layerWeight * qLayer + continue + } + + // Iterate over the communities, calculating + // the non-self edge weights for the upper + // triangle and adjust the diagonal. + var qLayer float64 + for _, c := range communities { + for i, u := range c { + kU := k[u.ID()] + qLayer += weight(u, u) - layerResolution*kU*kU/m2 + for _, v := range c[i+1:] { + qLayer += 2 * (weight(u, v) - layerResolution*kU*k[v.ID()]/m2) + } + } + } + q[l] = layerWeight * qLayer + } + + return q +} + +// UndirectedLayers implements UndirectedMultiplex. +type UndirectedLayers []graph.Undirected + +// NewUndirectedLayers returns an UndirectedLayers using the provided layers +// ensuring there is a match between IDs for each layer. +func NewUndirectedLayers(layers ...graph.Undirected) (UndirectedLayers, error) { + if len(layers) == 0 { + return nil, nil + } + var base, next intsets.Sparse + for _, n := range layers[0].Nodes() { + base.Insert(n.ID()) + } + for i, l := range layers[1:] { + next.Clear() + for _, n := range l.Nodes() { + next.Insert(n.ID()) + } + if !next.Equals(&base) { + return nil, fmt.Errorf("community: layer ID mismatch between layers: %d", i+1) + } + } + return layers, nil +} + +// Nodes returns the nodes of the receiver. +func (g UndirectedLayers) Nodes() []graph.Node { + if len(g) == 0 { + return nil + } + return g[0].Nodes() +} + +// Depth returns the depth of the multiplex graph. +func (g UndirectedLayers) Depth() int { return len(g) } + +// Layer returns the lth layer of the multiplex graph. +func (g UndirectedLayers) Layer(l int) graph.Undirected { return g[l] } + +// louvainUndirectedMultiplex returns the hierarchical modularization of g at the given resolution +// using the Louvain algorithm. If all is true and g has negatively weighted layers, all +// communities will be searched during the modularization. If src is nil, rand.Intn is +// used as the random generator. louvainUndirectedMultiplex will panic if g has any edge with +// edge weight that does not sign-match the layer weight. +// +// graph.Undirect may be used as a shim to allow modularization of directed graphs. 
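+//
+// A minimal usage sketch via the exported ModularizeMultiplex entry
+// point (assuming it reduces to this function for an UndirectedLayers
+// value, as the package tests do for the directed equivalent):
+//
+//	r := ModularizeMultiplex(layers, weights, nil, true, nil)
+//	for _, c := range r.Communities() {
+//		fmt.Println(c)
+//	}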
+func louvainUndirectedMultiplex(g UndirectedMultiplex, weights, resolutions []float64, all bool, src *rand.Rand) *ReducedUndirectedMultiplex { + if weights != nil && len(weights) != g.Depth() { + panic("community: weights vector length mismatch") + } + if resolutions != nil && len(resolutions) != 1 && len(resolutions) != g.Depth() { + panic("community: resolutions vector length mismatch") + } + + // See louvain.tex for a detailed description + // of the algorithm used here. + + c := reduceUndirectedMultiplex(g, nil, weights) + rnd := rand.Intn + if src != nil { + rnd = src.Intn + } + for { + l := newUndirectedMultiplexLocalMover(c, c.communities, weights, resolutions, all) + if l == nil { + return c + } + if done := l.localMovingHeuristic(rnd); done { + return c + } + c = reduceUndirectedMultiplex(c, l.communities, weights) + } +} + +// ReducedUndirectedMultiplex is an undirected graph of communities derived from a +// parent graph by reduction. +type ReducedUndirectedMultiplex struct { + // nodes is the set of nodes held + // by the graph. In a ReducedUndirectedMultiplex + // the node ID is the index into + // nodes. + nodes []multiplexCommunity + layers []undirectedEdges + + // communities is the community + // structure of the graph. + communities [][]graph.Node + + parent *ReducedUndirectedMultiplex +} + +var ( + _ UndirectedMultiplex = (*ReducedUndirectedMultiplex)(nil) + _ graph.Undirected = (*undirectedLayerHandle)(nil) + _ graph.Weighter = (*undirectedLayerHandle)(nil) +) + +// Nodes returns all the nodes in the graph. +func (g *ReducedUndirectedMultiplex) Nodes() []graph.Node { + nodes := make([]graph.Node, len(g.nodes)) + for i := range g.nodes { + nodes[i] = node(i) + } + return nodes +} + +// Depth returns the number of layers in the multiplex graph. +func (g *ReducedUndirectedMultiplex) Depth() int { return len(g.layers) } + +// Layer returns the lth layer of the multiplex graph. +func (g *ReducedUndirectedMultiplex) Layer(l int) graph.Undirected { + return undirectedLayerHandle{multiplex: g, layer: l} +} + +// Communities returns the community memberships of the nodes in the +// graph used to generate the reduced graph. +func (g *ReducedUndirectedMultiplex) Communities() [][]graph.Node { + communities := make([][]graph.Node, len(g.communities)) + if g.parent == nil { + for i, members := range g.communities { + comm := make([]graph.Node, len(members)) + for j, n := range members { + nodes := g.nodes[n.ID()].nodes + if len(nodes) != 1 { + panic("community: unexpected number of nodes in base graph community") + } + comm[j] = nodes[0] + } + communities[i] = comm + } + return communities + } + sub := g.parent.Communities() + for i, members := range g.communities { + var comm []graph.Node + for _, n := range members { + comm = append(comm, sub[n.ID()]...) + } + communities[i] = comm + } + return communities +} + +// Structure returns the community structure of the current level of +// the module clustering. The first index of the returned value +// corresponds to the index of the nodes in the next higher level if +// it exists. The returned value should not be mutated. +func (g *ReducedUndirectedMultiplex) Structure() [][]graph.Node { + return g.communities +} + +// Expanded returns the next lower level of the module clustering or nil +// if at the lowest level. +func (g *ReducedUndirectedMultiplex) Expanded() ReducedMultiplex { + return g.parent +} + +// reduceUndirectedMultiplex returns a reduced graph constructed from g divided +// into the given communities. 
The communities value is mutated +// by the call to reduceUndirectedMultiplex. If communities is nil and g is a +// ReducedUndirectedMultiplex, it is returned unaltered. +func reduceUndirectedMultiplex(g UndirectedMultiplex, communities [][]graph.Node, weights []float64) *ReducedUndirectedMultiplex { + if communities == nil { + if r, ok := g.(*ReducedUndirectedMultiplex); ok { + return r + } + + nodes := g.Nodes() + // TODO(kortschak) This sort is necessary really only + // for testing. In practice we would not be using the + // community provided by the user for a Q calculation. + // Probably we should use a function to map the + // communities in the test sets to the remapped order. + sort.Sort(ordered.ByID(nodes)) + communities = make([][]graph.Node, len(nodes)) + for i := range nodes { + communities[i] = []graph.Node{node(i)} + } + + r := ReducedUndirectedMultiplex{ + nodes: make([]multiplexCommunity, len(nodes)), + layers: make([]undirectedEdges, g.Depth()), + communities: communities, + } + communityOf := make(map[int]int, len(nodes)) + for i, n := range nodes { + r.nodes[i] = multiplexCommunity{id: i, nodes: []graph.Node{n}, weights: make([]float64, depth(weights))} + communityOf[n.ID()] = i + } + for i := range r.layers { + r.layers[i] = undirectedEdges{ + edges: make([][]int, len(nodes)), + weights: make(map[[2]int]float64), + } + } + w := 1.0 + for l := 0; l < g.Depth(); l++ { + layer := g.Layer(l) + if weights != nil { + w = weights[l] + } + if w == 0 { + continue + } + var sign float64 + var weight func(x, y graph.Node) float64 + if w < 0 { + sign, weight = -1, negativeWeightFuncFor(layer) + } else { + sign, weight = 1, positiveWeightFuncFor(layer) + } + for _, u := range nodes { + var out []int + uid := communityOf[u.ID()] + for _, v := range layer.From(u) { + vid := communityOf[v.ID()] + if vid != uid { + out = append(out, vid) + } + if uid < vid { + // Only store the weight once. + r.layers[l].weights[[2]int{uid, vid}] = sign * weight(u, v) + } + } + r.layers[l].edges[uid] = out + } + } + return &r + } + + // Remove zero length communities destructively. + var commNodes int + for i := 0; i < len(communities); { + comm := communities[i] + if len(comm) == 0 { + communities[i] = communities[len(communities)-1] + communities[len(communities)-1] = nil + communities = communities[:len(communities)-1] + } else { + commNodes += len(comm) + i++ + } + } + + r := ReducedUndirectedMultiplex{ + nodes: make([]multiplexCommunity, len(communities)), + layers: make([]undirectedEdges, g.Depth()), + } + communityOf := make(map[int]int, commNodes) + for i, comm := range communities { + r.nodes[i] = multiplexCommunity{id: i, nodes: comm, weights: make([]float64, depth(weights))} + for _, n := range comm { + communityOf[n.ID()] = i + } + } + for i := range r.layers { + r.layers[i] = undirectedEdges{ + edges: make([][]int, len(communities)), + weights: make(map[[2]int]float64), + } + } + r.communities = make([][]graph.Node, len(communities)) + for i := range r.communities { + r.communities[i] = []graph.Node{node(i)} + } + if g, ok := g.(*ReducedUndirectedMultiplex); ok { + // Make sure we retain the truncated + // community structure. 
+ g.communities = communities + r.parent = g + } + w := 1.0 + for l := 0; l < g.Depth(); l++ { + layer := g.Layer(l) + if weights != nil { + w = weights[l] + } + if w == 0 { + continue + } + var sign float64 + var weight func(x, y graph.Node) float64 + if w < 0 { + sign, weight = -1, negativeWeightFuncFor(layer) + } else { + sign, weight = 1, positiveWeightFuncFor(layer) + } + for uid, comm := range communities { + var out []int + for i, u := range comm { + r.nodes[uid].weights[l] += sign * weight(u, u) + for _, v := range comm[i+1:] { + r.nodes[uid].weights[l] += 2 * sign * weight(u, v) + } + for _, v := range layer.From(u) { + vid := communityOf[v.ID()] + found := false + for _, e := range out { + if e == vid { + found = true + break + } + } + if !found && vid != uid { + out = append(out, vid) + } + if uid < vid { + // Only store the weight once. + r.layers[l].weights[[2]int{uid, vid}] += sign * weight(u, v) + } + } + } + r.layers[l].edges[uid] = out + } + } + return &r +} + +// undirectedLayerHandle is a handle to a multiplex graph layer. +type undirectedLayerHandle struct { + // multiplex is the complete + // multiplex graph. + multiplex *ReducedUndirectedMultiplex + + // layer is an index into the + // multiplex for the current + // layer. + layer int +} + +// Has returns whether the node exists within the graph. +func (g undirectedLayerHandle) Has(n graph.Node) bool { + id := n.ID() + return id >= 0 || id < len(g.multiplex.nodes) +} + +// Nodes returns all the nodes in the graph. +func (g undirectedLayerHandle) Nodes() []graph.Node { + nodes := make([]graph.Node, len(g.multiplex.nodes)) + for i := range g.multiplex.nodes { + nodes[i] = node(i) + } + return nodes +} + +// From returns all nodes in g that can be reached directly from u. +func (g undirectedLayerHandle) From(u graph.Node) []graph.Node { + out := g.multiplex.layers[g.layer].edges[u.ID()] + nodes := make([]graph.Node, len(out)) + for i, vid := range out { + nodes[i] = g.multiplex.nodes[vid] + } + return nodes +} + +// HasEdgeBetween returns whether an edge exists between nodes x and y. +func (g undirectedLayerHandle) HasEdgeBetween(x, y graph.Node) bool { + xid := x.ID() + yid := y.ID() + if xid == yid { + return false + } + if xid > yid { + xid, yid = yid, xid + } + _, ok := g.multiplex.layers[g.layer].weights[[2]int{xid, yid}] + return ok +} + +// Edge returns the edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +func (g undirectedLayerHandle) Edge(u, v graph.Node) graph.Edge { + uid := u.ID() + vid := v.ID() + if vid < uid { + uid, vid = vid, uid + } + w, ok := g.multiplex.layers[g.layer].weights[[2]int{uid, vid}] + if !ok { + return nil + } + return multiplexEdge{from: g.multiplex.nodes[u.ID()], to: g.multiplex.nodes[v.ID()], weight: w} +} + +// EdgeBetween returns the edge between nodes x and y. +func (g undirectedLayerHandle) EdgeBetween(x, y graph.Node) graph.Edge { + return g.Edge(x, y) +} + +// Weight returns the weight for the edge between x and y if Edge(x, y) returns a non-nil Edge. +// If x and y are the same node the internal node weight is returned. If there is no joining +// edge between the two nodes the weight value returned is zero. Weight returns true if an edge +// exists between x and y or if x and y have the same ID, false otherwise. 
+func (g undirectedLayerHandle) Weight(x, y graph.Node) (w float64, ok bool) { + xid := x.ID() + yid := y.ID() + if xid == yid { + return g.multiplex.nodes[xid].weights[g.layer], true + } + if xid > yid { + xid, yid = yid, xid + } + w, ok = g.multiplex.layers[g.layer].weights[[2]int{xid, yid}] + return w, ok +} + +// undirectedMultiplexLocalMover is a step in graph modularity optimization. +type undirectedMultiplexLocalMover struct { + g *ReducedUndirectedMultiplex + + // nodes is the set of working nodes. + nodes []graph.Node + // edgeWeightOf is the weighted degree + // of each node indexed by ID. + edgeWeightOf [][]float64 + + // m2 is the total sum of + // edge weights in g. + m2 []float64 + + // weight is the weight function + // provided by g or a function + // that returns the Weight value + // of the non-nil edge between x + // and y. + weight []func(x, y graph.Node) float64 + + // communities is the current + // division of g. + communities [][]graph.Node + // memberships is a mapping between + // node ID and community membership. + memberships []int + + // resolution is the Reichardt and + // Bornholdt γ parameter as defined + // in doi:10.1103/PhysRevE.74.016110. + resolutions []float64 + + // weights is the layer weights for + // the modularisation. + weights []float64 + + // searchAll specifies whether the local + // mover should consider non-connected + // communities during the local moving + // heuristic. + searchAll bool + + // moved indicates that a call to + // move has been made since the last + // call to shuffle. + moved bool + + // changed indicates that a move + // has been made since the creation + // of the local mover. + changed bool +} + +// newUndirectedMultiplexLocalMover returns a new undirectedMultiplexLocalMover initialized with +// the graph g, a set of communities and a modularity resolution parameter. The +// node IDs of g must be contiguous in [0,n) where n is the number of nodes. +// If g has a zero edge weight sum, nil is returned. +func newUndirectedMultiplexLocalMover(g *ReducedUndirectedMultiplex, communities [][]graph.Node, weights, resolutions []float64, all bool) *undirectedMultiplexLocalMover { + nodes := g.Nodes() + l := undirectedMultiplexLocalMover{ + g: g, + nodes: nodes, + edgeWeightOf: make([][]float64, g.Depth()), + m2: make([]float64, g.Depth()), + communities: communities, + memberships: make([]int, len(nodes)), + resolutions: resolutions, + weights: weights, + weight: make([]func(x, y graph.Node) float64, g.Depth()), + } + + // Calculate the total edge weight of the graph + // and degree weights for each node. + var zero int + for i := 0; i < g.Depth(); i++ { + l.edgeWeightOf[i] = make([]float64, len(nodes)) + var weight func(x, y graph.Node) float64 + + if weights != nil { + if weights[i] == 0 { + zero++ + continue + } + if weights[i] < 0 { + weight = negativeWeightFuncFor(g.Layer(i)) + l.searchAll = all + } else { + weight = positiveWeightFuncFor(g.Layer(i)) + } + } else { + weight = positiveWeightFuncFor(g.Layer(i)) + } + + l.weight[i] = weight + layer := g.Layer(i) + for _, u := range l.nodes { + w := weight(u, u) + for _, v := range layer.From(u) { + w += weight(u, v) + } + l.edgeWeightOf[i][u.ID()] = w + l.m2[i] += w + } + if l.m2[i] == 0 { + zero++ + } + } + if zero == g.Depth() { + return nil + } + + // Assign membership mappings. 
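+	// memberships maps a node ID to the index of the community
+	// holding that node within the communities slice.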
+ for i, c := range communities { + for _, u := range c { + l.memberships[u.ID()] = i + } + } + + return &l +} + +// localMovingHeuristic performs the Louvain local moving heuristic until +// no further moves can be made. It returns a boolean indicating that the +// undirectedMultiplexLocalMover has not made any improvement to the community +// structure and so the Louvain algorithm is done. +func (l *undirectedMultiplexLocalMover) localMovingHeuristic(rnd func(int) int) (done bool) { + for { + l.shuffle(rnd) + for _, n := range l.nodes { + dQ, dst, src := l.deltaQ(n) + if dQ <= 0 { + continue + } + l.move(dst, src) + } + if !l.moved { + return !l.changed + } + } +} + +// shuffle performs a Fisher-Yates shuffle on the nodes held by the +// undirectedMultiplexLocalMover using the random source rnd which should return +// an integer in the range [0,n). +func (l *undirectedMultiplexLocalMover) shuffle(rnd func(n int) int) { + l.moved = false + for i := range l.nodes[:len(l.nodes)-1] { + j := i + rnd(len(l.nodes)-i) + l.nodes[i], l.nodes[j] = l.nodes[j], l.nodes[i] + } +} + +// move moves the node at src to the community at dst. +func (l *undirectedMultiplexLocalMover) move(dst int, src commIdx) { + l.moved = true + l.changed = true + + srcComm := l.communities[src.community] + n := srcComm[src.node] + + l.memberships[n.ID()] = dst + + l.communities[dst] = append(l.communities[dst], n) + srcComm[src.node], srcComm[len(srcComm)-1] = srcComm[len(srcComm)-1], nil + l.communities[src.community] = srcComm[:len(srcComm)-1] +} + +// deltaQ returns the highest gain in modularity attainable by moving +// n from its current community to another connected community and +// the index of the chosen destination. The index into the +// undirectedMultiplexLocalMover's communities field is returned in src if n +// is in communities. +func (l *undirectedMultiplexLocalMover) deltaQ(n graph.Node) (deltaQ float64, dst int, src commIdx) { + id := n.ID() + + var iterator minTaker + if l.searchAll { + iterator = &dense{n: len(l.communities)} + } else { + // Find communities connected to n. + var connected intsets.Sparse + // The following for loop is equivalent to: + // + // for i := 0; i < l.g.Depth(); i++ { + // for _, v := range l.g.Layer(i).From(n) { + // connected.Insert(l.memberships[v.ID()]) + // } + // } + // + // This is done to avoid an allocation for + // each layer. + for _, layer := range l.g.layers { + for _, vid := range layer.edges[id] { + connected.Insert(l.memberships[vid]) + } + } + // Insert the node's own community. + connected.Insert(l.memberships[id]) + iterator = &connected + } + + // Calculate the highest modularity gain + // from moving into another community and + // keep the index of that community. + var dQremove float64 + dQadd, dst, src := math.Inf(-1), -1, commIdx{-1, -1} + var i int + for iterator.TakeMin(&i) { + c := l.communities[i] + var removal bool + var _dQadd float64 + for layer := 0; layer < l.g.Depth(); layer++ { + m2 := l.m2[layer] + if m2 == 0 { + // Do not consider layers with zero sum edge weight. + continue + } + w := 1.0 + if l.weights != nil { + w = l.weights[layer] + } + if w == 0 { + // Do not consider layers with zero weighting. + continue + } + + var k_aC, sigma_totC float64 // C is a substitution for ^𝛼 or ^𝛽. + removal = false + for j, u := range c { + uid := u.ID() + if uid == id { + // Only mark and check src community on the first layer. 
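+					// (The position of n in l.communities is the same for
+					// every layer, so src is recorded once here; marking it
+					// again on later layers would trip the multiple-source
+					// panic below.)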
+ if layer == 0 { + if src.community != -1 { + panic("community: multiple sources") + } + src = commIdx{i, j} + } + removal = true + } + + k_aC += l.weight[layer](n, u) + // sigma_totC could be kept for each community + // and updated for moves, changing the calculation + // of sigma_totC here from O(n_c) to O(1), but + // in practice the time savings do not appear + // to be compelling and do not make up for the + // increase in code complexity and space required. + sigma_totC += l.edgeWeightOf[layer][uid] + } + + a_aa := l.weight[layer](n, n) + k_a := l.edgeWeightOf[layer][id] + gamma := 1.0 + if l.resolutions != nil { + if len(l.resolutions) == 1 { + gamma = l.resolutions[0] + } else { + gamma = l.resolutions[layer] + } + } + + // See louvain.tex for a derivation of these equations. + // The weighting term, w, is described in V Traag, + // "Algorithms and dynamical models for communities and + // reputation in social networks", chapter 5. + // http://www.traag.net/wp/wp-content/papercite-data/pdf/traag_algorithms_2013.pdf + switch { + case removal: + // The community c was the current community, + // so calculate the change due to removal. + dQremove += w * (k_aC /*^𝛼*/ - a_aa - gamma*k_a*(sigma_totC /*^𝛼*/ -k_a)/m2) + + default: + // Otherwise calculate the change due to an addition + // to c. + _dQadd += w * (k_aC /*^𝛽*/ - gamma*k_a*sigma_totC /*^𝛽*/ /m2) + } + } + if !removal && _dQadd > dQadd { + dQadd = _dQadd + dst = i + } + } + + return 2 * (dQadd - dQremove), dst, src +} diff --git a/graph/community/louvain_undirected_multiplex_test.go b/graph/community/louvain_undirected_multiplex_test.go new file mode 100644 index 00000000..2bf28cb5 --- /dev/null +++ b/graph/community/louvain_undirected_multiplex_test.go @@ -0,0 +1,669 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package community + +import ( + "math" + "math/rand" + "reflect" + "sort" + "testing" + + "github.com/gonum/floats" + "github.com/gonum/graph" + "github.com/gonum/graph/internal/ordered" + "github.com/gonum/graph/simple" +) + +var communityUndirectedMultiplexQTests = []struct { + name string + layers []layer + structures []structure + + wantLevels []level +}{ + { + name: "unconnected", + layers: []layer{{g: unconnected, weight: 1}}, + structures: []structure{ + { + resolution: 1, + memberships: []set{ + 0: linksTo(0), + 1: linksTo(1), + 2: linksTo(2), + 3: linksTo(3), + 4: linksTo(4), + 5: linksTo(5), + }, + want: math.NaN(), + }, + }, + wantLevels: []level{ + { + q: math.Inf(-1), // Here math.Inf(-1) is used as a place holder for NaN to allow use of reflect.DeepEqual. 
+ communities: [][]graph.Node{ + {simple.Node(0)}, + {simple.Node(1)}, + {simple.Node(2)}, + {simple.Node(3)}, + {simple.Node(4)}, + {simple.Node(5)}, + }, + }, + }, + }, + { + name: "small_dumbell", + layers: []layer{ + {g: smallDumbell, edgeWeight: 1, weight: 1}, + {g: dumbellRepulsion, edgeWeight: -1, weight: -1}, + }, + structures: []structure{ + { + resolution: 1, + memberships: []set{ + 0: linksTo(0, 1, 2), + 1: linksTo(3, 4, 5), + }, + want: 7.0, tol: 1e-10, + }, + { + resolution: 1, + memberships: []set{ + 0: linksTo(0, 1, 2, 3, 4, 5), + }, + want: 0, tol: 1e-14, + }, + }, + wantLevels: []level{ + { + q: 7.0, + communities: [][]graph.Node{ + {simple.Node(0), simple.Node(1), simple.Node(2)}, + {simple.Node(3), simple.Node(4), simple.Node(5)}, + }, + }, + { + q: -1.4285714285714284, + communities: [][]graph.Node{ + {simple.Node(0)}, + {simple.Node(1)}, + {simple.Node(2)}, + {simple.Node(3)}, + {simple.Node(4)}, + {simple.Node(5)}, + }, + }, + }, + }, + { + name: "small_dumbell_twice", + layers: []layer{ + {g: smallDumbell, weight: 0.5}, + {g: smallDumbell, weight: 0.5}, + }, + structures: []structure{ + { + resolution: 1, + memberships: []set{ + 0: linksTo(0, 1, 2), + 1: linksTo(3, 4, 5), + }, + want: 5, tol: 1e-10, + }, + { + resolution: 1, + memberships: []set{ + 0: linksTo(0, 1, 2, 3, 4, 5), + }, + want: 0, tol: 1e-14, + }, + }, + wantLevels: []level{ + { + q: 0.35714285714285715 * 14, + communities: [][]graph.Node{ + {simple.Node(0), simple.Node(1), simple.Node(2)}, + {simple.Node(3), simple.Node(4), simple.Node(5)}, + }, + }, + { + q: -0.17346938775510204 * 14, + communities: [][]graph.Node{ + {simple.Node(0)}, + {simple.Node(1)}, + {simple.Node(2)}, + {simple.Node(3)}, + {simple.Node(4)}, + {simple.Node(5)}, + }, + }, + }, + }, + { + name: "repulsion", + layers: []layer{{g: repulsion, edgeWeight: -1, weight: -1}}, + structures: []structure{ + { + resolution: 1, + memberships: []set{ + 0: linksTo(0, 1, 2), + 1: linksTo(3, 4, 5), + }, + want: 9.0, tol: 1e-10, + }, + { + resolution: 1, + memberships: []set{ + 0: linksTo(0), + 1: linksTo(1), + 2: linksTo(2), + 3: linksTo(3), + 4: linksTo(4), + 5: linksTo(5), + }, + want: 3, tol: 1e-14, + }, + }, + wantLevels: []level{ + { + q: 9.0, + communities: [][]graph.Node{ + {simple.Node(0), simple.Node(1), simple.Node(2)}, + {simple.Node(3), simple.Node(4), simple.Node(5)}, + }, + }, + { + q: 3.0, + communities: [][]graph.Node{ + {simple.Node(0)}, + {simple.Node(1)}, + {simple.Node(2)}, + {simple.Node(3)}, + {simple.Node(4)}, + {simple.Node(5)}, + }, + }, + }, + }, + { + name: "middle_east", + layers: []layer{ + {g: middleEast.friends, edgeWeight: 1, weight: 1}, + {g: middleEast.enemies, edgeWeight: -1, weight: -1}, + }, + structures: []structure{ + { + resolution: 1, + memberships: []set{ + 0: linksTo(0, 6), + 1: linksTo(1, 7, 9, 12), + 2: linksTo(2, 8, 11), + 3: linksTo(3, 4, 5, 10), + }, + want: 33.8180574555, tol: 1e-9, + }, + { + resolution: 1, + memberships: []set{ + 0: linksTo(0, 2, 3, 4, 5, 10), + 1: linksTo(1, 7, 9, 12), + 2: linksTo(6), + 3: linksTo(8, 11), + }, + want: 30.92749658, tol: 1e-7, + }, + { + resolution: 1, + memberships: []set{ + 0: linksTo(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12), + }, + want: 0, tol: 1e-14, + }, + }, + wantLevels: []level{ + { + q: 33.818057455540355, + communities: [][]graph.Node{ + {simple.Node(0), simple.Node(6)}, + {simple.Node(1), simple.Node(7), simple.Node(9), simple.Node(12)}, + {simple.Node(2), simple.Node(8), simple.Node(11)}, + {simple.Node(3), simple.Node(4), simple.Node(5), 
simple.Node(10)}, + }, + }, + { + q: 3.8071135430916545, + communities: [][]graph.Node{ + {simple.Node(0)}, + {simple.Node(1)}, + {simple.Node(2)}, + {simple.Node(3)}, + {simple.Node(4)}, + {simple.Node(5)}, + {simple.Node(6)}, + {simple.Node(7)}, + {simple.Node(8)}, + {simple.Node(9)}, + {simple.Node(10)}, + {simple.Node(11)}, + {simple.Node(12)}, + }, + }, + }, + }, +} + +func TestCommunityQUndirectedMultiplex(t *testing.T) { + for _, test := range communityUndirectedMultiplexQTests { + g, weights, err := undirectedMultiplexFrom(test.layers) + if err != nil { + t.Errorf("unexpected error creating multiplex: %v", err) + continue + } + + for _, structure := range test.structures { + communities := make([][]graph.Node, len(structure.memberships)) + for i, c := range structure.memberships { + for n := range c { + communities[i] = append(communities[i], simple.Node(n)) + } + } + q := QMultiplex(g, communities, weights, []float64{structure.resolution}) + got := floats.Sum(q) + if !floats.EqualWithinAbsOrRel(got, structure.want, structure.tol, structure.tol) && !math.IsNaN(structure.want) { + for _, c := range communities { + sort.Sort(ordered.ByID(c)) + } + t.Errorf("unexpected Q value for %q %v: got: %v %.3v want: %v", + test.name, communities, got, q, structure.want) + } + } + } +} + +func TestCommunityDeltaQUndirectedMultiplex(t *testing.T) { +tests: + for _, test := range communityUndirectedMultiplexQTests { + g, weights, err := undirectedMultiplexFrom(test.layers) + if err != nil { + t.Errorf("unexpected error creating multiplex: %v", err) + continue + } + + rnd := rand.New(rand.NewSource(1)).Intn + for _, structure := range test.structures { + communityOf := make(map[int]int) + communities := make([][]graph.Node, len(structure.memberships)) + for i, c := range structure.memberships { + for n := range c { + communityOf[n] = i + communities[i] = append(communities[i], simple.Node(n)) + } + sort.Sort(ordered.ByID(communities[i])) + } + resolution := []float64{structure.resolution} + + before := QMultiplex(g, communities, weights, resolution) + + // We test exhaustively. + const all = true + + l := newUndirectedMultiplexLocalMover( + reduceUndirectedMultiplex(g, nil, weights), + communities, weights, resolution, all) + if l == nil { + if !math.IsNaN(floats.Sum(before)) { + t.Errorf("unexpected nil localMover with non-NaN Q graph: Q=%.4v", before) + } + continue tests + } + + // This is done to avoid run-to-run + // variation due to map iteration order. 
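+			// (Sorting gives a deterministic starting order, so the
+			// fixed-seed shuffle below is reproducible between runs.)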
+ sort.Sort(ordered.ByID(l.nodes)) + + l.shuffle(rnd) + + for _, target := range l.nodes { + got, gotDst, gotSrc := l.deltaQ(target) + + want, wantDst := math.Inf(-1), -1 + migrated := make([][]graph.Node, len(structure.memberships)) + for i, c := range structure.memberships { + for n := range c { + if n == target.ID() { + continue + } + migrated[i] = append(migrated[i], simple.Node(n)) + } + sort.Sort(ordered.ByID(migrated[i])) + } + + for i, c := range structure.memberships { + if i == communityOf[target.ID()] { + continue + } + if !(all && hasNegative(weights)) { + connected := false + search: + for l := 0; l < g.Depth(); l++ { + if weights[l] < 0 { + connected = true + break search + } + layer := g.Layer(l) + for n := range c { + if layer.HasEdgeBetween(simple.Node(n), target) { + connected = true + break search + } + } + } + if !connected { + continue + } + } + migrated[i] = append(migrated[i], target) + after := QMultiplex(g, migrated, weights, resolution) + migrated[i] = migrated[i][:len(migrated[i])-1] + if delta := floats.Sum(after) - floats.Sum(before); delta > want { + want = delta + wantDst = i + } + } + + if !floats.EqualWithinAbsOrRel(got, want, structure.tol, structure.tol) || gotDst != wantDst { + t.Errorf("unexpected result moving n=%d in c=%d of %s/%.4v: got: %.4v,%d want: %.4v,%d"+ + "\n\t%v\n\t%v", + target.ID(), communityOf[target.ID()], test.name, structure.resolution, got, gotDst, want, wantDst, + communities, migrated) + } + if gotSrc.community != communityOf[target.ID()] { + t.Errorf("unexpected source community index: got: %d want: %d", gotSrc, communityOf[target.ID()]) + } else if communities[gotSrc.community][gotSrc.node].ID() != target.ID() { + wantNodeIdx := -1 + for i, n := range communities[gotSrc.community] { + if n.ID() == target.ID() { + wantNodeIdx = i + break + } + } + t.Errorf("unexpected source node index: got: %d want: %d", gotSrc.node, wantNodeIdx) + } + } + } + } +} + +func TestReduceQConsistencyUndirectedMultiplex(t *testing.T) { +tests: + for _, test := range communityUndirectedMultiplexQTests { + g, weights, err := undirectedMultiplexFrom(test.layers) + if err != nil { + t.Errorf("unexpected error creating multiplex: %v", err) + continue + } + + for _, structure := range test.structures { + if math.IsNaN(structure.want) { + continue tests + } + + communities := make([][]graph.Node, len(structure.memberships)) + for i, c := range structure.memberships { + for n := range c { + communities[i] = append(communities[i], simple.Node(n)) + } + sort.Sort(ordered.ByID(communities[i])) + } + + gQ := QMultiplex(g, communities, weights, []float64{structure.resolution}) + gQnull := QMultiplex(g, nil, weights, nil) + + cg0 := reduceUndirectedMultiplex(g, nil, weights) + cg0Qnull := QMultiplex(cg0, cg0.Structure(), weights, nil) + if !floats.EqualWithinAbsOrRel(floats.Sum(gQnull), floats.Sum(cg0Qnull), structure.tol, structure.tol) { + t.Errorf("disagreement between null Q from method: %v and function: %v", cg0Qnull, gQnull) + } + cg0Q := QMultiplex(cg0, communities, weights, []float64{structure.resolution}) + if !floats.EqualWithinAbsOrRel(floats.Sum(gQ), floats.Sum(cg0Q), structure.tol, structure.tol) { + t.Errorf("unexpected Q result after initial reduction: got: %v want :%v", cg0Q, gQ) + } + + cg1 := reduceUndirectedMultiplex(cg0, communities, weights) + cg1Q := QMultiplex(cg1, cg1.Structure(), weights, []float64{structure.resolution}) + if !floats.EqualWithinAbsOrRel(floats.Sum(gQ), floats.Sum(cg1Q), structure.tol, structure.tol) { + t.Errorf("unexpected Q 
result after second reduction: got: %v want :%v", cg1Q, gQ) + } + } + } +} + +var localUndirectedMultiplexMoveTests = []struct { + name string + layers []layer + structures []moveStructures +}{ + { + name: "blondel", + layers: []layer{{g: blondel, weight: 1}, {g: blondel, weight: 0.5}}, + structures: []moveStructures{ + { + memberships: []set{ + 0: linksTo(0, 1, 2, 4, 5), + 1: linksTo(3, 6, 7), + 2: linksTo(8, 9, 10, 12, 14, 15), + 3: linksTo(11, 13), + }, + targetNodes: []graph.Node{simple.Node(0)}, + resolution: 1, + tol: 1e-14, + }, + { + memberships: []set{ + 0: linksTo(0, 1, 2, 4, 5), + 1: linksTo(3, 6, 7), + 2: linksTo(8, 9, 10, 12, 14, 15), + 3: linksTo(11, 13), + }, + targetNodes: []graph.Node{simple.Node(3)}, + resolution: 1, + tol: 1e-14, + }, + { + memberships: []set{ + 0: linksTo(0, 1, 2, 4, 5), + 1: linksTo(3, 6, 7), + 2: linksTo(8, 9, 10, 12, 14, 15), + 3: linksTo(11, 13), + }, + // Case to demonstrate when A_aa != k_a^𝛼. + targetNodes: []graph.Node{simple.Node(3), simple.Node(2)}, + resolution: 1, + tol: 1e-14, + }, + }, + }, +} + +func TestMoveLocalUndirectedMultiplex(t *testing.T) { + for _, test := range localUndirectedMultiplexMoveTests { + g, weights, err := undirectedMultiplexFrom(test.layers) + if err != nil { + t.Errorf("unexpected error creating multiplex: %v", err) + continue + } + + for _, structure := range test.structures { + communities := make([][]graph.Node, len(structure.memberships)) + for i, c := range structure.memberships { + for n := range c { + communities[i] = append(communities[i], simple.Node(n)) + } + sort.Sort(ordered.ByID(communities[i])) + } + + r := reduceUndirectedMultiplex(reduceUndirectedMultiplex(g, nil, weights), communities, weights) + + l := newUndirectedMultiplexLocalMover(r, r.communities, weights, []float64{structure.resolution}, true) + for _, n := range structure.targetNodes { + dQ, dst, src := l.deltaQ(n) + if dQ > 0 { + before := floats.Sum(QMultiplex(r, l.communities, weights, []float64{structure.resolution})) + l.move(dst, src) + after := floats.Sum(QMultiplex(r, l.communities, weights, []float64{structure.resolution})) + want := after - before + if !floats.EqualWithinAbsOrRel(dQ, want, structure.tol, structure.tol) { + t.Errorf("unexpected deltaQ: got: %v want: %v", dQ, want) + } + } + } + } + } +} + +func TestLouvainMultiplex(t *testing.T) { + const louvainIterations = 20 + + for _, test := range communityUndirectedMultiplexQTests { + g, weights, err := undirectedMultiplexFrom(test.layers) + if err != nil { + t.Errorf("unexpected error creating multiplex: %v", err) + continue + } + + if test.structures[0].resolution != 1 { + panic("bad test: expect resolution=1") + } + want := make([][]graph.Node, len(test.structures[0].memberships)) + for i, c := range test.structures[0].memberships { + for n := range c { + want[i] = append(want[i], simple.Node(n)) + } + sort.Sort(ordered.ByID(want[i])) + } + sort.Sort(ordered.BySliceIDs(want)) + + var ( + got *ReducedUndirectedMultiplex + bestQ = math.Inf(-1) + ) + // Modularize is randomised so we do this to + // ensure the level tests are consistent. + src := rand.New(rand.NewSource(1)) + for i := 0; i < louvainIterations; i++ { + r := ModularizeMultiplex(g, weights, nil, true, src).(*ReducedUndirectedMultiplex) + if q := floats.Sum(QMultiplex(r, nil, weights, nil)); q > bestQ || math.IsNaN(q) { + bestQ = q + got = r + + if math.IsNaN(q) { + // Don't try again for non-connected case. 
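+					// (The unconnected test multiplex has no edges, so Q is
+					// NaN for every partition and further restarts cannot
+					// improve on this result.)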
+ break + } + } + + var qs []float64 + for p := r; p != nil; p = p.Expanded().(*ReducedUndirectedMultiplex) { + qs = append(qs, floats.Sum(QMultiplex(p, nil, weights, nil))) + } + + // Recovery of Q values is reversed. + if reverse(qs); !sort.Float64sAreSorted(qs) { + t.Errorf("Q values not monotonically increasing: %.5v", qs) + } + } + + gotCommunities := got.Communities() + for _, c := range gotCommunities { + sort.Sort(ordered.ByID(c)) + } + sort.Sort(ordered.BySliceIDs(gotCommunities)) + if !reflect.DeepEqual(gotCommunities, want) { + t.Errorf("unexpected community membership for %s Q=%.4v:\n\tgot: %v\n\twant:%v", + test.name, bestQ, gotCommunities, want) + continue + } + + var levels []level + for p := got; p != nil; p = p.Expanded().(*ReducedUndirectedMultiplex) { + var communities [][]graph.Node + if p.parent != nil { + communities = p.parent.Communities() + for _, c := range communities { + sort.Sort(ordered.ByID(c)) + } + sort.Sort(ordered.BySliceIDs(communities)) + } else { + communities = reduceUndirectedMultiplex(g, nil, weights).Communities() + } + q := floats.Sum(QMultiplex(p, nil, weights, nil)) + if math.IsNaN(q) { + // Use an equalable flag value in place of NaN. + q = math.Inf(-1) + } + levels = append(levels, level{q: q, communities: communities}) + } + if !reflect.DeepEqual(levels, test.wantLevels) { + t.Errorf("unexpected level structure:\n\tgot: %v\n\twant:%v", levels, test.wantLevels) + } + } +} + +func TestNonContiguousUndirectedMultiplex(t *testing.T) { + g := simple.NewUndirectedGraph(0, 0) + for _, e := range []simple.Edge{ + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(4), T: simple.Node(5), W: 1}, + } { + g.SetEdge(e) + } + + func() { + defer func() { + r := recover() + if r != nil { + t.Error("unexpected panic with non-contiguous ID range") + } + }() + ModularizeMultiplex(UndirectedLayers{g}, nil, nil, true, nil) + }() +} + +func BenchmarkLouvainMultiplex(b *testing.B) { + src := rand.New(rand.NewSource(1)) + for i := 0; i < b.N; i++ { + ModularizeMultiplex(UndirectedLayers{dupGraph}, nil, nil, true, src) + } +} + +func undirectedMultiplexFrom(raw []layer) (UndirectedLayers, []float64, error) { + var layers []graph.Undirected + var weights []float64 + for _, l := range raw { + g := simple.NewUndirectedGraph(0, 0) + for u, e := range l.g { + // Add nodes that are not defined by an edge. + if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + w := 1.0 + if l.edgeWeight != 0 { + w = l.edgeWeight + } + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: w}) + } + } + layers = append(layers, g) + weights = append(weights, l.weight) + } + g, err := NewUndirectedLayers(layers...) + if err != nil { + return nil, nil, err + } + return g, weights, nil +} diff --git a/graph/community/louvain_undirected_test.go b/graph/community/louvain_undirected_test.go new file mode 100644 index 00000000..b567e839 --- /dev/null +++ b/graph/community/louvain_undirected_test.go @@ -0,0 +1,648 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package community + +import ( + "math" + "math/rand" + "reflect" + "sort" + "testing" + + "github.com/gonum/floats" + "github.com/gonum/graph" + "github.com/gonum/graph/internal/ordered" + "github.com/gonum/graph/simple" +) + +var communityUndirectedQTests = []struct { + name string + g []set + structures []structure + + wantLevels []level +}{ + // The java reference implementation is available from http://www.ludowaltman.nl/slm/. + { + name: "unconnected", + g: unconnected, + structures: []structure{ + { + resolution: 1, + memberships: []set{ + 0: linksTo(0), + 1: linksTo(1), + 2: linksTo(2), + 3: linksTo(3), + 4: linksTo(4), + 5: linksTo(5), + }, + want: math.NaN(), + }, + }, + wantLevels: []level{ + { + q: math.Inf(-1), // Here math.Inf(-1) is used as a place holder for NaN to allow use of reflect.DeepEqual. + communities: [][]graph.Node{ + {simple.Node(0)}, + {simple.Node(1)}, + {simple.Node(2)}, + {simple.Node(3)}, + {simple.Node(4)}, + {simple.Node(5)}, + }, + }, + }, + }, + { + name: "small_dumbell", + g: smallDumbell, + structures: []structure{ + { + resolution: 1, + // community structure and modularity calculated by java reference implementation. + memberships: []set{ + 0: linksTo(0, 1, 2), + 1: linksTo(3, 4, 5), + }, + want: 0.357, tol: 1e-3, + }, + { + resolution: 1, + memberships: []set{ + 0: linksTo(0, 1, 2, 3, 4, 5), + }, + // theoretical expectation. + want: 0, tol: 1e-14, + }, + }, + wantLevels: []level{ + { + q: 0.35714285714285715, + communities: [][]graph.Node{ + {simple.Node(0), simple.Node(1), simple.Node(2)}, + {simple.Node(3), simple.Node(4), simple.Node(5)}, + }, + }, + { + q: -0.17346938775510204, + communities: [][]graph.Node{ + {simple.Node(0)}, + {simple.Node(1)}, + {simple.Node(2)}, + {simple.Node(3)}, + {simple.Node(4)}, + {simple.Node(5)}, + }, + }, + }, + }, + { + name: "zachary", + g: zachary, + structures: []structure{ + { + resolution: 1, + // community structure and modularity from doi: 10.1140/epjb/e2013-40829-0 + memberships: []set{ + 0: linksTo(0, 1, 2, 3, 7, 11, 12, 13, 17, 19, 21), + 1: linksTo(4, 5, 6, 10, 16), + 2: linksTo(8, 9, 14, 15, 18, 20, 22, 26, 29, 30, 32, 33), + 3: linksTo(23, 24, 25, 27, 28, 31), + }, + // Noted to be the optimal modularisation in the paper above. + want: 0.4198, tol: 1e-4, + }, + { + resolution: 0.5, + // community structure and modularity calculated by java reference implementation. + memberships: []set{ + 0: linksTo(0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 11, 12, 13, 16, 17, 19, 21), + 1: linksTo(8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33), + }, + want: 0.6218, tol: 1e-3, + }, + { + resolution: 2, + // community structure and modularity calculated by java reference implementation. 
+ memberships: []set{ + 0: linksTo(14, 18, 20, 22, 32, 33, 15), + 1: linksTo(0, 1, 11, 17, 19, 21), + 2: linksTo(2, 3, 7, 9, 12, 13), + 3: linksTo(4, 5, 6, 10, 16), + 4: linksTo(24, 25, 28, 31), + 5: linksTo(23, 26, 27, 29), + 6: linksTo(8, 30), + }, + want: 0.1645, tol: 1e-3, + }, + }, + wantLevels: []level{ + { + q: 0.4197896120973044, + communities: [][]graph.Node{ + {simple.Node(0), simple.Node(1), simple.Node(2), simple.Node(3), simple.Node(7), simple.Node(11), simple.Node(12), simple.Node(13), simple.Node(17), simple.Node(19), simple.Node(21)}, + {simple.Node(4), simple.Node(5), simple.Node(6), simple.Node(10), simple.Node(16)}, + {simple.Node(8), simple.Node(9), simple.Node(14), simple.Node(15), simple.Node(18), simple.Node(20), simple.Node(22), simple.Node(26), simple.Node(29), simple.Node(30), simple.Node(32), simple.Node(33)}, + {simple.Node(23), simple.Node(24), simple.Node(25), simple.Node(27), simple.Node(28), simple.Node(31)}, + }, + }, + { + q: 0.39907955292570674, + communities: [][]graph.Node{ + {simple.Node(0), simple.Node(1), simple.Node(2), simple.Node(3), simple.Node(7), simple.Node(11), simple.Node(12), simple.Node(13), simple.Node(17), simple.Node(19), simple.Node(21)}, + {simple.Node(4), simple.Node(10)}, + {simple.Node(5), simple.Node(6), simple.Node(16)}, + {simple.Node(8), simple.Node(9), simple.Node(14), simple.Node(15), simple.Node(18), simple.Node(20), simple.Node(22), simple.Node(26), simple.Node(29), simple.Node(30), simple.Node(32), simple.Node(33)}, + {simple.Node(23), simple.Node(24), simple.Node(25), simple.Node(27), simple.Node(28), simple.Node(31)}, + }, + }, + { + q: -0.04980276134122286, + communities: [][]graph.Node{ + {simple.Node(0)}, + {simple.Node(1)}, + {simple.Node(2)}, + {simple.Node(3)}, + {simple.Node(4)}, + {simple.Node(5)}, + {simple.Node(6)}, + {simple.Node(7)}, + {simple.Node(8)}, + {simple.Node(9)}, + {simple.Node(10)}, + {simple.Node(11)}, + {simple.Node(12)}, + {simple.Node(13)}, + {simple.Node(14)}, + {simple.Node(15)}, + {simple.Node(16)}, + {simple.Node(17)}, + {simple.Node(18)}, + {simple.Node(19)}, + {simple.Node(20)}, + {simple.Node(21)}, + {simple.Node(22)}, + {simple.Node(23)}, + {simple.Node(24)}, + {simple.Node(25)}, + {simple.Node(26)}, + {simple.Node(27)}, + {simple.Node(28)}, + {simple.Node(29)}, + {simple.Node(30)}, + {simple.Node(31)}, + {simple.Node(32)}, + {simple.Node(33)}, + }, + }, + }, + }, + { + name: "blondel", + g: blondel, + structures: []structure{ + { + resolution: 1, + // community structure and modularity calculated by java reference implementation. 
+ memberships: []set{ + 0: linksTo(0, 1, 2, 3, 4, 5, 6, 7), + 1: linksTo(8, 9, 10, 11, 12, 13, 14, 15), + }, + want: 0.3922, tol: 1e-4, + }, + }, + wantLevels: []level{ + { + q: 0.39221938775510207, + communities: [][]graph.Node{ + {simple.Node(0), simple.Node(1), simple.Node(2), simple.Node(3), simple.Node(4), simple.Node(5), simple.Node(6), simple.Node(7)}, + {simple.Node(8), simple.Node(9), simple.Node(10), simple.Node(11), simple.Node(12), simple.Node(13), simple.Node(14), simple.Node(15)}, + }, + }, + { + q: 0.34630102040816324, + communities: [][]graph.Node{ + {simple.Node(0), simple.Node(1), simple.Node(2), simple.Node(4), simple.Node(5)}, + {simple.Node(3), simple.Node(6), simple.Node(7)}, + {simple.Node(8), simple.Node(9), simple.Node(10), simple.Node(12), simple.Node(14), simple.Node(15)}, + {simple.Node(11), simple.Node(13)}, + }, + }, + { + q: -0.07142857142857144, + communities: [][]graph.Node{ + {simple.Node(0)}, + {simple.Node(1)}, + {simple.Node(2)}, + {simple.Node(3)}, + {simple.Node(4)}, + {simple.Node(5)}, + {simple.Node(6)}, + {simple.Node(7)}, + {simple.Node(8)}, + {simple.Node(9)}, + {simple.Node(10)}, + {simple.Node(11)}, + {simple.Node(12)}, + {simple.Node(13)}, + {simple.Node(14)}, + {simple.Node(15)}, + }, + }, + }, + }, +} + +func TestCommunityQUndirected(t *testing.T) { + for _, test := range communityUndirectedQTests { + g := simple.NewUndirectedGraph(0, 0) + for u, e := range test.g { + // Add nodes that are not defined by an edge. + if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1}) + } + } + for _, structure := range test.structures { + communities := make([][]graph.Node, len(structure.memberships)) + for i, c := range structure.memberships { + for n := range c { + communities[i] = append(communities[i], simple.Node(n)) + } + } + got := Q(g, communities, structure.resolution) + if !floats.EqualWithinAbsOrRel(got, structure.want, structure.tol, structure.tol) && !math.IsNaN(structure.want) { + for _, c := range communities { + sort.Sort(ordered.ByID(c)) + } + t.Errorf("unexpected Q value for %q %v: got: %v want: %v", + test.name, communities, got, structure.want) + } + } + } +} + +func TestCommunityDeltaQUndirected(t *testing.T) { +tests: + for _, test := range communityUndirectedQTests { + g := simple.NewUndirectedGraph(0, 0) + for u, e := range test.g { + // Add nodes that are not defined by an edge. + if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1}) + } + } + + rnd := rand.New(rand.NewSource(1)).Intn + for _, structure := range test.structures { + communityOf := make(map[int]int) + communities := make([][]graph.Node, len(structure.memberships)) + for i, c := range structure.memberships { + for n := range c { + communityOf[n] = i + communities[i] = append(communities[i], simple.Node(n)) + } + sort.Sort(ordered.ByID(communities[i])) + } + + before := Q(g, communities, structure.resolution) + + l := newUndirectedLocalMover(reduceUndirected(g, nil), communities, structure.resolution) + if l == nil { + if !math.IsNaN(before) { + t.Errorf("unexpected nil localMover with non-NaN Q graph: Q=%.4v", before) + } + continue tests + } + + // This is done to avoid run-to-run + // variation due to map iteration order. 
+ sort.Sort(ordered.ByID(l.nodes)) + + l.shuffle(rnd) + + for _, target := range l.nodes { + got, gotDst, gotSrc := l.deltaQ(target) + + want, wantDst := math.Inf(-1), -1 + migrated := make([][]graph.Node, len(structure.memberships)) + for i, c := range structure.memberships { + for n := range c { + if n == target.ID() { + continue + } + migrated[i] = append(migrated[i], simple.Node(n)) + } + sort.Sort(ordered.ByID(migrated[i])) + } + + for i, c := range structure.memberships { + if i == communityOf[target.ID()] { + continue + } + connected := false + for n := range c { + if g.HasEdgeBetween(simple.Node(n), target) { + connected = true + break + } + } + if !connected { + continue + } + migrated[i] = append(migrated[i], target) + after := Q(g, migrated, structure.resolution) + migrated[i] = migrated[i][:len(migrated[i])-1] + if after-before > want { + want = after - before + wantDst = i + } + } + + if !floats.EqualWithinAbsOrRel(got, want, structure.tol, structure.tol) || gotDst != wantDst { + t.Errorf("unexpected result moving n=%d in c=%d of %s/%.4v: got: %.4v,%d want: %.4v,%d"+ + "\n\t%v\n\t%v", + target.ID(), communityOf[target.ID()], test.name, structure.resolution, got, gotDst, want, wantDst, + communities, migrated) + } + if gotSrc.community != communityOf[target.ID()] { + t.Errorf("unexpected source community index: got: %d want: %d", gotSrc, communityOf[target.ID()]) + } else if communities[gotSrc.community][gotSrc.node].ID() != target.ID() { + wantNodeIdx := -1 + for i, n := range communities[gotSrc.community] { + if n.ID() == target.ID() { + wantNodeIdx = i + break + } + } + t.Errorf("unexpected source node index: got: %d want: %d", gotSrc.node, wantNodeIdx) + } + } + } + } +} + +func TestReduceQConsistencyUndirected(t *testing.T) { +tests: + for _, test := range communityUndirectedQTests { + g := simple.NewUndirectedGraph(0, 0) + for u, e := range test.g { + // Add nodes that are not defined by an edge. 
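+			// (SetEdge adds both endpoints of an edge, so this explicit
+			// AddNode only matters for nodes with no edges at all, such as
+			// those in the unconnected test graph.)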
+ if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1}) + } + } + + for _, structure := range test.structures { + if math.IsNaN(structure.want) { + continue tests + } + + communities := make([][]graph.Node, len(structure.memberships)) + for i, c := range structure.memberships { + for n := range c { + communities[i] = append(communities[i], simple.Node(n)) + } + sort.Sort(ordered.ByID(communities[i])) + } + + gQ := Q(g, communities, structure.resolution) + gQnull := Q(g, nil, 1) + + cg0 := reduceUndirected(g, nil) + cg0Qnull := Q(cg0, cg0.Structure(), 1) + if !floats.EqualWithinAbsOrRel(gQnull, cg0Qnull, structure.tol, structure.tol) { + t.Errorf("disagreement between null Q from method: %v and function: %v", cg0Qnull, gQnull) + } + cg0Q := Q(cg0, communities, structure.resolution) + if !floats.EqualWithinAbsOrRel(gQ, cg0Q, structure.tol, structure.tol) { + t.Errorf("unexpected Q result after initial reduction: got: %v want :%v", cg0Q, gQ) + } + + cg1 := reduceUndirected(cg0, communities) + cg1Q := Q(cg1, cg1.Structure(), structure.resolution) + if !floats.EqualWithinAbsOrRel(gQ, cg1Q, structure.tol, structure.tol) { + t.Errorf("unexpected Q result after second reduction: got: %v want :%v", cg1Q, gQ) + } + } + } +} + +var localUndirectedMoveTests = []struct { + name string + g []set + structures []moveStructures +}{ + { + name: "blondel", + g: blondel, + structures: []moveStructures{ + { + memberships: []set{ + 0: linksTo(0, 1, 2, 4, 5), + 1: linksTo(3, 6, 7), + 2: linksTo(8, 9, 10, 12, 14, 15), + 3: linksTo(11, 13), + }, + targetNodes: []graph.Node{simple.Node(0)}, + resolution: 1, + tol: 1e-14, + }, + { + memberships: []set{ + 0: linksTo(0, 1, 2, 4, 5), + 1: linksTo(3, 6, 7), + 2: linksTo(8, 9, 10, 12, 14, 15), + 3: linksTo(11, 13), + }, + targetNodes: []graph.Node{simple.Node(3)}, + resolution: 1, + tol: 1e-14, + }, + { + memberships: []set{ + 0: linksTo(0, 1, 2, 4, 5), + 1: linksTo(3, 6, 7), + 2: linksTo(8, 9, 10, 12, 14, 15), + 3: linksTo(11, 13), + }, + // Case to demonstrate when A_aa != k_a^𝛼. + targetNodes: []graph.Node{simple.Node(3), simple.Node(2)}, + resolution: 1, + tol: 1e-14, + }, + }, + }, +} + +func TestMoveLocalUndirected(t *testing.T) { + for _, test := range localUndirectedMoveTests { + g := simple.NewUndirectedGraph(0, 0) + for u, e := range test.g { + // Add nodes that are not defined by an edge. 
+ if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1}) + } + } + + for _, structure := range test.structures { + communities := make([][]graph.Node, len(structure.memberships)) + for i, c := range structure.memberships { + for n := range c { + communities[i] = append(communities[i], simple.Node(n)) + } + sort.Sort(ordered.ByID(communities[i])) + } + + r := reduceUndirected(reduceUndirected(g, nil), communities) + + l := newUndirectedLocalMover(r, r.communities, structure.resolution) + for _, n := range structure.targetNodes { + dQ, dst, src := l.deltaQ(n) + if dQ > 0 { + before := Q(r, l.communities, structure.resolution) + l.move(dst, src) + after := Q(r, l.communities, structure.resolution) + want := after - before + if !floats.EqualWithinAbsOrRel(dQ, want, structure.tol, structure.tol) { + t.Errorf("unexpected deltaQ: got: %v want: %v", dQ, want) + } + } + } + } + } +} + +func TestModularizeUndirected(t *testing.T) { + const louvainIterations = 20 + + for _, test := range communityUndirectedQTests { + g := simple.NewUndirectedGraph(0, 0) + for u, e := range test.g { + // Add nodes that are not defined by an edge. + if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1}) + } + } + + if test.structures[0].resolution != 1 { + panic("bad test: expect resolution=1") + } + want := make([][]graph.Node, len(test.structures[0].memberships)) + for i, c := range test.structures[0].memberships { + for n := range c { + want[i] = append(want[i], simple.Node(n)) + } + sort.Sort(ordered.ByID(want[i])) + } + sort.Sort(ordered.BySliceIDs(want)) + + var ( + got *ReducedUndirected + bestQ = math.Inf(-1) + ) + // Modularize is randomised so we do this to + // ensure the level tests are consistent. + src := rand.New(rand.NewSource(1)) + for i := 0; i < louvainIterations; i++ { + r := Modularize(g, 1, src).(*ReducedUndirected) + if q := Q(r, nil, 1); q > bestQ || math.IsNaN(q) { + bestQ = q + got = r + + if math.IsNaN(q) { + // Don't try again for non-connected case. + break + } + } + + var qs []float64 + for p := r; p != nil; p = p.Expanded().(*ReducedUndirected) { + qs = append(qs, Q(p, nil, 1)) + } + + // Recovery of Q values is reversed. + if reverse(qs); !sort.Float64sAreSorted(qs) { + t.Errorf("Q values not monotonically increasing: %.5v", qs) + } + } + + gotCommunities := got.Communities() + for _, c := range gotCommunities { + sort.Sort(ordered.ByID(c)) + } + sort.Sort(ordered.BySliceIDs(gotCommunities)) + if !reflect.DeepEqual(gotCommunities, want) { + t.Errorf("unexpected community membership for %s Q=%.4v:\n\tgot: %v\n\twant:%v", + test.name, bestQ, gotCommunities, want) + continue + } + + var levels []level + for p := got; p != nil; p = p.Expanded().(*ReducedUndirected) { + var communities [][]graph.Node + if p.parent != nil { + communities = p.parent.Communities() + for _, c := range communities { + sort.Sort(ordered.ByID(c)) + } + sort.Sort(ordered.BySliceIDs(communities)) + } else { + communities = reduceUndirected(g, nil).Communities() + } + q := Q(p, nil, 1) + if math.IsNaN(q) { + // Use an equalable flag value in place of NaN. 
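+				// (NaN != NaN, so the reflect.DeepEqual comparison below
+				// would never match levels built with NaN Q values.)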
+ q = math.Inf(-1) + } + levels = append(levels, level{q: q, communities: communities}) + } + if !reflect.DeepEqual(levels, test.wantLevels) { + t.Errorf("unexpected level structure:\n\tgot: %v\n\twant:%v", levels, test.wantLevels) + } + } +} + +func TestNonContiguousUndirected(t *testing.T) { + g := simple.NewUndirectedGraph(0, 0) + for _, e := range []simple.Edge{ + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(4), T: simple.Node(5), W: 1}, + } { + g.SetEdge(e) + } + + func() { + defer func() { + r := recover() + if r != nil { + t.Error("unexpected panic with non-contiguous ID range") + } + }() + Modularize(g, 1, nil) + }() +} + +func BenchmarkLouvain(b *testing.B) { + src := rand.New(rand.NewSource(1)) + for i := 0; i < b.N; i++ { + Modularize(dupGraph, 1, src) + } +} diff --git a/graph/community/printgraphs.go b/graph/community/printgraphs.go new file mode 100644 index 00000000..c434a33b --- /dev/null +++ b/graph/community/printgraphs.go @@ -0,0 +1,142 @@ +// Copyright ©2016 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build ignore + +// printgraphs allows us to generate a consistent directed view of +// a set of edges that follows a reasonably real-world-meaningful +// graph. The interpretation of the links in the resulting directed +// graphs are either "suggests" in the context of a Page Ranking or +// possibly "looks up to" in the Zachary graph. +package main + +import ( + "fmt" + "sort" + + "github.com/gonum/graph" + "github.com/gonum/graph/internal/ordered" + "github.com/gonum/graph/network" + "github.com/gonum/graph/simple" +) + +// set is an integer set. +type set map[int]struct{} + +func linksTo(i ...int) set { + if len(i) == 0 { + return nil + } + s := make(set) + for _, v := range i { + s[v] = struct{}{} + } + return s +} + +var ( + zachary = []set{ + 0: linksTo(1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 17, 19, 21, 31), + 1: linksTo(2, 3, 7, 13, 17, 19, 21, 30), + 2: linksTo(3, 7, 8, 9, 13, 27, 28, 32), + 3: linksTo(7, 12, 13), + 4: linksTo(6, 10), + 5: linksTo(6, 10, 16), + 6: linksTo(16), + 8: linksTo(30, 32, 33), + 9: linksTo(33), + 13: linksTo(33), + 14: linksTo(32, 33), + 15: linksTo(32, 33), + 18: linksTo(32, 33), + 19: linksTo(33), + 20: linksTo(32, 33), + 22: linksTo(32, 33), + 23: linksTo(25, 27, 29, 32, 33), + 24: linksTo(25, 27, 31), + 25: linksTo(31), + 26: linksTo(29, 33), + 27: linksTo(33), + 28: linksTo(31, 33), + 29: linksTo(32, 33), + 30: linksTo(32, 33), + 31: linksTo(32, 33), + 32: linksTo(33), + 33: nil, + } + + blondel = []set{ + 0: linksTo(2, 3, 4, 5), + 1: linksTo(2, 4, 7), + 2: linksTo(4, 5, 6), + 3: linksTo(7), + 4: linksTo(10), + 5: linksTo(7, 11), + 6: linksTo(7, 11), + 8: linksTo(9, 10, 11, 14, 15), + 9: linksTo(12, 14), + 10: linksTo(11, 12, 13, 14), + 11: linksTo(13), + 15: nil, + } +) + +func main() { + for _, raw := range []struct { + name string + set []set + }{ + {"zachary", zachary}, + {"blondel", blondel}, + } { + g := simple.NewUndirectedGraph(0, 0) + for u, e := range raw.set { + // Add nodes that are not defined by an edge. 
+ if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1}) + } + } + + nodes := g.Nodes() + sort.Sort(ordered.ByID(nodes)) + + fmt.Printf("%s = []set{\n", raw.name) + rank := network.PageRank(asDirected{g}, 0.85, 1e-8) + for _, u := range nodes { + to := g.From(nodes[u.ID()]) + sort.Sort(ordered.ByID(to)) + var links []int + for _, v := range to { + if rank[u.ID()] <= rank[v.ID()] { + links = append(links, v.ID()) + } + } + + if links == nil { + fmt.Printf("\t%d: nil, // rank=%.4v\n", u.ID(), rank[u.ID()]) + continue + } + + fmt.Printf("\t%d: linksTo(", u.ID()) + for i, v := range links { + if i != 0 { + fmt.Print(", ") + } + fmt.Print(v) + } + fmt.Printf("), // rank=%.4v\n", rank[u.ID()]) + } + fmt.Println("}") + } +} + +type asDirected struct{ *simple.UndirectedGraph } + +func (g asDirected) HasEdgeFromTo(u, v graph.Node) bool { + return g.UndirectedGraph.HasEdgeBetween(u, v) +} +func (g asDirected) To(v graph.Node) []graph.Node { return g.From(v) } diff --git a/graph/doc.go b/graph/doc.go new file mode 100644 index 00000000..7b7b3cc8 --- /dev/null +++ b/graph/doc.go @@ -0,0 +1,38 @@ +// Copyright ©2014 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* +Package graph implements functions and interfaces to deal with formal discrete graphs. It aims to +be first and foremost flexible, with speed as a strong second priority. + +In this package, graphs are taken to be directed, and undirected graphs are considered to be a +special case of directed graphs that happen to have reciprocal edges. Graphs are, by default, +unweighted, but functions that require weighted edges have several methods of dealing with this. +In order of precedence: + +1. These functions have an argument called Cost (and in some cases, HeuristicCost). If this is +present, it will always be used to determine the cost between two nodes. + +2. These functions will check if your graph implements the Coster (and/or HeuristicCoster) +interface. If this is present, and the Cost (or HeuristicCost) argument is nil, these functions +will be used. + +3. Finally, if no user data is supplied, it will use the functions UniformCost (always returns 1) +and/or NulLHeuristic (always returns 0). + +For information on the specification for Cost functions, please see the Coster interface. + +Finally, although the functions take in a Graph -- they will always use the correct behavior. +If your graph implements DirectedGraph, it will use Successors and To where applicable, +if undirected, it will use From instead. If it implements neither, it will scan the edge list +for successors and predecessors where applicable. (This is slow, you should always implement either +Directed or Undirected) + +This package will never modify a graph that is not Mutable (and the interface does not allow it to +do so). However, return values are free to be modified, so never pass a reference to your own edge +list or node list. It also guarantees that any nodes passed back to the user will be the same +nodes returned to it -- that is, it will never take a Node's ID and then wrap the ID in a new +struct and return that. You'll always get back your original data. 
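+
+As an illustrative sketch of this precedence (the Search function and the graph
+values named here are hypothetical, not part of this package's API):
+
+	// 1. An explicit Cost argument always wins.
+	path := Search(g, start, goal, myCost, nil)
+
+	// 2. With a nil Cost, a graph implementing Coster supplies the costs.
+	path = Search(costerGraph, start, goal, nil, nil)
+
+	// 3. Otherwise UniformCost (always 1) and the null heuristic (always 0) are used.
+	path = Search(plainGraph, start, goal, nil, nil)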
+*/ +package graph diff --git a/graph/encoding/dot/decode.go b/graph/encoding/dot/decode.go new file mode 100644 index 00000000..22065ddc --- /dev/null +++ b/graph/encoding/dot/decode.go @@ -0,0 +1,237 @@ +// Copyright ©2017 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package dot + +import ( + "fmt" + + "github.com/gonum/graph" + "github.com/gonum/graph/formats/dot" + "github.com/gonum/graph/formats/dot/ast" + "golang.org/x/tools/container/intsets" +) + +// Builder is a graph that can have user-defined nodes and edges added. +type Builder interface { + graph.Graph + graph.Builder + // NewNode adds a new node with a unique node ID to the graph. + NewNode() graph.Node + // NewEdge adds a new edge from the source to the destination node to the + // graph, or returns the existing edge if already present. + NewEdge(from, to graph.Node) graph.Edge +} + +// UnmarshalerAttr is the interface implemented by objects that can unmarshal a +// DOT attribute description of themselves. +type UnmarshalerAttr interface { + // UnmarshalDOTAttr decodes a single DOT attribute. + UnmarshalDOTAttr(attr Attribute) error +} + +// Unmarshal parses the Graphviz DOT-encoded data and stores the result in dst. +func Unmarshal(data []byte, dst Builder) error { + file, err := dot.ParseBytes(data) + if err != nil { + return err + } + if len(file.Graphs) != 1 { + return fmt.Errorf("invalid number of graphs; expected 1, got %d", len(file.Graphs)) + } + return copyGraph(dst, file.Graphs[0]) +} + +// copyGraph copies the nodes and edges from the Graphviz AST source graph to +// the destination graph. Edge direction is maintained if present. +func copyGraph(dst Builder, src *ast.Graph) (err error) { + defer func() { + switch e := recover().(type) { + case nil: + case error: + err = e + default: + panic(e) + } + }() + gen := &generator{ + directed: src.Directed, + ids: make(map[string]graph.Node), + } + for _, stmt := range src.Stmts { + gen.addStmt(dst, stmt) + } + return err +} + +// A generator keeps track of the information required for generating a gonum +// graph from a dot AST graph. +type generator struct { + // Directed graph. + directed bool + // Map from dot AST node ID to gonum node. + ids map[string]graph.Node + // Nodes processed within the context of a subgraph, that is to be used as a + // vertex of an edge. + subNodes []graph.Node + // Stack of start indices into the subgraph node slice. The top element + // corresponds to the start index of the active (or inner-most) subgraph. + subStart []int +} + +// node returns the gonum node corresponding to the given dot AST node ID, +// generating a new such node if none exist. +func (gen *generator) node(dst Builder, id string) graph.Node { + if n, ok := gen.ids[id]; ok { + return n + } + n := dst.NewNode() + gen.ids[id] = n + // Check if within the context of a subgraph, that is to be used as a vertex + // of an edge. + if gen.isInSubgraph() { + // Append node processed within the context of a subgraph, that is to be + // used as a vertex of an edge + gen.appendSubgraphNode(n) + } + return n +} + +// addStmt adds the given statement to the graph. 
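+// For illustration, given the DOT input
+//
+//	A [label="foo"];
+//	A -> B;
+//
+// the node statement creates a node for A and, if that node implements
+// UnmarshalerAttr, unmarshals its label attribute; the edge statement is
+// handled by addEdgeStmt, which creates B and the edge from A to B.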
+func (gen *generator) addStmt(dst Builder, stmt ast.Stmt) { + switch stmt := stmt.(type) { + case *ast.NodeStmt: + n := gen.node(dst, stmt.Node.ID) + if n, ok := n.(UnmarshalerAttr); ok { + for _, attr := range stmt.Attrs { + a := Attribute{ + Key: attr.Key, + Value: attr.Val, + } + if err := n.UnmarshalDOTAttr(a); err != nil { + panic(fmt.Errorf("unable to unmarshal node DOT attribute (%s=%s)", a.Key, a.Value)) + } + } + } + case *ast.EdgeStmt: + gen.addEdgeStmt(dst, stmt) + case *ast.AttrStmt: + // ignore. + case *ast.Attr: + // ignore. + case *ast.Subgraph: + for _, stmt := range stmt.Stmts { + gen.addStmt(dst, stmt) + } + default: + panic(fmt.Sprintf("unknown statement type %T", stmt)) + } +} + +// addEdgeStmt adds the given edge statement to the graph. +func (gen *generator) addEdgeStmt(dst Builder, e *ast.EdgeStmt) { + fs := gen.addVertex(dst, e.From) + ts := gen.addEdge(dst, e.To) + for _, f := range fs { + for _, t := range ts { + edge := dst.NewEdge(f, t) + if edge, ok := edge.(UnmarshalerAttr); ok { + for _, attr := range e.Attrs { + a := Attribute{ + Key: attr.Key, + Value: attr.Val, + } + if err := edge.UnmarshalDOTAttr(a); err != nil { + panic(fmt.Errorf("unable to unmarshal edge DOT attribute (%s=%s)", a.Key, a.Value)) + } + } + } + } + } +} + +// addVertex adds the given vertex to the graph, and returns its set of nodes. +func (gen *generator) addVertex(dst Builder, v ast.Vertex) []graph.Node { + switch v := v.(type) { + case *ast.Node: + n := gen.node(dst, v.ID) + return []graph.Node{n} + case *ast.Subgraph: + gen.pushSubgraph() + for _, stmt := range v.Stmts { + gen.addStmt(dst, stmt) + } + return gen.popSubgraph() + default: + panic(fmt.Sprintf("unknown vertex type %T", v)) + } +} + +// addEdge adds the given edge to the graph, and returns its set of nodes. +func (gen *generator) addEdge(dst Builder, to *ast.Edge) []graph.Node { + if !gen.directed && to.Directed { + panic(fmt.Errorf("directed edge to %v in undirected graph", to.Vertex)) + } + fs := gen.addVertex(dst, to.Vertex) + if to.To != nil { + ts := gen.addEdge(dst, to.To) + for _, f := range fs { + for _, t := range ts { + dst.NewEdge(f, t) + } + } + } + return fs +} + +// pushSubgraph pushes the node start index of the active subgraph onto the +// stack. +func (gen *generator) pushSubgraph() { + gen.subStart = append(gen.subStart, len(gen.subNodes)) +} + +// popSubgraph pops the node start index of the active subgraph from the stack, +// and returns the nodes processed since. +func (gen *generator) popSubgraph() []graph.Node { + // Get nodes processed since the subgraph became active. + start := gen.subStart[len(gen.subStart)-1] + // TODO: Figure out a better way to store subgraph nodes, so that duplicates + // may not occur. + nodes := unique(gen.subNodes[start:]) + // Remove subgraph from stack. + gen.subStart = gen.subStart[:len(gen.subStart)-1] + if len(gen.subStart) == 0 { + // Remove subgraph nodes when the bottom-most subgraph has been processed. + gen.subNodes = gen.subNodes[:0] + } + return nodes +} + +// unique returns the set of unique nodes contained within ns. +func unique(ns []graph.Node) []graph.Node { + var nodes []graph.Node + var set intsets.Sparse + for _, n := range ns { + id := n.ID() + if set.Has(id) { + // skip duplicate node + continue + } + set.Insert(id) + nodes = append(nodes, n) + } + return nodes +} + +// isInSubgraph reports whether the active context is within a subgraph, that is +// to be used as a vertex of an edge. 
+func (gen *generator) isInSubgraph() bool { + return len(gen.subStart) > 0 +} + +// appendSubgraphNode appends the given node to the slice of nodes processed +// within the context of a subgraph. +func (gen *generator) appendSubgraphNode(n graph.Node) { + gen.subNodes = append(gen.subNodes, n) +} diff --git a/graph/encoding/dot/decode_test.go b/graph/encoding/dot/decode_test.go new file mode 100644 index 00000000..3be74f52 --- /dev/null +++ b/graph/encoding/dot/decode_test.go @@ -0,0 +1,196 @@ +// Copyright ©2017 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package dot + +import ( + "fmt" + "testing" + + "github.com/gonum/graph" + "github.com/gonum/graph/simple" +) + +func TestRoundTrip(t *testing.T) { + golden := []struct { + want string + directed bool + }{ + { + want: directed, + directed: true, + }, + { + want: undirected, + directed: false, + }, + } + for i, g := range golden { + var dst Builder + if g.directed { + dst = newDotDirectedGraph() + } else { + dst = newDotUndirectedGraph() + } + data := []byte(g.want) + if err := Unmarshal(data, dst); err != nil { + t.Errorf("i=%d: unable to unmarshal DOT graph; %v", i, err) + continue + } + buf, err := Marshal(dst, "", "", "\t", false) + if err != nil { + t.Errorf("i=%d: unable to marshal graph; %v", i, dst) + continue + } + got := string(buf) + if got != g.want { + t.Errorf("i=%d: graph content mismatch; expected `%s`, got `%s`", i, g.want, got) + continue + } + } +} + +const directed = `digraph { + // Node definitions. + 0 [label="foo 2"]; + 1 [label="bar 2"]; + + // Edge definitions. + 0 -> 1 [label="baz 2"]; +}` + +const undirected = `graph { + // Node definitions. + 0 [label="foo 2"]; + 1 [label="bar 2"]; + + // Edge definitions. + 0 -- 1 [label="baz 2"]; +}` + +// Below follows a minimal implementation of a graph capable of validating the +// round-trip encoding and decoding of DOT graphs with nodes and edges +// containing DOT attributes. + +// dotDirectedGraph extends simple.DirectedGraph to add NewNode and NewEdge +// methods for creating user-defined nodes and edges. +// +// dotDirectedGraph implements the dot.Builder interface. +type dotDirectedGraph struct { + *simple.DirectedGraph +} + +// newDotDirectedGraph returns a new directed capable of creating user-defined +// nodes and edges. +func newDotDirectedGraph() *dotDirectedGraph { + return &dotDirectedGraph{DirectedGraph: simple.NewDirectedGraph(0, 0)} +} + +// NewNode adds a new node with a unique node ID to the graph. +func (g *dotDirectedGraph) NewNode() graph.Node { + n := &dotNode{Node: simple.Node(g.NewNodeID())} + g.AddNode(n) + return n +} + +// NewEdge adds a new edge from the source to the destination node to the graph, +// or returns the existing edge if already present. +func (g *dotDirectedGraph) NewEdge(from, to graph.Node) graph.Edge { + if e := g.Edge(from, to); e != nil { + return e + } + e := &dotEdge{Edge: simple.Edge{F: from, T: to}} + g.SetEdge(e) + return e +} + +// dotUndirectedGraph extends simple.UndirectedGraph to add NewNode and NewEdge +// methods for creating user-defined nodes and edges. +// +// dotUndirectedGraph implements the dot.Builder interface. +type dotUndirectedGraph struct { + *simple.UndirectedGraph +} + +// newDotUndirectedGraph returns a new undirected capable of creating user- +// defined nodes and edges. 
+func newDotUndirectedGraph() *dotUndirectedGraph { + return &dotUndirectedGraph{UndirectedGraph: simple.NewUndirectedGraph(0, 0)} +} + +// NewNode adds a new node with a unique node ID to the graph. +func (g *dotUndirectedGraph) NewNode() graph.Node { + n := &dotNode{Node: simple.Node(g.NewNodeID())} + g.AddNode(n) + return n +} + +// NewEdge adds a new edge from the source to the destination node to the graph, +// or returns the existing edge if already present. +func (g *dotUndirectedGraph) NewEdge(from, to graph.Node) graph.Edge { + if e := g.Edge(from, to); e != nil { + return e + } + e := &dotEdge{Edge: simple.Edge{F: from, T: to}} + g.SetEdge(e) + return e +} + +// dotNode extends simple.Node with a label field to test round-trip encoding +// and decoding of node DOT label attributes. +type dotNode struct { + simple.Node + // Node label. + Label string +} + +// UnmarshalDOTAttr decodes a single DOT attribute. +func (n *dotNode) UnmarshalDOTAttr(attr Attribute) error { + if attr.Key != "label" { + return fmt.Errorf("unable to unmarshal node DOT attribute with key %q", attr.Key) + } + n.Label = attr.Value + return nil +} + +// DOTAttributes returns the DOT attributes of the node. +func (n *dotNode) DOTAttributes() []Attribute { + if len(n.Label) == 0 { + return nil + } + attr := Attribute{ + Key: "label", + Value: n.Label, + } + return []Attribute{attr} +} + +// dotEdge extends simple.Edge with a label field to test round-trip encoding and +// decoding of edge DOT label attributes. +type dotEdge struct { + simple.Edge + // Edge label. + Label string +} + +// UnmarshalDOTAttr decodes a single DOT attribute. +func (e *dotEdge) UnmarshalDOTAttr(attr Attribute) error { + if attr.Key != "label" { + return fmt.Errorf("unable to unmarshal node DOT attribute with key %q", attr.Key) + } + e.Label = attr.Value + return nil +} + +// DOTAttributes returns the DOT attributes of the edge. +func (e *dotEdge) DOTAttributes() []Attribute { + if len(e.Label) == 0 { + return nil + } + attr := Attribute{ + Key: "label", + Value: e.Label, + } + return []Attribute{attr} +} diff --git a/graph/encoding/dot/dot.go b/graph/encoding/dot/dot.go new file mode 100644 index 00000000..7e3dc30c --- /dev/null +++ b/graph/encoding/dot/dot.go @@ -0,0 +1,378 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package dot implements GraphViz DOT marshaling of graphs. +// +// See the GraphViz DOT Guide and the DOT grammar for more information +// on using specific aspects of the DOT language: +// +// DOT Guide: http://www.graphviz.org/Documentation/dotguide.pdf +// +// DOT grammar: http://www.graphviz.org/doc/info/lang.html +// +package dot + +import ( + "bytes" + "errors" + "fmt" + "sort" + "strings" + + "github.com/gonum/graph" + "github.com/gonum/graph/internal/ordered" +) + +// Node is a DOT graph node. +type Node interface { + // DOTID returns a DOT node ID. + // + // An ID is one of the following: + // + // - a string of alphabetic ([a-zA-Z\x80-\xff]) characters, underscores ('_'). + // digits ([0-9]), not beginning with a digit. + // - a numeral [-]?(.[0-9]+ | [0-9]+(.[0-9]*)?). + // - a double-quoted string ("...") possibly containing escaped quotes (\"). + // - an HTML string (<...>). + DOTID() string +} + +// Attributers are graph.Graph values that specify top-level DOT +// attributes. 
+type Attributers interface { + DOTAttributers() (graph, node, edge Attributer) +} + +// Attributer defines graph.Node or graph.Edge values that can +// specify DOT attributes. +type Attributer interface { + DOTAttributes() []Attribute +} + +// Attribute is a DOT language key value attribute pair. +type Attribute struct { + Key, Value string +} + +// Porter defines the behavior of graph.Edge values that can specify +// connection ports for their end points. The returned port corresponds +// to the the DOT node port to be used by the edge, compass corresponds +// to DOT compass point to which the edge will be aimed. +type Porter interface { + FromPort() (port, compass string) + ToPort() (port, compass string) +} + +// Structurer represents a graph.Graph that can define subgraphs. +type Structurer interface { + Structure() []Graph +} + +// Graph wraps named graph.Graph values. +type Graph interface { + graph.Graph + DOTID() string +} + +// Subgrapher wraps graph.Node values that represent subgraphs. +type Subgrapher interface { + Subgraph() graph.Graph +} + +// Marshal returns the DOT encoding for the graph g, applying the prefix +// and indent to the encoding. Name is used to specify the graph name. If +// name is empty and g implements Graph, the returned string from DOTID +// will be used. If strict is true the output bytes will be prefixed with +// the DOT "strict" keyword. +// +// Graph serialization will work for a graph.Graph without modification, +// however, advanced GraphViz DOT features provided by Marshal depend on +// implementation of the Node, Attributer, Porter, Attributers, Structurer, +// Subgrapher and Graph interfaces. +func Marshal(g graph.Graph, name, prefix, indent string, strict bool) ([]byte, error) { + var p printer + p.indent = indent + p.prefix = prefix + p.visited = make(map[edge]bool) + if strict { + p.buf.WriteString("strict ") + } + err := p.print(g, name, false, false) + if err != nil { + return nil, err + } + return p.buf.Bytes(), nil +} + +type printer struct { + buf bytes.Buffer + + prefix string + indent string + depth int + + visited map[edge]bool + + err error +} + +type edge struct { + inGraph string + from, to int +} + +func (p *printer) print(g graph.Graph, name string, needsIndent, isSubgraph bool) error { + nodes := g.Nodes() + sort.Sort(ordered.ByID(nodes)) + + p.buf.WriteString(p.prefix) + if needsIndent { + for i := 0; i < p.depth; i++ { + p.buf.WriteString(p.indent) + } + } + _, isDirected := g.(graph.Directed) + if isSubgraph { + p.buf.WriteString("sub") + } else if isDirected { + p.buf.WriteString("di") + } + p.buf.WriteString("graph") + + if name == "" { + if g, ok := g.(Graph); ok { + name = g.DOTID() + } + } + if name != "" { + p.buf.WriteByte(' ') + p.buf.WriteString(name) + } + + p.openBlock(" {") + if a, ok := g.(Attributers); ok { + p.writeAttributeComplex(a) + } + if s, ok := g.(Structurer); ok { + for _, g := range s.Structure() { + _, subIsDirected := g.(graph.Directed) + if subIsDirected != isDirected { + return errors.New("dot: mismatched graph type") + } + p.buf.WriteByte('\n') + p.print(g, g.DOTID(), true, true) + } + } + + havePrintedNodeHeader := false + for _, n := range nodes { + if s, ok := n.(Subgrapher); ok { + // If the node is not linked to any other node + // the graph needs to be written now. 
+ if len(g.From(n)) == 0 { + g := s.Subgraph() + _, subIsDirected := g.(graph.Directed) + if subIsDirected != isDirected { + return errors.New("dot: mismatched graph type") + } + if !havePrintedNodeHeader { + p.newline() + p.buf.WriteString("// Node definitions.") + havePrintedNodeHeader = true + } + p.newline() + p.print(g, graphID(g, n), false, true) + } + continue + } + if !havePrintedNodeHeader { + p.newline() + p.buf.WriteString("// Node definitions.") + havePrintedNodeHeader = true + } + p.newline() + p.writeNode(n) + if a, ok := n.(Attributer); ok { + p.writeAttributeList(a) + } + p.buf.WriteByte(';') + } + + havePrintedEdgeHeader := false + for _, n := range nodes { + to := g.From(n) + sort.Sort(ordered.ByID(to)) + for _, t := range to { + if isDirected { + if p.visited[edge{inGraph: name, from: n.ID(), to: t.ID()}] { + continue + } + p.visited[edge{inGraph: name, from: n.ID(), to: t.ID()}] = true + } else { + if p.visited[edge{inGraph: name, from: n.ID(), to: t.ID()}] { + continue + } + p.visited[edge{inGraph: name, from: n.ID(), to: t.ID()}] = true + p.visited[edge{inGraph: name, from: t.ID(), to: n.ID()}] = true + } + + if !havePrintedEdgeHeader { + p.buf.WriteByte('\n') + p.buf.WriteString(strings.TrimRight(p.prefix, " \t\n")) // Trim whitespace suffix. + p.newline() + p.buf.WriteString("// Edge definitions.") + havePrintedEdgeHeader = true + } + p.newline() + + if s, ok := n.(Subgrapher); ok { + g := s.Subgraph() + _, subIsDirected := g.(graph.Directed) + if subIsDirected != isDirected { + return errors.New("dot: mismatched graph type") + } + p.print(g, graphID(g, n), false, true) + } else { + p.writeNode(n) + } + e, edgeIsPorter := g.Edge(n, t).(Porter) + if edgeIsPorter { + p.writePorts(e.FromPort()) + } + + if isDirected { + p.buf.WriteString(" -> ") + } else { + p.buf.WriteString(" -- ") + } + + if s, ok := t.(Subgrapher); ok { + g := s.Subgraph() + _, subIsDirected := g.(graph.Directed) + if subIsDirected != isDirected { + return errors.New("dot: mismatched graph type") + } + p.print(g, graphID(g, t), false, true) + } else { + p.writeNode(t) + } + if edgeIsPorter { + p.writePorts(e.ToPort()) + } + + if a, ok := g.Edge(n, t).(Attributer); ok { + p.writeAttributeList(a) + } + + p.buf.WriteByte(';') + } + } + p.closeBlock("}") + + return nil +} + +func (p *printer) writeNode(n graph.Node) { + p.buf.WriteString(nodeID(n)) +} + +func (p *printer) writePorts(port, cp string) { + if port != "" { + p.buf.WriteByte(':') + p.buf.WriteString(port) + } + if cp != "" { + p.buf.WriteByte(':') + p.buf.WriteString(cp) + } +} + +func nodeID(n graph.Node) string { + switch n := n.(type) { + case Node: + return n.DOTID() + default: + return fmt.Sprint(n.ID()) + } +} + +func graphID(g graph.Graph, n graph.Node) string { + switch g := g.(type) { + case Node: + return g.DOTID() + default: + return nodeID(n) + } +} + +func (p *printer) writeAttributeList(a Attributer) { + attributes := a.DOTAttributes() + switch len(attributes) { + case 0: + case 1: + p.buf.WriteString(" [") + p.buf.WriteString(attributes[0].Key) + p.buf.WriteByte('=') + p.buf.WriteString(attributes[0].Value) + p.buf.WriteString("]") + default: + p.openBlock(" [") + for _, att := range attributes { + p.newline() + p.buf.WriteString(att.Key) + p.buf.WriteByte('=') + p.buf.WriteString(att.Value) + } + p.closeBlock("]") + } +} + +var attType = []string{"graph", "node", "edge"} + +func (p *printer) writeAttributeComplex(ca Attributers) { + g, n, e := ca.DOTAttributers() + haveWrittenBlock := false + for i, a := range 
[]Attributer{g, n, e} { + attributes := a.DOTAttributes() + if len(attributes) == 0 { + continue + } + if haveWrittenBlock { + p.buf.WriteByte(';') + } + p.newline() + p.buf.WriteString(attType[i]) + p.openBlock(" [") + for _, att := range attributes { + p.newline() + p.buf.WriteString(att.Key) + p.buf.WriteByte('=') + p.buf.WriteString(att.Value) + } + p.closeBlock("]") + haveWrittenBlock = true + } + if haveWrittenBlock { + p.buf.WriteString(";\n") + } +} + +func (p *printer) newline() { + p.buf.WriteByte('\n') + p.buf.WriteString(p.prefix) + for i := 0; i < p.depth; i++ { + p.buf.WriteString(p.indent) + } +} + +func (p *printer) openBlock(b string) { + p.buf.WriteString(b) + p.depth++ +} + +func (p *printer) closeBlock(b string) { + p.depth-- + p.newline() + p.buf.WriteString(b) +} diff --git a/graph/encoding/dot/dot_test.go b/graph/encoding/dot/dot_test.go new file mode 100644 index 00000000..17d3c9d8 --- /dev/null +++ b/graph/encoding/dot/dot_test.go @@ -0,0 +1,1423 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package dot + +import ( + "math" + "testing" + + "github.com/gonum/graph" + "github.com/gonum/graph/simple" +) + +// set is an integer set. +type set map[int]struct{} + +func linksTo(i ...int) set { + if len(i) == 0 { + return nil + } + s := make(set) + for _, v := range i { + s[v] = struct{}{} + } + return s +} + +var ( + // Example graph from http://en.wikipedia.org/wiki/File:PageRanks-Example.svg 16:17, 8 July 2009 + // Node identities are rewritten here to use integers from 0 to match with the DOT output. + pageRankGraph = []set{ + 0: nil, + 1: linksTo(2), + 2: linksTo(1), + 3: linksTo(0, 1), + 4: linksTo(3, 1, 5), + 5: linksTo(1, 4), + 6: linksTo(1, 4), + 7: linksTo(1, 4), + 8: linksTo(1, 4), + 9: linksTo(4), + 10: linksTo(4), + } + + // Example graph from http://en.wikipedia.org/w/index.php?title=PageRank&oldid=659286279#Power_Method + powerMethodGraph = []set{ + 0: linksTo(1, 2), + 1: linksTo(3), + 2: linksTo(3, 4), + 3: linksTo(4), + 4: linksTo(0), + } +) + +func directedGraphFrom(g []set) graph.Directed { + dg := simple.NewDirectedGraph(0, math.Inf(1)) + for u, e := range g { + for v := range e { + dg.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v)}) + } + } + return dg +} + +func undirectedGraphFrom(g []set) graph.Graph { + dg := simple.NewUndirectedGraph(0, math.Inf(1)) + for u, e := range g { + for v := range e { + dg.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v)}) + } + } + return dg +} + +const alpha = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" + +type namedNode struct { + id int + name string +} + +func (n namedNode) ID() int { return n.id } +func (n namedNode) DOTID() string { return n.name } + +func directedNamedIDGraphFrom(g []set) graph.Directed { + dg := simple.NewDirectedGraph(0, math.Inf(1)) + for u, e := range g { + nu := namedNode{id: u, name: alpha[u : u+1]} + for v := range e { + nv := namedNode{id: v, name: alpha[v : v+1]} + dg.SetEdge(simple.Edge{F: nu, T: nv}) + } + } + return dg +} + +func undirectedNamedIDGraphFrom(g []set) graph.Graph { + dg := simple.NewUndirectedGraph(0, math.Inf(1)) + for u, e := range g { + nu := namedNode{id: u, name: alpha[u : u+1]} + for v := range e { + nv := namedNode{id: v, name: alpha[v : v+1]} + dg.SetEdge(simple.Edge{F: nu, T: nv}) + } + } + return dg +} + +type attrNode struct { + id int + name string + attr []Attribute +} + +func (n attrNode) ID() int { return n.id } +func (n attrNode) 
DOTAttributes() []Attribute { return n.attr } + +func directedNodeAttrGraphFrom(g []set, attr [][]Attribute) graph.Directed { + dg := simple.NewDirectedGraph(0, math.Inf(1)) + for u, e := range g { + var at []Attribute + if u < len(attr) { + at = attr[u] + } + nu := attrNode{id: u, attr: at} + for v := range e { + if v < len(attr) { + at = attr[v] + } + nv := attrNode{id: v, attr: at} + dg.SetEdge(simple.Edge{F: nu, T: nv}) + } + } + return dg +} + +func undirectedNodeAttrGraphFrom(g []set, attr [][]Attribute) graph.Graph { + dg := simple.NewUndirectedGraph(0, math.Inf(1)) + for u, e := range g { + var at []Attribute + if u < len(attr) { + at = attr[u] + } + nu := attrNode{id: u, attr: at} + for v := range e { + if v < len(attr) { + at = attr[v] + } + nv := attrNode{id: v, attr: at} + dg.SetEdge(simple.Edge{F: nu, T: nv}) + } + } + return dg +} + +type namedAttrNode struct { + id int + name string + attr []Attribute +} + +func (n namedAttrNode) ID() int { return n.id } +func (n namedAttrNode) DOTID() string { return n.name } +func (n namedAttrNode) DOTAttributes() []Attribute { return n.attr } + +func directedNamedIDNodeAttrGraphFrom(g []set, attr [][]Attribute) graph.Directed { + dg := simple.NewDirectedGraph(0, math.Inf(1)) + for u, e := range g { + var at []Attribute + if u < len(attr) { + at = attr[u] + } + nu := namedAttrNode{id: u, name: alpha[u : u+1], attr: at} + for v := range e { + if v < len(attr) { + at = attr[v] + } + nv := namedAttrNode{id: v, name: alpha[v : v+1], attr: at} + dg.SetEdge(simple.Edge{F: nu, T: nv}) + } + } + return dg +} + +func undirectedNamedIDNodeAttrGraphFrom(g []set, attr [][]Attribute) graph.Graph { + dg := simple.NewUndirectedGraph(0, math.Inf(1)) + for u, e := range g { + var at []Attribute + if u < len(attr) { + at = attr[u] + } + nu := namedAttrNode{id: u, name: alpha[u : u+1], attr: at} + for v := range e { + if v < len(attr) { + at = attr[v] + } + nv := namedAttrNode{id: v, name: alpha[v : v+1], attr: at} + dg.SetEdge(simple.Edge{F: nu, T: nv}) + } + } + return dg +} + +type attrEdge struct { + from, to graph.Node + + attr []Attribute +} + +func (e attrEdge) From() graph.Node { return e.from } +func (e attrEdge) To() graph.Node { return e.to } +func (e attrEdge) Weight() float64 { return 0 } +func (e attrEdge) DOTAttributes() []Attribute { return e.attr } + +func directedEdgeAttrGraphFrom(g []set, attr map[edge][]Attribute) graph.Directed { + dg := simple.NewDirectedGraph(0, math.Inf(1)) + for u, e := range g { + for v := range e { + dg.SetEdge(attrEdge{from: simple.Node(u), to: simple.Node(v), attr: attr[edge{from: u, to: v}]}) + } + } + return dg +} + +func undirectedEdgeAttrGraphFrom(g []set, attr map[edge][]Attribute) graph.Graph { + dg := simple.NewUndirectedGraph(0, math.Inf(1)) + for u, e := range g { + for v := range e { + dg.SetEdge(attrEdge{from: simple.Node(u), to: simple.Node(v), attr: attr[edge{from: u, to: v}]}) + } + } + return dg +} + +type portedEdge struct { + from, to graph.Node + + directed bool + + fromPort string + fromCompass string + toPort string + toCompass string +} + +func (e portedEdge) From() graph.Node { return e.from } +func (e portedEdge) To() graph.Node { return e.to } +func (e portedEdge) Weight() float64 { return 0 } + +// TODO(kortschak): Figure out a better way to handle the fact that +// headedness is an undefined concept in undirected graphs. We sort +// nodes by ID, so lower ID nodes are always from nodes in undirected +// graphs. 
We can probably do this in the printer, but I am leaving +// this here as a WARNING. +// Maybe the approach should be to document that for undirected graphs +// the low ID node should be returned by the FromPort and the high ID +// by the ToPort calls. +func (e portedEdge) FromPort() (port, compass string) { + return e.fromPort, e.fromCompass +} +func (e portedEdge) ToPort() (port, compass string) { + return e.toPort, e.toCompass +} + +func directedPortedAttrGraphFrom(g []set, attr [][]Attribute, ports map[edge]portedEdge) graph.Directed { + dg := simple.NewDirectedGraph(0, math.Inf(1)) + for u, e := range g { + var at []Attribute + if u < len(attr) { + at = attr[u] + } + nu := attrNode{id: u, attr: at} + for v := range e { + if v < len(attr) { + at = attr[v] + } + pe := ports[edge{from: u, to: v}] + pe.from = nu + pe.to = attrNode{id: v, attr: at} + dg.SetEdge(pe) + } + } + return dg +} + +func undirectedPortedAttrGraphFrom(g []set, attr [][]Attribute, ports map[edge]portedEdge) graph.Graph { + dg := simple.NewUndirectedGraph(0, math.Inf(1)) + for u, e := range g { + var at []Attribute + if u < len(attr) { + at = attr[u] + } + nu := attrNode{id: u, attr: at} + for v := range e { + if v < len(attr) { + at = attr[v] + } + pe := ports[edge{from: u, to: v}] + pe.from = nu + pe.to = attrNode{id: v, attr: at} + dg.SetEdge(pe) + } + } + return dg +} + +type graphAttributer struct { + graph.Graph + graph attributer + node attributer + edge attributer +} + +type attributer []Attribute + +func (a attributer) DOTAttributes() []Attribute { return a } + +func (g graphAttributer) DOTAttributers() (graph, node, edge Attributer) { + return g.graph, g.node, g.edge +} + +type structuredGraph struct { + *simple.UndirectedGraph + sub []Graph +} + +func undirectedStructuredGraphFrom(c []edge, g ...[]set) graph.Graph { + s := &structuredGraph{UndirectedGraph: simple.NewUndirectedGraph(0, math.Inf(1))} + var base int + for i, sg := range g { + sub := simple.NewUndirectedGraph(0, math.Inf(1)) + for u, e := range sg { + for v := range e { + ce := simple.Edge{F: simple.Node(u + base), T: simple.Node(v + base)} + sub.SetEdge(ce) + } + } + s.sub = append(s.sub, namedGraph{id: i, Graph: sub}) + base += len(sg) + } + for _, e := range c { + s.SetEdge(simple.Edge{F: simple.Node(e.from), T: simple.Node(e.to)}) + } + return s +} + +func (g structuredGraph) Structure() []Graph { + return g.sub +} + +type namedGraph struct { + id int + graph.Graph +} + +func (g namedGraph) DOTID() string { return alpha[g.id : g.id+1] } + +type subGraph struct { + id int + graph.Graph +} + +func (g subGraph) ID() int { return g.id } +func (g subGraph) Subgraph() graph.Graph { + return namedGraph{id: g.id, Graph: g.Graph} +} + +func undirectedSubGraphFrom(g []set, s map[int][]set) graph.Graph { + var base int + subs := make(map[int]subGraph) + for i, sg := range s { + sub := simple.NewUndirectedGraph(0, math.Inf(1)) + for u, e := range sg { + for v := range e { + ce := simple.Edge{F: simple.Node(u + base), T: simple.Node(v + base)} + sub.SetEdge(ce) + } + } + subs[i] = subGraph{id: i, Graph: sub} + base += len(sg) + } + + dg := simple.NewUndirectedGraph(0, math.Inf(1)) + for u, e := range g { + var nu graph.Node + if sg, ok := subs[u]; ok { + sg.id += base + nu = sg + } else { + nu = simple.Node(u + base) + } + for v := range e { + var nv graph.Node + if sg, ok := subs[v]; ok { + sg.id += base + nv = sg + } else { + nv = simple.Node(v + base) + } + dg.SetEdge(simple.Edge{F: nu, T: nv}) + } + } + return dg +} + +var encodeTests = []struct { + 
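+	// Each case marshals g with Marshal(g, name, prefix, "\t", strict)
+	// and compares the result against want.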
name string + g graph.Graph + strict bool + + prefix string + + want string +}{ + // Basic graph.Graph handling. + { + name: "PageRank", + g: directedGraphFrom(pageRankGraph), + + want: `digraph PageRank { + // Node definitions. + 0; + 1; + 2; + 3; + 4; + 5; + 6; + 7; + 8; + 9; + 10; + + // Edge definitions. + 1 -> 2; + 2 -> 1; + 3 -> 0; + 3 -> 1; + 4 -> 1; + 4 -> 3; + 4 -> 5; + 5 -> 1; + 5 -> 4; + 6 -> 1; + 6 -> 4; + 7 -> 1; + 7 -> 4; + 8 -> 1; + 8 -> 4; + 9 -> 4; + 10 -> 4; +}`, + }, + { + g: undirectedGraphFrom(pageRankGraph), + + want: `graph { + // Node definitions. + 0; + 1; + 2; + 3; + 4; + 5; + 6; + 7; + 8; + 9; + 10; + + // Edge definitions. + 0 -- 3; + 1 -- 2; + 1 -- 3; + 1 -- 4; + 1 -- 5; + 1 -- 6; + 1 -- 7; + 1 -- 8; + 3 -- 4; + 4 -- 5; + 4 -- 6; + 4 -- 7; + 4 -- 8; + 4 -- 9; + 4 -- 10; +}`, + }, + { + g: directedGraphFrom(powerMethodGraph), + + want: `digraph { + // Node definitions. + 0; + 1; + 2; + 3; + 4; + + // Edge definitions. + 0 -> 1; + 0 -> 2; + 1 -> 3; + 2 -> 3; + 2 -> 4; + 3 -> 4; + 4 -> 0; +}`, + }, + { + g: undirectedGraphFrom(powerMethodGraph), + + want: `graph { + // Node definitions. + 0; + 1; + 2; + 3; + 4; + + // Edge definitions. + 0 -- 1; + 0 -- 2; + 0 -- 4; + 1 -- 3; + 2 -- 3; + 2 -- 4; + 3 -- 4; +}`, + }, + { + g: undirectedGraphFrom(powerMethodGraph), + prefix: "# ", + + want: `# graph { +# // Node definitions. +# 0; +# 1; +# 2; +# 3; +# 4; +# +# // Edge definitions. +# 0 -- 1; +# 0 -- 2; +# 0 -- 4; +# 1 -- 3; +# 2 -- 3; +# 2 -- 4; +# 3 -- 4; +# }`, + }, + + // Names named nodes. + { + name: "PageRank", + g: directedNamedIDGraphFrom(pageRankGraph), + + want: `digraph PageRank { + // Node definitions. + A; + B; + C; + D; + E; + F; + G; + H; + I; + J; + K; + + // Edge definitions. + B -> C; + C -> B; + D -> A; + D -> B; + E -> B; + E -> D; + E -> F; + F -> B; + F -> E; + G -> B; + G -> E; + H -> B; + H -> E; + I -> B; + I -> E; + J -> E; + K -> E; +}`, + }, + { + g: undirectedNamedIDGraphFrom(pageRankGraph), + + want: `graph { + // Node definitions. + A; + B; + C; + D; + E; + F; + G; + H; + I; + J; + K; + + // Edge definitions. + A -- D; + B -- C; + B -- D; + B -- E; + B -- F; + B -- G; + B -- H; + B -- I; + D -- E; + E -- F; + E -- G; + E -- H; + E -- I; + E -- J; + E -- K; +}`, + }, + { + g: directedNamedIDGraphFrom(powerMethodGraph), + + want: `digraph { + // Node definitions. + A; + B; + C; + D; + E; + + // Edge definitions. + A -> B; + A -> C; + B -> D; + C -> D; + C -> E; + D -> E; + E -> A; +}`, + }, + { + g: undirectedNamedIDGraphFrom(powerMethodGraph), + + want: `graph { + // Node definitions. + A; + B; + C; + D; + E; + + // Edge definitions. + A -- B; + A -- C; + A -- E; + B -- D; + C -- D; + C -- E; + D -- E; +}`, + }, + { + g: undirectedNamedIDGraphFrom(powerMethodGraph), + prefix: "# ", + + want: `# graph { +# // Node definitions. +# A; +# B; +# C; +# D; +# E; +# +# // Edge definitions. +# A -- B; +# A -- C; +# A -- E; +# B -- D; +# C -- D; +# C -- E; +# D -- E; +# }`, + }, + + // Handling nodes with attributes. + { + g: directedNodeAttrGraphFrom(powerMethodGraph, nil), + + want: `digraph { + // Node definitions. + 0; + 1; + 2; + 3; + 4; + + // Edge definitions. + 0 -> 1; + 0 -> 2; + 1 -> 3; + 2 -> 3; + 2 -> 4; + 3 -> 4; + 4 -> 0; +}`, + }, + { + g: undirectedNodeAttrGraphFrom(powerMethodGraph, nil), + + want: `graph { + // Node definitions. + 0; + 1; + 2; + 3; + 4; + + // Edge definitions. 
+ 0 -- 1; + 0 -- 2; + 0 -- 4; + 1 -- 3; + 2 -- 3; + 2 -- 4; + 3 -- 4; +}`, + }, + { + g: directedNodeAttrGraphFrom(powerMethodGraph, [][]Attribute{ + 2: {{"fontsize", "16"}, {"shape", "ellipse"}}, + 4: {}, + }), + + want: `digraph { + // Node definitions. + 0; + 1; + 2 [ + fontsize=16 + shape=ellipse + ]; + 3; + 4; + + // Edge definitions. + 0 -> 1; + 0 -> 2; + 1 -> 3; + 2 -> 3; + 2 -> 4; + 3 -> 4; + 4 -> 0; +}`, + }, + { + g: undirectedNodeAttrGraphFrom(powerMethodGraph, [][]Attribute{ + 2: {{"fontsize", "16"}, {"shape", "ellipse"}}, + 4: {}, + }), + + want: `graph { + // Node definitions. + 0; + 1; + 2 [ + fontsize=16 + shape=ellipse + ]; + 3; + 4; + + // Edge definitions. + 0 -- 1; + 0 -- 2; + 0 -- 4; + 1 -- 3; + 2 -- 3; + 2 -- 4; + 3 -- 4; +}`, + }, + { + g: directedNamedIDNodeAttrGraphFrom(powerMethodGraph, [][]Attribute{ + 2: {{"fontsize", "16"}, {"shape", "ellipse"}}, + 4: {}, + }), + + want: `digraph { + // Node definitions. + A; + B; + C [ + fontsize=16 + shape=ellipse + ]; + D; + E; + + // Edge definitions. + A -> B; + A -> C; + B -> D; + C -> D; + C -> E; + D -> E; + E -> A; +}`, + }, + { + g: undirectedNamedIDNodeAttrGraphFrom(powerMethodGraph, [][]Attribute{ + 0: nil, + 1: nil, + 2: {{"fontsize", "16"}, {"shape", "ellipse"}}, + 3: nil, + 4: {}, + }), + + want: `graph { + // Node definitions. + A; + B; + C [ + fontsize=16 + shape=ellipse + ]; + D; + E; + + // Edge definitions. + A -- B; + A -- C; + A -- E; + B -- D; + C -- D; + C -- E; + D -- E; +}`, + }, + + // Handling edge with attributes. + { + g: directedEdgeAttrGraphFrom(powerMethodGraph, nil), + + want: `digraph { + // Node definitions. + 0; + 1; + 2; + 3; + 4; + + // Edge definitions. + 0 -> 1; + 0 -> 2; + 1 -> 3; + 2 -> 3; + 2 -> 4; + 3 -> 4; + 4 -> 0; +}`, + }, + { + g: undirectedEdgeAttrGraphFrom(powerMethodGraph, nil), + + want: `graph { + // Node definitions. + 0; + 1; + 2; + 3; + 4; + + // Edge definitions. + 0 -- 1; + 0 -- 2; + 0 -- 4; + 1 -- 3; + 2 -- 3; + 2 -- 4; + 3 -- 4; +}`, + }, + { + g: directedEdgeAttrGraphFrom(powerMethodGraph, map[edge][]Attribute{ + edge{from: 0, to: 2}: {{"label", `"???"`}, {"style", "dashed"}}, + edge{from: 2, to: 4}: {}, + edge{from: 3, to: 4}: {{"color", "red"}}, + }), + + want: `digraph { + // Node definitions. + 0; + 1; + 2; + 3; + 4; + + // Edge definitions. + 0 -> 1; + 0 -> 2 [ + label="???" + style=dashed + ]; + 1 -> 3; + 2 -> 3; + 2 -> 4; + 3 -> 4 [color=red]; + 4 -> 0; +}`, + }, + { + g: undirectedEdgeAttrGraphFrom(powerMethodGraph, map[edge][]Attribute{ + edge{from: 0, to: 2}: {{"label", `"???"`}, {"style", "dashed"}}, + edge{from: 2, to: 4}: {}, + edge{from: 3, to: 4}: {{"color", "red"}}, + }), + + want: `graph { + // Node definitions. + 0; + 1; + 2; + 3; + 4; + + // Edge definitions. + 0 -- 1; + 0 -- 2 [ + label="???" + style=dashed + ]; + 0 -- 4; + 1 -- 3; + 2 -- 3; + 2 -- 4; + 3 -- 4 [color=red]; +}`, + }, + + // Handling nodes with ports. + { + g: directedPortedAttrGraphFrom(powerMethodGraph, nil, nil), + + want: `digraph { + // Node definitions. + 0; + 1; + 2; + 3; + 4; + + // Edge definitions. + 0 -> 1; + 0 -> 2; + 1 -> 3; + 2 -> 3; + 2 -> 4; + 3 -> 4; + 4 -> 0; +}`, + }, + { + g: undirectedPortedAttrGraphFrom(powerMethodGraph, nil, nil), + + want: `graph { + // Node definitions. + 0; + 1; + 2; + 3; + 4; + + // Edge definitions. 
+ 0 -- 1; + 0 -- 2; + 0 -- 4; + 1 -- 3; + 2 -- 3; + 2 -- 4; + 3 -- 4; +}`, + }, + { + g: directedPortedAttrGraphFrom(powerMethodGraph, + [][]Attribute{ + 2: {{"shape", "record"}, {"label", `"English|German"`}}, + 4: {{"shape", "record"}, {"label", `"English|German"`}}, + }, + map[edge]portedEdge{ + edge{from: 0, to: 1}: {fromCompass: "s"}, + edge{from: 0, to: 2}: {fromCompass: "s", toPort: "Zwei", toCompass: "e"}, + edge{from: 2, to: 3}: {fromPort: "Zwei", fromCompass: "e"}, + edge{from: 2, to: 4}: {fromPort: "Two", fromCompass: "w", toPort: "Four", toCompass: "w"}, + edge{from: 3, to: 4}: {toPort: "Four", toCompass: "w"}, + edge{from: 4, to: 0}: {fromPort: "Four", fromCompass: "_", toCompass: "s"}, + }, + ), + + want: `digraph { + // Node definitions. + 0; + 1; + 2 [ + shape=record + label="English|German" + ]; + 3; + 4 [ + shape=record + label="English|German" + ]; + + // Edge definitions. + 0:s -> 1; + 0:s -> 2:Zwei:e; + 1 -> 3; + 2:Zwei:e -> 3; + 2:Two:w -> 4:Four:w; + 3 -> 4:Four:w; + 4:Four:_ -> 0:s; +}`, + }, + { + g: undirectedPortedAttrGraphFrom(powerMethodGraph, + [][]Attribute{ + 2: {{"shape", "record"}, {"label", `"English|German"`}}, + 4: {{"shape", "record"}, {"label", `"English|German"`}}, + }, + map[edge]portedEdge{ + edge{from: 0, to: 1}: {fromCompass: "s"}, + edge{from: 0, to: 2}: {fromCompass: "s", toPort: "Zwei", toCompass: "e"}, + edge{from: 2, to: 3}: {fromPort: "Zwei", fromCompass: "e"}, + edge{from: 2, to: 4}: {fromPort: "Two", fromCompass: "w", toPort: "Four", toCompass: "w"}, + edge{from: 3, to: 4}: {toPort: "Four", toCompass: "w"}, + + // This definition is reversed (see comment above at portedEdge + // definition) so that 4 gets the from port. This is a result + // of the fact that we sort nodes by ID, so the lower node + // will be always be printed first when the graph is undirected, + // thus becoming the from port, but we define the edges here + // from a directed adjacency list. + edge{from: 4, to: 0}: {fromCompass: "s", toPort: "Four", toCompass: "_"}, + }, + ), + + want: `graph { + // Node definitions. + 0; + 1; + 2 [ + shape=record + label="English|German" + ]; + 3; + 4 [ + shape=record + label="English|German" + ]; + + // Edge definitions. + 0:s -- 1; + 0:s -- 2:Zwei:e; + 0:s -- 4:Four:_; + 1 -- 3; + 2:Zwei:e -- 3; + 2:Two:w -- 4:Four:w; + 3 -- 4:Four:w; +}`, + }, + + // Handling graph attributes. + { + g: graphAttributer{Graph: undirectedEdgeAttrGraphFrom(powerMethodGraph, map[edge][]Attribute{ + edge{from: 0, to: 2}: {{"label", `"???"`}, {"style", "dashed"}}, + edge{from: 2, to: 4}: {}, + edge{from: 3, to: 4}: {{"color", "red"}}, + })}, + + want: `graph { + // Node definitions. + 0; + 1; + 2; + 3; + 4; + + // Edge definitions. + 0 -- 1; + 0 -- 2 [ + label="???" + style=dashed + ]; + 0 -- 4; + 1 -- 3; + 2 -- 3; + 2 -- 4; + 3 -- 4 [color=red]; +}`, + }, + { + g: graphAttributer{Graph: undirectedEdgeAttrGraphFrom(powerMethodGraph, map[edge][]Attribute{ + edge{from: 0, to: 2}: {{"label", `"???"`}, {"style", "dashed"}}, + edge{from: 2, to: 4}: {}, + edge{from: 3, to: 4}: {{"color", "red"}}, + }), + graph: []Attribute{{"rankdir", `"LR"`}}, + node: []Attribute{{"fontsize", "16"}, {"shape", "ellipse"}}, + }, + + want: `graph { + graph [ + rankdir="LR" + ]; + node [ + fontsize=16 + shape=ellipse + ]; + + // Node definitions. + 0; + 1; + 2; + 3; + 4; + + // Edge definitions. + 0 -- 1; + 0 -- 2 [ + label="???" + style=dashed + ]; + 0 -- 4; + 1 -- 3; + 2 -- 3; + 2 -- 4; + 3 -- 4 [color=red]; +}`, + }, + + // Handling structured graphs. 
+ { + g: undirectedStructuredGraphFrom(nil, powerMethodGraph, pageRankGraph), + + want: `graph { + subgraph A { + // Node definitions. + 0; + 1; + 2; + 3; + 4; + + // Edge definitions. + 0 -- 1; + 0 -- 2; + 0 -- 4; + 1 -- 3; + 2 -- 3; + 2 -- 4; + 3 -- 4; + } + subgraph B { + // Node definitions. + 5; + 6; + 7; + 8; + 9; + 10; + 11; + 12; + 13; + 14; + 15; + + // Edge definitions. + 5 -- 8; + 6 -- 7; + 6 -- 8; + 6 -- 9; + 6 -- 10; + 6 -- 11; + 6 -- 12; + 6 -- 13; + 8 -- 9; + 9 -- 10; + 9 -- 11; + 9 -- 12; + 9 -- 13; + 9 -- 14; + 9 -- 15; + } +}`, + }, + { + g: undirectedStructuredGraphFrom([]edge{{from: 0, to: 9}}, powerMethodGraph, pageRankGraph), + + want: `graph { + subgraph A { + // Node definitions. + 0; + 1; + 2; + 3; + 4; + + // Edge definitions. + 0 -- 1; + 0 -- 2; + 0 -- 4; + 1 -- 3; + 2 -- 3; + 2 -- 4; + 3 -- 4; + } + subgraph B { + // Node definitions. + 5; + 6; + 7; + 8; + 9; + 10; + 11; + 12; + 13; + 14; + 15; + + // Edge definitions. + 5 -- 8; + 6 -- 7; + 6 -- 8; + 6 -- 9; + 6 -- 10; + 6 -- 11; + 6 -- 12; + 6 -- 13; + 8 -- 9; + 9 -- 10; + 9 -- 11; + 9 -- 12; + 9 -- 13; + 9 -- 14; + 9 -- 15; + } + // Node definitions. + 0; + 9; + + // Edge definitions. + 0 -- 9; +}`, + }, + + // Handling subgraphs. + { + g: undirectedSubGraphFrom(pageRankGraph, map[int][]set{2: powerMethodGraph}), + + want: `graph { + // Node definitions. + 5; + 6; + 8; + 9; + 10; + 11; + 12; + 13; + 14; + 15; + + // Edge definitions. + 5 -- 8; + 6 -- subgraph H { + // Node definitions. + 0; + 1; + 2; + 3; + 4; + + // Edge definitions. + 0 -- 1; + 0 -- 2; + 0 -- 4; + 1 -- 3; + 2 -- 3; + 2 -- 4; + 3 -- 4; + }; + 6 -- 8; + 6 -- 9; + 6 -- 10; + 6 -- 11; + 6 -- 12; + 6 -- 13; + 8 -- 9; + 9 -- 10; + 9 -- 11; + 9 -- 12; + 9 -- 13; + 9 -- 14; + 9 -- 15; +}`, + }, + { + name: "H", + g: undirectedSubGraphFrom(pageRankGraph, map[int][]set{1: powerMethodGraph}), + strict: true, + + want: `strict graph H { + // Node definitions. + 5; + 7; + 8; + 9; + 10; + 11; + 12; + 13; + 14; + 15; + + // Edge definitions. + 5 -- 8; + subgraph G { + // Node definitions. + 0; + 1; + 2; + 3; + 4; + + // Edge definitions. + 0 -- 1; + 0 -- 2; + 0 -- 4; + 1 -- 3; + 2 -- 3; + 2 -- 4; + 3 -- 4; + } -- 7; + subgraph G { + // Node definitions. + 0; + 1; + 2; + 3; + 4; + } -- 8; + subgraph G { + // Node definitions. + 0; + 1; + 2; + 3; + 4; + } -- 9; + subgraph G { + // Node definitions. + 0; + 1; + 2; + 3; + 4; + } -- 10; + subgraph G { + // Node definitions. + 0; + 1; + 2; + 3; + 4; + } -- 11; + subgraph G { + // Node definitions. + 0; + 1; + 2; + 3; + 4; + } -- 12; + subgraph G { + // Node definitions. 
+ 0; + 1; + 2; + 3; + 4; + } -- 13; + 8 -- 9; + 9 -- 10; + 9 -- 11; + 9 -- 12; + 9 -- 13; + 9 -- 14; + 9 -- 15; +}`, + }, +} + +func TestEncode(t *testing.T) { + for i, test := range encodeTests { + got, err := Marshal(test.g, test.name, test.prefix, "\t", test.strict) + if err != nil { + t.Errorf("unexpected error: %v", err) + continue + } + if string(got) != test.want { + t.Errorf("unexpected DOT result for test %d:\ngot: %s\nwant:%s", i, got, test.want) + } + } +} diff --git a/graph/ex/fdpclust/gn.go b/graph/ex/fdpclust/gn.go new file mode 100644 index 00000000..986fa9d8 --- /dev/null +++ b/graph/ex/fdpclust/gn.go @@ -0,0 +1,278 @@ +package main + +import ( + "github.com/gonum/graph" + "github.com/gonum/graph/simple" +) + +type GraphNode struct { + id int + neighbors []graph.Node + roots []*GraphNode +} + +func (g *GraphNode) Has(n graph.Node) bool { + if n.ID() == g.id { + return true + } + + visited := map[int]struct{}{g.id: struct{}{}} + for _, root := range g.roots { + if root.ID() == n.ID() { + return true + } + + if root.has(n, visited) { + return true + } + } + + for _, neigh := range g.neighbors { + if neigh.ID() == n.ID() { + return true + } + + if gn, ok := neigh.(*GraphNode); ok { + if gn.has(n, visited) { + return true + } + } + } + + return false +} + +func (g *GraphNode) has(n graph.Node, visited map[int]struct{}) bool { + for _, root := range g.roots { + if _, ok := visited[root.ID()]; ok { + continue + } + + visited[root.ID()] = struct{}{} + if root.ID() == n.ID() { + return true + } + + if root.has(n, visited) { + return true + } + + } + + for _, neigh := range g.neighbors { + if _, ok := visited[neigh.ID()]; ok { + continue + } + + visited[neigh.ID()] = struct{}{} + if neigh.ID() == n.ID() { + return true + } + + if gn, ok := neigh.(*GraphNode); ok { + if gn.has(n, visited) { + return true + } + } + + } + + return false +} + +func (g *GraphNode) Nodes() []graph.Node { + toReturn := []graph.Node{g} + visited := map[int]struct{}{g.id: struct{}{}} + + for _, root := range g.roots { + toReturn = append(toReturn, root) + visited[root.ID()] = struct{}{} + + toReturn = root.nodes(toReturn, visited) + } + + for _, neigh := range g.neighbors { + toReturn = append(toReturn, neigh) + visited[neigh.ID()] = struct{}{} + + if gn, ok := neigh.(*GraphNode); ok { + toReturn = gn.nodes(toReturn, visited) + } + } + + return toReturn +} + +func (g *GraphNode) nodes(list []graph.Node, visited map[int]struct{}) []graph.Node { + for _, root := range g.roots { + if _, ok := visited[root.ID()]; ok { + continue + } + visited[root.ID()] = struct{}{} + list = append(list, graph.Node(root)) + + list = root.nodes(list, visited) + } + + for _, neigh := range g.neighbors { + if _, ok := visited[neigh.ID()]; ok { + continue + } + + list = append(list, neigh) + if gn, ok := neigh.(*GraphNode); ok { + list = gn.nodes(list, visited) + } + } + + return list +} + +func (g *GraphNode) From(n graph.Node) []graph.Node { + if n.ID() == g.ID() { + return g.neighbors + } + + visited := map[int]struct{}{g.id: struct{}{}} + for _, root := range g.roots { + visited[root.ID()] = struct{}{} + + if result := root.findNeighbors(n, visited); result != nil { + return result + } + } + + for _, neigh := range g.neighbors { + visited[neigh.ID()] = struct{}{} + + if gn, ok := neigh.(*GraphNode); ok { + if result := gn.findNeighbors(n, visited); result != nil { + return result + } + } + } + + return nil +} + +func (g *GraphNode) findNeighbors(n graph.Node, visited map[int]struct{}) []graph.Node { + if n.ID() == g.ID() { + 
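+		// The queried node is this node, so its stored neighbors answer the call.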
return g.neighbors + } + + for _, root := range g.roots { + if _, ok := visited[root.ID()]; ok { + continue + } + visited[root.ID()] = struct{}{} + + if result := root.findNeighbors(n, visited); result != nil { + return result + } + } + + for _, neigh := range g.neighbors { + if _, ok := visited[neigh.ID()]; ok { + continue + } + visited[neigh.ID()] = struct{}{} + + if gn, ok := neigh.(*GraphNode); ok { + if result := gn.findNeighbors(n, visited); result != nil { + return result + } + } + } + + return nil +} + +func (g *GraphNode) HasEdgeBetween(u, v graph.Node) bool { + return g.EdgeBetween(u, v) != nil +} + +func (g *GraphNode) Edge(u, v graph.Node) graph.Edge { + return g.EdgeBetween(u, v) +} + +func (g *GraphNode) EdgeBetween(u, v graph.Node) graph.Edge { + if u.ID() == g.id || v.ID() == g.id { + for _, neigh := range g.neighbors { + if neigh.ID() == u.ID() || neigh.ID() == v.ID() { + return simple.Edge{F: g, T: neigh} + } + } + return nil + } + + visited := map[int]struct{}{g.id: struct{}{}} + for _, root := range g.roots { + visited[root.ID()] = struct{}{} + if result := root.edgeBetween(u, v, visited); result != nil { + return result + } + } + + for _, neigh := range g.neighbors { + visited[neigh.ID()] = struct{}{} + if gn, ok := neigh.(*GraphNode); ok { + if result := gn.edgeBetween(u, v, visited); result != nil { + return result + } + } + } + + return nil +} + +func (g *GraphNode) edgeBetween(u, v graph.Node, visited map[int]struct{}) graph.Edge { + if u.ID() == g.id || v.ID() == g.id { + for _, neigh := range g.neighbors { + if neigh.ID() == u.ID() || neigh.ID() == v.ID() { + return simple.Edge{F: g, T: neigh} + } + } + return nil + } + + for _, root := range g.roots { + if _, ok := visited[root.ID()]; ok { + continue + } + visited[root.ID()] = struct{}{} + if result := root.edgeBetween(u, v, visited); result != nil { + return result + } + } + + for _, neigh := range g.neighbors { + if _, ok := visited[neigh.ID()]; ok { + continue + } + + visited[neigh.ID()] = struct{}{} + if gn, ok := neigh.(*GraphNode); ok { + if result := gn.edgeBetween(u, v, visited); result != nil { + return result + } + } + } + + return nil +} + +func (g *GraphNode) ID() int { + return g.id +} + +func (g *GraphNode) AddNeighbor(n *GraphNode) { + g.neighbors = append(g.neighbors, graph.Node(n)) +} + +func (g *GraphNode) AddRoot(n *GraphNode) { + g.roots = append(g.roots, n) +} + +func NewGraphNode(id int) *GraphNode { + return &GraphNode{id: id, neighbors: make([]graph.Node, 0), roots: make([]*GraphNode, 0)} +} diff --git a/graph/ex/fdpclust/main.go b/graph/ex/fdpclust/main.go new file mode 100644 index 00000000..78ad275f --- /dev/null +++ b/graph/ex/fdpclust/main.go @@ -0,0 +1,75 @@ +package main + +import ( + "fmt" + + "github.com/gonum/graph" + "github.com/gonum/graph/topo" +) + +func main() { + // graph G { + G := NewGraphNode(0) + // e + e := NewGraphNode(1) + + // subgraph clusterA { + clusterA := NewGraphNode(2) + + // a -- b + a := NewGraphNode(3) + b := NewGraphNode(4) + a.AddNeighbor(b) + b.AddNeighbor(a) + clusterA.AddRoot(a) + clusterA.AddRoot(b) + + // subgraph clusterC { + clusterC := NewGraphNode(5) + // C -- D + C := NewGraphNode(6) + D := NewGraphNode(7) + C.AddNeighbor(D) + D.AddNeighbor(C) + + clusterC.AddRoot(C) + clusterC.AddRoot(D) + // } + clusterA.AddRoot(clusterC) + // } + + // subgraph clusterB { + clusterB := NewGraphNode(8) + + // d -- f + d := NewGraphNode(9) + f := NewGraphNode(10) + d.AddNeighbor(f) + f.AddNeighbor(d) + clusterB.AddRoot(d) + clusterB.AddRoot(f) + // } + + // d 
-- D + d.AddNeighbor(D) + D.AddNeighbor(d) + + // e -- clusterB + e.AddNeighbor(clusterB) + clusterB.AddNeighbor(e) + + // clusterC -- clusterB + clusterC.AddNeighbor(clusterB) + clusterB.AddNeighbor(clusterC) + + G.AddRoot(e) + G.AddRoot(clusterA) + G.AddRoot(clusterB) + // } + + if !topo.IsPathIn(G, []graph.Node{C, D, d, f}) { + fmt.Println("Not working!") + } else { + fmt.Println("Working!") + } +} diff --git a/graph/formats/dot/README.md b/graph/formats/dot/README.md new file mode 100644 index 00000000..6949ce03 --- /dev/null +++ b/graph/formats/dot/README.md @@ -0,0 +1,9 @@ +# formats/dot + +## License + +The source code and any original content of the formats/dot directory is released under [Public Domain Dedication](https://creativecommons.org/publicdomain/zero/1.0/). + +The source code is also licensed under the gonum license, and users are free to choice the license which suits their needs. + +Please see github.com/gonum/license for general license information, contributors, authors, etc on the Gonum suite of packages. diff --git a/graph/formats/dot/ast/ast.go b/graph/formats/dot/ast/ast.go new file mode 100644 index 00000000..a8ac7bf8 --- /dev/null +++ b/graph/formats/dot/ast/ast.go @@ -0,0 +1,408 @@ +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. + +// Package ast declares the types used to represent abstract syntax trees of +// Graphviz DOT graphs. +package ast + +import ( + "bytes" + "fmt" +) + +// === [ File ] ================================================================ + +// A File represents a DOT file. +// +// Examples. +// +// digraph G { +// A -> B +// } +// graph H { +// C - D +// } +type File struct { + // Graphs. + Graphs []*Graph +} + +// String returns the string representation of the file. +func (f *File) String() string { + buf := new(bytes.Buffer) + for i, graph := range f.Graphs { + if i != 0 { + buf.WriteString("\n") + } + buf.WriteString(graph.String()) + } + return buf.String() +} + +// === [ Graphs ] ============================================================== + +// A Graph represents a directed or an undirected graph. +// +// Examples. +// +// digraph G { +// A -> {B C} +// B -> C +// } +type Graph struct { + // Strict graph; multi-edges forbidden. + Strict bool + // Directed graph. + Directed bool + // Graph ID; or empty if anonymous. + ID string + // Graph statements. + Stmts []Stmt +} + +// String returns the string representation of the graph. +func (g *Graph) String() string { + buf := new(bytes.Buffer) + if g.Strict { + buf.WriteString("strict ") + } + if g.Directed { + buf.WriteString("digraph ") + } else { + buf.WriteString("graph ") + } + if len(g.ID) > 0 { + fmt.Fprintf(buf, "%s ", g.ID) + } + buf.WriteString("{\n") + for _, stmt := range g.Stmts { + fmt.Fprintf(buf, "\t%s\n", stmt) + } + buf.WriteString("}") + return buf.String() +} + +// === [ Statements ] ========================================================== + +// A Stmt represents a statement, and has one of the following underlying types. +// +// *NodeStmt +// *EdgeStmt +// *AttrStmt +// *Attr +// *Subgraph +type Stmt interface { + fmt.Stringer + // isStmt ensures that only statements can be assigned to the Stmt interface. 
+ isStmt() +} + +// --- [ Node statement ] ------------------------------------------------------ + +// A NodeStmt represents a node statement. +// +// Examples. +// +// A [color=blue] +type NodeStmt struct { + // Node. + Node *Node + // Node attributes. + Attrs []*Attr +} + +// String returns the string representation of the node statement. +func (e *NodeStmt) String() string { + buf := new(bytes.Buffer) + buf.WriteString(e.Node.String()) + if len(e.Attrs) > 0 { + buf.WriteString(" [") + for i, attr := range e.Attrs { + if i != 0 { + buf.WriteString(" ") + } + buf.WriteString(attr.String()) + } + buf.WriteString("]") + } + return buf.String() +} + +// --- [ Edge statement ] ------------------------------------------------------ + +// An EdgeStmt represents an edge statement. +// +// Examples. +// +// A -> B +// A -> {B C} +// A -> B -> C +type EdgeStmt struct { + // Source vertex. + From Vertex + // Outgoing edge. + To *Edge + // Edge attributes. + Attrs []*Attr +} + +// String returns the string representation of the edge statement. +func (e *EdgeStmt) String() string { + buf := new(bytes.Buffer) + fmt.Fprintf(buf, "%s %s", e.From, e.To) + if len(e.Attrs) > 0 { + buf.WriteString(" [") + for i, attr := range e.Attrs { + if i != 0 { + buf.WriteString(" ") + } + buf.WriteString(attr.String()) + } + buf.WriteString("]") + } + return buf.String() +} + +// An Edge represents an edge between two vertices. +type Edge struct { + // Directed edge. + Directed bool + // Destination vertex. + Vertex Vertex + // Outgoing edge; or nil if none. + To *Edge +} + +// String returns the string representation of the edge. +func (e *Edge) String() string { + op := "--" + if e.Directed { + op = "->" + } + if e.To != nil { + return fmt.Sprintf("%s %s %s", op, e.Vertex, e.To) + } + return fmt.Sprintf("%s %s", op, e.Vertex) +} + +// --- [ Attribute statement ] ------------------------------------------------- + +// An AttrStmt represents an attribute statement. +// +// Examples. +// +// graph [rankdir=LR] +// node [color=blue fillcolor=red] +// edge [minlen=1] +type AttrStmt struct { + // Graph component kind to which the attributes are assigned. + Kind Kind + // Attributes. + Attrs []*Attr +} + +// String returns the string representation of the attribute statement. +func (a *AttrStmt) String() string { + buf := new(bytes.Buffer) + fmt.Fprintf(buf, "%s [", a.Kind) + for i, attr := range a.Attrs { + if i != 0 { + buf.WriteString(" ") + } + buf.WriteString(attr.String()) + } + buf.WriteString("]") + return buf.String() +} + +// Kind specifies the set of graph components to which attribute statements may +// be assigned. +type Kind uint + +// Graph component kinds. +const ( + KindGraph Kind = iota // graph + KindNode // node + KindEdge // edge +) + +// String returns the string representation of the graph component kind. +func (k Kind) String() string { + switch k { + case KindGraph: + return "graph" + case KindNode: + return "node" + case KindEdge: + return "edge" + } + panic(fmt.Sprintf("invalid graph component kind (%d)", uint(k))) +} + +// --- [ Attribute ] ----------------------------------------------------------- + +// An Attr represents an attribute. +// +// Examples. +// +// rank=same +type Attr struct { + // Attribute key. + Key string + // Attribute value. + Val string +} + +// String returns the string representation of the attribute. 
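+// For example, Attr{Key: "rank", Val: "same"} is rendered as rank=same.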
+func (a *Attr) String() string { + return fmt.Sprintf("%s=%s", a.Key, a.Val) +} + +// --- [ Subgraph ] ------------------------------------------------------------ + +// A Subgraph represents a subgraph vertex. +// +// Examples. +// +// subgraph S {A B C} +type Subgraph struct { + // Subgraph ID; or empty if none. + ID string + // Subgraph statements. + Stmts []Stmt +} + +// String returns the string representation of the subgraph. +func (s *Subgraph) String() string { + buf := new(bytes.Buffer) + if len(s.ID) > 0 { + fmt.Fprintf(buf, "subgraph %s ", s.ID) + } + buf.WriteString("{") + for i, stmt := range s.Stmts { + if i != 0 { + buf.WriteString(" ") + } + buf.WriteString(stmt.String()) + } + buf.WriteString("}") + return buf.String() +} + +// isStmt ensures that only statements can be assigned to the Stmt interface. +func (*NodeStmt) isStmt() {} +func (*EdgeStmt) isStmt() {} +func (*AttrStmt) isStmt() {} +func (*Attr) isStmt() {} +func (*Subgraph) isStmt() {} + +// === [ Vertices ] ============================================================ + +// A Vertex represents a vertex, and has one of the following underlying types. +// +// *Node +// *Subgraph +type Vertex interface { + fmt.Stringer + // isVertex ensures that only vertices can be assigned to the Vertex + // interface. + isVertex() +} + +// --- [ Node identifier ] ----------------------------------------------------- + +// A Node represents a node vertex. +// +// Examples. +// +// A +// A:nw +type Node struct { + // Node ID. + ID string + // Node port; or nil if none. + Port *Port +} + +// String returns the string representation of the node. +func (n *Node) String() string { + if n.Port != nil { + return fmt.Sprintf("%s%s", n.ID, n.Port) + } + return n.ID +} + +// A Port specifies where on a node an edge should be aimed. +type Port struct { + // Port ID; or empty if none. + ID string + // Compass point. + CompassPoint CompassPoint +} + +// String returns the string representation of the port. +func (p *Port) String() string { + buf := new(bytes.Buffer) + if len(p.ID) > 0 { + fmt.Fprintf(buf, ":%s", p.ID) + } + if p.CompassPoint != CompassPointDefault { + fmt.Fprintf(buf, ":%s", p.CompassPoint) + } + return buf.String() +} + +// CompassPoint specifies the set of compass points. +type CompassPoint uint + +// Compass points. +const ( + CompassPointDefault CompassPoint = iota // _ + CompassPointNorth // n + CompassPointNorthEast // ne + CompassPointEast // e + CompassPointSouthEast // se + CompassPointSouth // s + CompassPointSouthWest // sw + CompassPointWest // w + CompassPointNorthWest // nw + CompassPointCenter // c +) + +// String returns the string representation of the compass point. +func (c CompassPoint) String() string { + switch c { + case CompassPointDefault: + return "_" + case CompassPointNorth: + return "n" + case CompassPointNorthEast: + return "ne" + case CompassPointEast: + return "e" + case CompassPointSouthEast: + return "se" + case CompassPointSouth: + return "s" + case CompassPointSouthWest: + return "sw" + case CompassPointWest: + return "w" + case CompassPointNorthWest: + return "nw" + case CompassPointCenter: + return "c" + } + panic(fmt.Sprintf("invalid compass point (%d)", uint(c))) +} + +// isVertex ensures that only vertices can be assigned to the Vertex interface. 
+func (*Node) isVertex() {} +func (*Subgraph) isVertex() {} diff --git a/graph/formats/dot/ast/ast_test.go b/graph/formats/dot/ast/ast_test.go new file mode 100644 index 00000000..a5473330 --- /dev/null +++ b/graph/formats/dot/ast/ast_test.go @@ -0,0 +1,101 @@ +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. + +package ast_test + +import ( + "bytes" + "io/ioutil" + "testing" + + "github.com/gonum/graph/formats/dot" + "github.com/gonum/graph/formats/dot/ast" +) + +func TestParseFile(t *testing.T) { + golden := []struct { + in string + out string + }{ + {in: "../internal/testdata/empty.dot"}, + {in: "../internal/testdata/graph.dot"}, + {in: "../internal/testdata/digraph.dot"}, + {in: "../internal/testdata/strict.dot"}, + {in: "../internal/testdata/multi.dot"}, + {in: "../internal/testdata/named_graph.dot"}, + {in: "../internal/testdata/node_stmt.dot"}, + {in: "../internal/testdata/edge_stmt.dot"}, + {in: "../internal/testdata/attr_stmt.dot"}, + {in: "../internal/testdata/attr.dot"}, + { + in: "../internal/testdata/subgraph.dot", + out: "../internal/testdata/subgraph.golden", + }, + { + in: "../internal/testdata/semi.dot", + out: "../internal/testdata/semi.golden", + }, + { + in: "../internal/testdata/empty_attr.dot", + out: "../internal/testdata/empty_attr.golden", + }, + { + in: "../internal/testdata/attr_lists.dot", + out: "../internal/testdata/attr_lists.golden", + }, + { + in: "../internal/testdata/attr_sep.dot", + out: "../internal/testdata/attr_sep.golden", + }, + {in: "../internal/testdata/subgraph_vertex.dot"}, + { + in: "../internal/testdata/port.dot", + out: "../internal/testdata/port.golden", + }, + } + for _, g := range golden { + file, err := dot.ParseFile(g.in) + if err != nil { + t.Errorf("%q: unable to parse file; %v", g.in, err) + continue + } + // If no output path is specified, the input is already golden. + out := g.in + if len(g.out) > 0 { + out = g.out + } + buf, err := ioutil.ReadFile(out) + if err != nil { + t.Errorf("%q: unable to read file; %v", g.in, err) + continue + } + got := file.String() + // Remove trailing newline. + want := string(bytes.TrimSpace(buf)) + if got != want { + t.Errorf("%q: graph mismatch; expected %q, got %q", g.in, want, got) + } + } +} + +// Verify that all statements implement the Stmt interface. +var ( + _ ast.Stmt = (*ast.NodeStmt)(nil) + _ ast.Stmt = (*ast.EdgeStmt)(nil) + _ ast.Stmt = (*ast.AttrStmt)(nil) + _ ast.Stmt = (*ast.Attr)(nil) + _ ast.Stmt = (*ast.Subgraph)(nil) +) + +// Verify that all vertices implement the Vertex interface. +var ( + _ ast.Vertex = (*ast.Node)(nil) + _ ast.Vertex = (*ast.Subgraph)(nil) +) diff --git a/graph/formats/dot/dot.go b/graph/formats/dot/dot.go new file mode 100644 index 00000000..890083b2 --- /dev/null +++ b/graph/formats/dot/dot.go @@ -0,0 +1,63 @@ +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. 
+ +// Package dot implements a parser for Graphviz DOT files. +package dot + +import ( + "fmt" + "io" + "io/ioutil" + + "github.com/gonum/graph/formats/dot/ast" + "github.com/gonum/graph/formats/dot/internal/lexer" + "github.com/gonum/graph/formats/dot/internal/parser" +) + +// ParseFile parses the given Graphviz DOT file into an AST. +func ParseFile(path string) (*ast.File, error) { + buf, err := ioutil.ReadFile(path) + if err != nil { + return nil, err + } + return ParseBytes(buf) +} + +// Parse parses the given Graphviz DOT file into an AST, reading from r. +func Parse(r io.Reader) (*ast.File, error) { + buf, err := ioutil.ReadAll(r) + if err != nil { + return nil, err + } + return ParseBytes(buf) +} + +// ParseBytes parses the given Graphviz DOT file into an AST, reading from b. +func ParseBytes(b []byte) (*ast.File, error) { + l := lexer.NewLexer(b) + p := parser.NewParser() + file, err := p.Parse(l) + if err != nil { + return nil, err + } + f, ok := file.(*ast.File) + if !ok { + return nil, fmt.Errorf("invalid file type; expected *ast.File, got %T", file) + } + if err := check(f); err != nil { + return nil, err + } + return f, nil +} + +// ParseString parses the given Graphviz DOT file into an AST, reading from s. +func ParseString(s string) (*ast.File, error) { + return ParseBytes([]byte(s)) +} diff --git a/graph/formats/dot/internal/Makefile b/graph/formats/dot/internal/Makefile new file mode 100644 index 00000000..e31b4612 --- /dev/null +++ b/graph/formats/dot/internal/Makefile @@ -0,0 +1,37 @@ +gen: dot.bnf + gocc $< + # TODO: Remove once https://github.com/goccmack/gocc/issues/36 gets resolved. + ./paste_copyright.bash + find . -type f -name '*.go' | xargs goimports -w + +debug_lexer: dot.bnf + gocc -debug_lexer -v -a $< + # TODO: Remove once https://github.com/goccmack/gocc/issues/36 gets resolved. + find . -type f -name '*.go' | xargs goimports -w + +debug_parser: dot.bnf + gocc -debug_parser -v -a $< + # TODO: Remove once https://github.com/goccmack/gocc/issues/36 gets resolved. + find . -type f -name '*.go' | xargs goimports -w + +clean: + rm -f errors/errors.go + rm -f lexer/acttab.go + rm -f lexer/lexer.go + rm -f lexer/transitiontable.go + rm -f parser/action.go + rm -f parser/actiontable.go + rm -f parser/gototable.go + rm -f parser/parser.go + rm -f parser/productionstable.go + rm -f token/token.go + rm -f util/litconv.go + rm -f util/rune.go + -rmdir --ignore-fail-on-non-empty errors + -rmdir --ignore-fail-on-non-empty lexer + -rmdir --ignore-fail-on-non-empty parser + -rmdir --ignore-fail-on-non-empty token + -rmdir --ignore-fail-on-non-empty util + rm -f terminals.txt LR1_conflicts.txt LR1_sets.txt first.txt lexer_sets.txt + +.PHONY: gen debug_lexer debug_parser clean diff --git a/graph/formats/dot/internal/astx/astx.go b/graph/formats/dot/internal/astx/astx.go new file mode 100644 index 00000000..987124a0 --- /dev/null +++ b/graph/formats/dot/internal/astx/astx.go @@ -0,0 +1,328 @@ +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. + +// Package astx implements utility functions for generating abstract syntax +// trees of Graphviz DOT graphs. 
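+//
+// A small construction sketch, assuming only the helpers defined in this
+// package (the graph name "G" is illustrative):
+//
+//	g, _ := NewGraph(false, true, "G", nil)
+//	f, _ := NewFile(g)
+//	// f.String() renders the file as DOT source.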
+package astx + +import ( + "fmt" + "strings" + + "github.com/gonum/graph/formats/dot/ast" + "github.com/gonum/graph/formats/dot/internal/token" +) + +// === [ File ] ================================================================ + +// NewFile returns a new file based on the given graph. +func NewFile(graph interface{}) (*ast.File, error) { + g, ok := graph.(*ast.Graph) + if !ok { + return nil, fmt.Errorf("invalid graph type; expected *ast.Graph, got %T", graph) + } + return &ast.File{Graphs: []*ast.Graph{g}}, nil +} + +// AppendGraph appends graph to the given file. +func AppendGraph(file, graph interface{}) (*ast.File, error) { + f, ok := file.(*ast.File) + if !ok { + return nil, fmt.Errorf("invalid file type; expected *ast.File, got %T", file) + } + g, ok := graph.(*ast.Graph) + if !ok { + return nil, fmt.Errorf("invalid graph type; expected *ast.Graph, got %T", graph) + } + f.Graphs = append(f.Graphs, g) + return f, nil +} + +// === [ Graphs ] ============================================================== + +// NewGraph returns a new graph based on the given graph strictness, direction, +// optional ID and optional statements. +func NewGraph(strict, directed, optID, optStmts interface{}) (*ast.Graph, error) { + s, ok := strict.(bool) + if !ok { + return nil, fmt.Errorf("invalid strictness type; expected bool, got %T", strict) + } + d, ok := directed.(bool) + if !ok { + return nil, fmt.Errorf("invalid direction type; expected bool, got %T", directed) + } + id, ok := optID.(string) + if optID != nil && !ok { + return nil, fmt.Errorf("invalid ID type; expected string or nil, got %T", optID) + } + stmts, ok := optStmts.([]ast.Stmt) + if optStmts != nil && !ok { + return nil, fmt.Errorf("invalid statements type; expected []ast.Stmt or nil, got %T", optStmts) + } + return &ast.Graph{Strict: s, Directed: d, ID: id, Stmts: stmts}, nil +} + +// === [ Statements ] ========================================================== + +// NewStmtList returns a new statement list based on the given statement. +func NewStmtList(stmt interface{}) ([]ast.Stmt, error) { + s, ok := stmt.(ast.Stmt) + if !ok { + return nil, fmt.Errorf("invalid statement type; expected ast.Stmt, got %T", stmt) + } + return []ast.Stmt{s}, nil +} + +// AppendStmt appends stmt to the given statement list. +func AppendStmt(list, stmt interface{}) ([]ast.Stmt, error) { + l, ok := list.([]ast.Stmt) + if !ok { + return nil, fmt.Errorf("invalid statement list type; expected []ast.Stmt, got %T", list) + } + s, ok := stmt.(ast.Stmt) + if !ok { + return nil, fmt.Errorf("invalid statement type; expected ast.Stmt, got %T", stmt) + } + return append(l, s), nil +} + +// --- [ Node statement ] ------------------------------------------------------ + +// NewNodeStmt returns a new node statement based on the given node and optional +// attributes. +func NewNodeStmt(node, optAttrs interface{}) (*ast.NodeStmt, error) { + n, ok := node.(*ast.Node) + if !ok { + return nil, fmt.Errorf("invalid node type; expected *ast.Node, got %T", node) + } + attrs, ok := optAttrs.([]*ast.Attr) + if optAttrs != nil && !ok { + return nil, fmt.Errorf("invalid attributes type; expected []*ast.Attr or nil, got %T", optAttrs) + } + return &ast.NodeStmt{Node: n, Attrs: attrs}, nil +} + +// --- [ Edge statement ] ------------------------------------------------------ + +// NewEdgeStmt returns a new edge statement based on the given source vertex, +// outgoing edge and optional attributes. 
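+//
+// For the DOT statement "A -> B", from is the node A and to is the
+// outgoing edge "-> B".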
+func NewEdgeStmt(from, to, optAttrs interface{}) (*ast.EdgeStmt, error) { + f, ok := from.(ast.Vertex) + if !ok { + return nil, fmt.Errorf("invalid source vertex type; expected ast.Vertex, got %T", from) + } + t, ok := to.(*ast.Edge) + if !ok { + return nil, fmt.Errorf("invalid outgoing edge type; expected *ast.Edge, got %T", to) + } + attrs, ok := optAttrs.([]*ast.Attr) + if optAttrs != nil && !ok { + return nil, fmt.Errorf("invalid attributes type; expected []*ast.Attr or nil, got %T", optAttrs) + } + return &ast.EdgeStmt{From: f, To: t, Attrs: attrs}, nil +} + +// NewEdge returns a new edge based on the given edge direction, destination +// vertex and optional outgoing edge. +func NewEdge(directed, vertex, optTo interface{}) (*ast.Edge, error) { + d, ok := directed.(bool) + if !ok { + return nil, fmt.Errorf("invalid direction type; expected bool, got %T", directed) + } + v, ok := vertex.(ast.Vertex) + if !ok { + return nil, fmt.Errorf("invalid destination vertex type; expected ast.Vertex, got %T", vertex) + } + to, ok := optTo.(*ast.Edge) + if optTo != nil && !ok { + return nil, fmt.Errorf("invalid outgoing edge type; expected *ast.Edge or nil, got %T", optTo) + } + return &ast.Edge{Directed: d, Vertex: v, To: to}, nil +} + +// --- [ Attribute statement ] ------------------------------------------------- + +// NewAttrStmt returns a new attribute statement based on the given graph +// component kind and attributes. +func NewAttrStmt(kind, optAttrs interface{}) (*ast.AttrStmt, error) { + k, ok := kind.(ast.Kind) + if !ok { + return nil, fmt.Errorf("invalid graph component kind type; expected ast.Kind, got %T", kind) + } + attrs, ok := optAttrs.([]*ast.Attr) + if optAttrs != nil && !ok { + return nil, fmt.Errorf("invalid attributes type; expected []*ast.Attr or nil, got %T", optAttrs) + } + return &ast.AttrStmt{Kind: k, Attrs: attrs}, nil +} + +// NewAttrList returns a new attribute list based on the given attribute. +func NewAttrList(attr interface{}) ([]*ast.Attr, error) { + a, ok := attr.(*ast.Attr) + if !ok { + return nil, fmt.Errorf("invalid attribute type; expected *ast.Attr, got %T", attr) + } + return []*ast.Attr{a}, nil +} + +// AppendAttr appends attr to the given attribute list. +func AppendAttr(list, attr interface{}) ([]*ast.Attr, error) { + l, ok := list.([]*ast.Attr) + if !ok { + return nil, fmt.Errorf("invalid attribute list type; expected []*ast.Attr, got %T", list) + } + a, ok := attr.(*ast.Attr) + if !ok { + return nil, fmt.Errorf("invalid attribute type; expected *ast.Attr, got %T", attr) + } + return append(l, a), nil +} + +// AppendAttrList appends the optional attrs to the given optional attribute +// list. +func AppendAttrList(optList, optAttrs interface{}) ([]*ast.Attr, error) { + list, ok := optList.([]*ast.Attr) + if optList != nil && !ok { + return nil, fmt.Errorf("invalid attribute list type; expected []*ast.Attr or nil, got %T", optList) + } + attrs, ok := optAttrs.([]*ast.Attr) + if optAttrs != nil && !ok { + return nil, fmt.Errorf("invalid attributes type; expected []*ast.Attr or nil, got %T", optAttrs) + } + return append(list, attrs...), nil +} + +// --- [ Attribute ] ----------------------------------------------------------- + +// NewAttr returns a new attribute based on the given key-value pair. 
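+//
+// For example (illustrative values), the DOT attribute color=red corresponds
+// to &ast.Attr{Key: "color", Val: "red"}.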
+func NewAttr(key, val interface{}) (*ast.Attr, error) { + k, ok := key.(string) + if !ok { + return nil, fmt.Errorf("invalid key type; expected string, got %T", key) + } + v, ok := val.(string) + if !ok { + return nil, fmt.Errorf("invalid value type; expected string, got %T", val) + } + return &ast.Attr{Key: k, Val: v}, nil +} + +// --- [ Subgraph ] ------------------------------------------------------------ + +// NewSubgraph returns a new subgraph based on the given optional subgraph ID +// and optional statements. +func NewSubgraph(optID, optStmts interface{}) (*ast.Subgraph, error) { + id, ok := optID.(string) + if optID != nil && !ok { + return nil, fmt.Errorf("invalid ID type; expected string or nil, got %T", optID) + } + stmts, ok := optStmts.([]ast.Stmt) + if optStmts != nil && !ok { + return nil, fmt.Errorf("invalid statements type; expected []ast.Stmt or nil, got %T", optStmts) + } + return &ast.Subgraph{ID: id, Stmts: stmts}, nil +} + +// === [ Vertices ] ============================================================ + +// --- [ Node identifier ] ----------------------------------------------------- + +// NewNode returns a new node based on the given node id and optional port. +func NewNode(id, optPort interface{}) (*ast.Node, error) { + i, ok := id.(string) + if !ok { + return nil, fmt.Errorf("invalid ID type; expected string, got %T", id) + } + port, ok := optPort.(*ast.Port) + if optPort != nil && !ok { + return nil, fmt.Errorf("invalid port type; expected *ast.Port or nil, got %T", optPort) + } + return &ast.Node{ID: i, Port: port}, nil +} + +// NewPort returns a new port based on the given id and optional compass point. +func NewPort(id, optCompassPoint interface{}) (*ast.Port, error) { + // Note, if optCompassPoint is nil, id may be either an identifier or a + // compass point. + // + // The following strings are valid compass points: + // + // "n", "ne", "e", "se", "s", "sw", "w", "nw", "c" and "_" + i, ok := id.(string) + if !ok { + return nil, fmt.Errorf("invalid ID type; expected string, got %T", id) + } + + // Early return if optional compass point is absent and ID is a valid compass + // point. + if optCompassPoint == nil { + if compassPoint, ok := getCompassPoint(i); ok { + return &ast.Port{CompassPoint: compassPoint}, nil + } + } + + c, ok := optCompassPoint.(string) + if optCompassPoint != nil && !ok { + return nil, fmt.Errorf("invalid compass point type; expected string or nil, got %T", optCompassPoint) + } + compassPoint, _ := getCompassPoint(c) + return &ast.Port{ID: i, CompassPoint: compassPoint}, nil +} + +// getCompassPoint returns the corresponding compass point to the given string, +// and a boolean value indicating if such a compass point exists. +func getCompassPoint(s string) (ast.CompassPoint, bool) { + switch s { + case "_": + return ast.CompassPointDefault, true + case "n": + return ast.CompassPointNorth, true + case "ne": + return ast.CompassPointNorthEast, true + case "e": + return ast.CompassPointEast, true + case "se": + return ast.CompassPointSouthEast, true + case "s": + return ast.CompassPointSouth, true + case "sw": + return ast.CompassPointSouthWest, true + case "w": + return ast.CompassPointWest, true + case "nw": + return ast.CompassPointNorthWest, true + case "c": + return ast.CompassPointCenter, true + } + return ast.CompassPointDefault, false +} + +// === [ Identifiers ] ========================================================= + +// NewID returns a new identifier based on the given ID token. 
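+//
+// A double-quoted ID may span several physical lines by placing a backslash
+// immediately before each newline; NewID strips those backslash-newline
+// pairs. For example (illustrative input), the two-line quoted ID
+//
+//	"foo\
+//	bar"
+//
+// is returned as the single string "foobar" (surrounding quotes retained).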
+func NewID(id interface{}) (string, error) { + i, ok := id.(*token.Token) + if !ok { + return "", fmt.Errorf("invalid identifier type; expected *token.Token, got %T", id) + } + s := string(i.Lit) + + // As another aid for readability, dot allows double-quoted strings to span + // multiple physical lines using the standard C convention of a backslash + // immediately preceding a newline character. + if strings.HasPrefix(s, `"`) && strings.HasSuffix(s, `"`) { + // Strip "\\\n" sequences. + s = strings.Replace(s, "\\\n", "", -1) + } + + // TODO: Add support for concatenated using a '+' operator. + + return s, nil +} diff --git a/graph/formats/dot/internal/astx/astx_test.go b/graph/formats/dot/internal/astx/astx_test.go new file mode 100644 index 00000000..68560dbe --- /dev/null +++ b/graph/formats/dot/internal/astx/astx_test.go @@ -0,0 +1,90 @@ +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. + +package astx_test + +import ( + "bytes" + "io/ioutil" + "testing" + + "github.com/gonum/graph/formats/dot" +) + +func TestParseFile(t *testing.T) { + golden := []struct { + in string + out string + }{ + {in: "../testdata/empty.dot"}, + {in: "../testdata/graph.dot"}, + {in: "../testdata/digraph.dot"}, + {in: "../testdata/strict.dot"}, + {in: "../testdata/multi.dot"}, + {in: "../testdata/named_graph.dot"}, + {in: "../testdata/node_stmt.dot"}, + {in: "../testdata/edge_stmt.dot"}, + {in: "../testdata/attr_stmt.dot"}, + {in: "../testdata/attr.dot"}, + { + in: "../testdata/subgraph.dot", + out: "../testdata/subgraph.golden", + }, + { + in: "../testdata/semi.dot", + out: "../testdata/semi.golden", + }, + { + in: "../testdata/empty_attr.dot", + out: "../testdata/empty_attr.golden", + }, + { + in: "../testdata/attr_lists.dot", + out: "../testdata/attr_lists.golden", + }, + { + in: "../testdata/attr_sep.dot", + out: "../testdata/attr_sep.golden", + }, + {in: "../testdata/subgraph_vertex.dot"}, + { + in: "../testdata/port.dot", + out: "../testdata/port.golden", + }, + {in: "../testdata/quoted_id.dot"}, + { + in: "../testdata/backslash_newline_id.dot", + out: "../testdata/backslash_newline_id.golden", + }, + } + for _, g := range golden { + file, err := dot.ParseFile(g.in) + if err != nil { + t.Errorf("%q: unable to parse file; %v", g.in, err) + continue + } + // If no output path is specified, the input is already golden. + out := g.in + if len(g.out) > 0 { + out = g.out + } + buf, err := ioutil.ReadFile(out) + if err != nil { + t.Errorf("%q: unable to read file; %v", g.in, err) + continue + } + got := file.String() + // Remove trailing newline. + want := string(bytes.TrimSpace(buf)) + if got != want { + t.Errorf("%q: graph mismatch; expected `%s`, got `%s`", g.in, want, got) + } + } +} diff --git a/graph/formats/dot/internal/dot.bnf b/graph/formats/dot/internal/dot.bnf new file mode 100644 index 00000000..a86a867c --- /dev/null +++ b/graph/formats/dot/internal/dot.bnf @@ -0,0 +1,358 @@ +// The DOT Language +// +// http://www.graphviz.org/doc/info/lang.html + +// ### [ Tokens ] ############################################################## + +// The keywords node, edge, graph, digraph, subgraph, and strict are case- +// independent. 
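+//
+// Case independence is implemented below by listing the accepted spellings of
+// each keyword as separate alternatives. For illustration, each of the
+// following inputs selects the same productions:
+//
+//	graph { node [] }
+//	Graph { Node [] }
+//	GRAPH { NODE [] }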
+ +node + : 'n' 'o' 'd' 'e' + | 'N' 'o' 'd' 'e' + | 'N' 'O' 'D' 'E' +; + +edge + : 'e' 'd' 'g' 'e' + | 'E' 'd' 'g' 'e' + | 'E' 'D' 'G' 'E' +; + +// TODO: Rename graphx to graph once gocc#20 is fixed [1]. +// +// [1]: https://github.com/goccmack/gocc/issues/20 + +graphx + : 'g' 'r' 'a' 'p' 'h' + | 'G' 'r' 'a' 'p' 'h' + | 'G' 'R' 'A' 'P' 'H' +; + +digraph + : 'd' 'i' 'g' 'r' 'a' 'p' 'h' + | 'D' 'i' 'g' 'r' 'a' 'p' 'h' + | 'd' 'i' 'G' 'r' 'a' 'p' 'h' + | 'D' 'i' 'G' 'r' 'a' 'p' 'h' + | 'D' 'I' 'G' 'R' 'A' 'P' 'H' +; + +subgraph + : 's' 'u' 'b' 'g' 'r' 'a' 'p' 'h' + | 'S' 'u' 'b' 'g' 'r' 'a' 'p' 'h' + | 's' 'u' 'b' 'G' 'r' 'a' 'p' 'h' + | 'S' 'u' 'b' 'G' 'r' 'a' 'p' 'h' + | 'S' 'U' 'B' 'G' 'R' 'A' 'P' 'H' +; + +strict + : 's' 't' 'r' 'i' 'c' 't' + | 'S' 't' 'r' 'i' 'c' 't' + | 'S' 'T' 'R' 'I' 'C' 'T' +; + +// An arbitrary ASCII character except null (0x00), double quote (0x22) and +// backslash (0x5C). +_ascii_char + // skip null (0x00) + : '\x01' - '\x21' + // skip double quote (0x22) + | '\x23' - '\x5B' + // skip backslash (0x5C) + | '\x5D' - '\x7F' +; + +_ascii_letter + : 'a' - 'z' + | 'A' - 'Z' +; + +_ascii_digit : '0' - '9' ; + +_unicode_char + : _ascii_char + | _unicode_byte +; + +_unicode_byte + : '\u0080' - '\uFFFC' + // skip invalid code point (\uFFFD) + | '\uFFFE' - '\U0010FFFF' +; + +_letter : _ascii_letter | _unicode_byte | '_' ; +_decimal_digit : _ascii_digit ; +_decimals : _decimal_digit { _decimal_digit } ; + +// An ID is one of the following: +// +// 1) Any string of alphabetic ([a-zA-Z\200-\377]) characters, underscores +// ('_') or digits ([0-9]), not beginning with a digit; +// +// 2) a numeral [-]?(.[0-9]+ | [0-9]+(.[0-9]*)? ); +// +// 3) any double-quoted string ("...") possibly containing escaped quotes +// (\"); +// +// 4) an HTML string (<...>). + +id + : _letter { _letter | _decimal_digit } + | _int_lit + | _string_lit + | _html_lit +; + +_int_lit + : [ '-' ] '.' _decimals + | [ '-' ] _decimals [ '.' { _decimal_digit } ] +; + +// In quoted strings in DOT, the only escaped character is double-quote ("). +// That is, in quoted strings, the dyad \" is converted to "; all other +// characters are left unchanged. In particular, \\ remains \\. + +// As another aid for readability, dot allows double-quoted strings to span +// multiple physical lines using the standard C convention of a backslash +// immediately preceding a newline character. + +// In addition, double-quoted strings can be concatenated using a '+' operator. + +_escaped_char : '\\' ( _unicode_char | '"' | '\\' ) ; +_char : _unicode_char | _escaped_char ; +_string_lit : '"' { _char } '"' ; + +// An arbitrary HTML character except null (0x00), left angle bracket (0x3C) and +// right angle bracket (0x3E). +_html_char + // skip null (0x00) + : '\x01' - '\x3B' + // skip left angle bracket (0x3C) + | '\x3D' + // skip right angle bracket (0x3E) + | '\x3F' - '\xFF' +; + +_html_chars : { _html_char } ; +_html_tag : '<' _html_chars '>' ; +_html_lit : '<' { _html_chars | _html_tag } '>' ; + +// The language supports C++-style comments: /* */ and //. In addition, a line +// beginning with a '#' character is considered a line output from a C +// preprocessor (e.g., # 34 to indicate line 34 ) and discarded. + +_line_comment + : '/' '/' { . } '\n' + | '#' { . } '\n' +; + +_block_comment : '/' '*' { . 
| '*' } '*' '/' ; +!comment : _line_comment | _block_comment ; + +!whitespace : ' ' | '\t' | '\r' | '\n' ; + +// ### [ Syntax ] ############################################################## + +<< import ( + "github.com/gonum/graph/formats/dot/ast" + "github.com/gonum/graph/formats/dot/internal/astx" +) >> + +// === [ Files ] =============================================================== + +File + : Graph << astx.NewFile($0) >> + | File Graph << astx.AppendGraph($0, $1) >> +; + +// === [ Graphs ] ============================================================== + +// Graph : [ "strict" ] ( "graph" | "digraph" ) [ ID ] "{" [ StmtList ] "}" + +Graph + : OptStrict DirectedGraph OptID + "{" OptStmtList "}" << astx.NewGraph($0, $1, $2, $4) >> +; + +OptStrict + : empty << false, nil >> + | strict << true, nil >> +; + +DirectedGraph + : graphx << false, nil >> + | digraph << true, nil >> +; + +// === [ Statements ] ========================================================== + +// StmtList +// : Stmt [ ";" ] +// | StmtList Stmt [ ";" ] + +StmtList + : Stmt OptSemi << astx.NewStmtList($0) >> + | StmtList Stmt OptSemi << astx.AppendStmt($0, $1) >> +; + +OptStmtList + : empty + | StmtList +; + +Stmt + : NodeStmt + | EdgeStmt + | AttrStmt + | Attr + | Subgraph +; + +OptSemi + : empty + | ";" +; + +// --- [ Node statement ] ------------------------------------------------------ + +// NodeStmt : Node [ AttrList ] + +NodeStmt + : Node OptAttrList << astx.NewNodeStmt($0, $1) >> +; + +// --- [ Edge statement ] ------------------------------------------------------ + +// EdgeStmt : ( Node | Subgraph ) Edge [ AttrList ] + +EdgeStmt + : Vertex Edge OptAttrList << astx.NewEdgeStmt($0, $1, $2) >> +; + +// Edge : ( "--" | "-->" ) ( Node | Subgraph ) [ Edge ] + +Edge + : DirectedEdge Vertex OptEdge << astx.NewEdge($0, $1, $2) >> +; + +DirectedEdge + : "--" << false, nil >> + | "->" << true, nil >> +; + +OptEdge + : empty + | Edge +; + +// --- [ Attribute statement ] ------------------------------------------------- + +// AttrStmt : ( "graph" | "node" | "edge" ) AttrList + +AttrStmt + : Component AttrList << astx.NewAttrStmt($0, $1) >> +; + +Component + : graphx << ast.KindGraph, nil >> + | node << ast.KindNode, nil >> + | edge << ast.KindEdge, nil >> +; + +// AttrList : "[" [ AList ] "]" [ AttrList ] + +AttrList + : "[" OptAList "]" << $1, nil >> + | AttrList "[" OptAList "]" << astx.AppendAttrList($0, $2) >> +; + +OptAttrList + : empty + | AttrList +; + +// AList +// : Attr [ ( ";" | "," ) ] +// | AList Attr [ ( ";" | "," ) ] + +AList + : Attr OptSep << astx.NewAttrList($0) >> + | AList Attr OptSep << astx.AppendAttr($0, $1) >> +; + +OptAList + : empty + | AList +; + +OptSep + : empty + | ";" + | "," +; + +// --- [ Attribute ] ----------------------------------------------------------- + +Attr + : ID "=" ID << astx.NewAttr($0, $2) >> +; + +// --- [ Subgraph ] ------------------------------------------------------------ + +// Subgraph : [ "subgraph" [ ID ] ] "{" [ StmtList ] "}" + +Subgraph + : OptSubgraphID "{" OptStmtList "}" << astx.NewSubgraph($0, $2) >> +; + +OptSubgraphID + : empty + | subgraph OptID << $1, nil >> +; + +// === [ Vertices ] ============================================================ + +Vertex + : Node + | Subgraph +; + +// --- [ Node identifier ] ----------------------------------------------------- + +// Node : ID [ Port ] + +Node + : ID OptPort << astx.NewNode($0, $1) >> +; + +// Port +// : ":" ID [ ":" CompassPoint ] +// | ":" CompassPoint +// +// CompassPoint +// : "n" | "ne" | "e" | "se" 
| "s" | "sw" | "w" | "nw" | "c" | "_" + +// Note also that the allowed compass point values are not keywords, so these +// strings can be used elsewhere as ordinary identifiers and, conversely, the +// parser will actually accept any identifier. + +Port + : ":" ID << astx.NewPort($1, nil) >> + | ":" ID ":" ID << astx.NewPort($1, $3) >> +; + +OptPort + : empty + | Port +; + +// === [ Identifiers ] ========================================================= + +ID + : id << astx.NewID($0) >> +; + +OptID + : empty << "", nil >> + | ID +; diff --git a/graph/formats/dot/internal/errors/errors.go b/graph/formats/dot/internal/errors/errors.go new file mode 100644 index 00000000..81009b0e --- /dev/null +++ b/graph/formats/dot/internal/errors/errors.go @@ -0,0 +1,66 @@ +// Code generated by gocc; DO NOT EDIT. + +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. + +package errors + +import ( + "bytes" + "fmt" + + "github.com/gonum/graph/formats/dot/internal/token" +) + +type ErrorSymbol interface { +} + +type Error struct { + Err error + ErrorToken *token.Token + ErrorSymbols []ErrorSymbol + ExpectedTokens []string + StackTop int +} + +func (E *Error) String() string { + w := new(bytes.Buffer) + fmt.Fprintf(w, "Error") + if E.Err != nil { + fmt.Fprintf(w, " %s\n", E.Err) + } else { + fmt.Fprintf(w, "\n") + } + fmt.Fprintf(w, "Token: type=%d, lit=%s\n", E.ErrorToken.Type, E.ErrorToken.Lit) + fmt.Fprintf(w, "Pos: offset=%d, line=%d, column=%d\n", E.ErrorToken.Pos.Offset, E.ErrorToken.Pos.Line, E.ErrorToken.Pos.Column) + fmt.Fprintf(w, "Expected one of: ") + for _, sym := range E.ExpectedTokens { + fmt.Fprintf(w, "%s ", sym) + } + fmt.Fprintf(w, "ErrorSymbol:\n") + for _, sym := range E.ErrorSymbols { + fmt.Fprintf(w, "%v\n", sym) + } + return w.String() +} + +func (e *Error) Error() string { + w := new(bytes.Buffer) + fmt.Fprintf(w, "Error in S%d: %s, %s", e.StackTop, token.TokMap.TokenString(e.ErrorToken), e.ErrorToken.Pos.String()) + if e.Err != nil { + fmt.Fprintf(w, e.Err.Error()) + } else { + fmt.Fprintf(w, ", expected one of: ") + for _, expected := range e.ExpectedTokens { + fmt.Fprintf(w, "%s ", expected) + } + } + return w.String() +} diff --git a/graph/formats/dot/internal/lexer/acttab.go b/graph/formats/dot/internal/lexer/acttab.go new file mode 100644 index 00000000..4cecfb59 --- /dev/null +++ b/graph/formats/dot/internal/lexer/acttab.go @@ -0,0 +1,597 @@ +// Code generated by gocc; DO NOT EDIT. + +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. 
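+//
+// ActTab below is the gocc-generated lexer action table: one ActionRow per
+// DFA state, giving the token type accepted in that state and, for the
+// whitespace and comment states, the pattern to ignore. Lexer.Scan consults
+// it after every state transition.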
+ +package lexer + +import ( + "fmt" + + "github.com/gonum/graph/formats/dot/internal/token" +) + +type ActionTable [NumStates]ActionRow + +type ActionRow struct { + Accept token.Type + Ignore string +} + +func (this ActionRow) String() string { + return fmt.Sprintf("Accept=%d, Ignore=%s", this.Accept, this.Ignore) +} + +var ActTab = ActionTable{ + ActionRow{ // S0 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S1 + Accept: -1, + Ignore: "!whitespace", + }, + ActionRow{ // S2 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S3 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S4 + Accept: 15, + Ignore: "", + }, + ActionRow{ // S5 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S6 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S7 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S8 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S9 + Accept: 18, + Ignore: "", + }, + ActionRow{ // S10 + Accept: 8, + Ignore: "", + }, + ActionRow{ // S11 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S12 + Accept: 16, + Ignore: "", + }, + ActionRow{ // S13 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S14 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S15 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S16 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S17 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S18 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S19 + Accept: 13, + Ignore: "", + }, + ActionRow{ // S20 + Accept: 14, + Ignore: "", + }, + ActionRow{ // S21 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S22 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S23 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S24 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S25 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S26 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S27 + Accept: 2, + Ignore: "", + }, + ActionRow{ // S28 + Accept: 3, + Ignore: "", + }, + ActionRow{ // S29 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S30 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S31 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S32 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S33 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S34 + Accept: -1, + Ignore: "!comment", + }, + ActionRow{ // S35 + Accept: 9, + Ignore: "", + }, + ActionRow{ // S36 + Accept: 10, + Ignore: "", + }, + ActionRow{ // S37 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S38 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S39 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S40 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S41 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S42 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S43 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S44 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S45 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S46 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S47 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S48 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S49 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S50 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S51 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S52 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S53 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S54 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S55 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S56 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S57 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S58 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S59 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S60 + Accept: 19, 
+ Ignore: "", + }, + ActionRow{ // S61 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S62 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S63 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S64 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S65 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S66 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S67 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S68 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S69 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S70 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S71 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S72 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S73 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S74 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S75 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S76 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S77 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S78 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S79 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S80 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S81 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S82 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S83 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S84 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S85 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S86 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S87 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S88 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S89 + Accept: -1, + Ignore: "!comment", + }, + ActionRow{ // S90 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S91 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S92 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S93 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S94 + Accept: 12, + Ignore: "", + }, + ActionRow{ // S95 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S96 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S97 + Accept: 11, + Ignore: "", + }, + ActionRow{ // S98 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S99 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S100 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S101 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S102 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S103 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S104 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S105 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S106 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S107 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S108 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S109 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S110 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S111 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S112 + Accept: 6, + Ignore: "", + }, + ActionRow{ // S113 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S114 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S115 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S116 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S117 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S118 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S119 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S120 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S121 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S122 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S123 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S124 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S125 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S126 + Accept: 5, + Ignore: "", + }, + 
ActionRow{ // S127 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S128 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S129 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S130 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S131 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S132 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S133 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S134 + Accept: 7, + Ignore: "", + }, + ActionRow{ // S135 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S136 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S137 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S138 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S139 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S140 + Accept: 17, + Ignore: "", + }, +} diff --git a/graph/formats/dot/internal/lexer/lexer.go b/graph/formats/dot/internal/lexer/lexer.go new file mode 100644 index 00000000..be647b8b --- /dev/null +++ b/graph/formats/dot/internal/lexer/lexer.go @@ -0,0 +1,338 @@ +// Code generated by gocc; DO NOT EDIT. + +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. + +package lexer + +import ( + // "fmt" + "io/ioutil" + "unicode/utf8" + + // "github.com/gonum/graph/formats/dot/internal/util" + "github.com/gonum/graph/formats/dot/internal/token" +) + +const ( + NoState = -1 + NumStates = 141 + NumSymbols = 184 +) + +type Lexer struct { + src []byte + pos int + line int + column int +} + +func NewLexer(src []byte) *Lexer { + lexer := &Lexer{ + src: src, + pos: 0, + line: 1, + column: 1, + } + return lexer +} + +func NewLexerFile(fpath string) (*Lexer, error) { + src, err := ioutil.ReadFile(fpath) + if err != nil { + return nil, err + } + return NewLexer(src), nil +} + +func (this *Lexer) Scan() (tok *token.Token) { + + // fmt.Printf("Lexer.Scan() pos=%d\n", this.pos) + + tok = new(token.Token) + if this.pos >= len(this.src) { + tok.Type = token.EOF + tok.Pos.Offset, tok.Pos.Line, tok.Pos.Column = this.pos, this.line, this.column + return + } + start, startLine, startColumn, end := this.pos, this.line, this.column, 0 + tok.Type = token.INVALID + state, rune1, size := 0, rune(-1), 0 + for state != -1 { + + // fmt.Printf("\tpos=%d, line=%d, col=%d, state=%d\n", this.pos, this.line, this.column, state) + + if this.pos >= len(this.src) { + rune1 = -1 + } else { + rune1, size = utf8.DecodeRune(this.src[this.pos:]) + this.pos += size + } + + // Production start + if rune1 != -1 { + state = TransTab[state](rune1) + } else { + state = -1 + } + // Production end + + // Debug start + // nextState := -1 + // if rune1 != -1 { + // nextState = TransTab[state](rune1) + // } + // fmt.Printf("\tS%d, : tok=%s, rune == %s(%x), next state == %d\n", state, token.TokMap.Id(tok.Type), util.RuneToString(rune1), rune1, nextState) + // fmt.Printf("\t\tpos=%d, size=%d, start=%d, end=%d\n", this.pos, size, start, end) + // if nextState != -1 { + // fmt.Printf("\t\taction:%s\n", ActTab[nextState].String()) + // } + // state = nextState + // Debug end + + if state != -1 { + + switch rune1 { + case '\n': + this.line++ + this.column = 1 + case '\r': + this.column = 1 + case '\t': + this.column += 4 + default: + this.column++ + } + + switch { + case ActTab[state].Accept != -1: + tok.Type = ActTab[state].Accept 
+ // fmt.Printf("\t Accept(%s), %s(%d)\n", string(act), token.TokMap.Id(tok), tok) + end = this.pos + case ActTab[state].Ignore != "": + // fmt.Printf("\t Ignore(%s)\n", string(act)) + start, startLine, startColumn = this.pos, this.line, this.column + state = 0 + if start >= len(this.src) { + tok.Type = token.EOF + } + + } + } else { + if tok.Type == token.INVALID { + end = this.pos + } + } + } + if end > start { + this.pos = end + tok.Lit = this.src[start:end] + } else { + tok.Lit = []byte{} + } + tok.Pos.Offset, tok.Pos.Line, tok.Pos.Column = start, startLine, startColumn + + // fmt.Printf("Token at %s: %s \"%s\"\n", tok.String(), token.TokMap.Id(tok.Type), tok.Lit) + + return +} + +func (this *Lexer) Reset() { + this.pos = 0 +} + +/* +Lexer symbols: +0: 'n' +1: 'o' +2: 'd' +3: 'e' +4: 'N' +5: 'o' +6: 'd' +7: 'e' +8: 'N' +9: 'O' +10: 'D' +11: 'E' +12: 'e' +13: 'd' +14: 'g' +15: 'e' +16: 'E' +17: 'd' +18: 'g' +19: 'e' +20: 'E' +21: 'D' +22: 'G' +23: 'E' +24: 'g' +25: 'r' +26: 'a' +27: 'p' +28: 'h' +29: 'G' +30: 'r' +31: 'a' +32: 'p' +33: 'h' +34: 'G' +35: 'R' +36: 'A' +37: 'P' +38: 'H' +39: 'd' +40: 'i' +41: 'g' +42: 'r' +43: 'a' +44: 'p' +45: 'h' +46: 'D' +47: 'i' +48: 'g' +49: 'r' +50: 'a' +51: 'p' +52: 'h' +53: 'd' +54: 'i' +55: 'G' +56: 'r' +57: 'a' +58: 'p' +59: 'h' +60: 'D' +61: 'i' +62: 'G' +63: 'r' +64: 'a' +65: 'p' +66: 'h' +67: 'D' +68: 'I' +69: 'G' +70: 'R' +71: 'A' +72: 'P' +73: 'H' +74: 's' +75: 'u' +76: 'b' +77: 'g' +78: 'r' +79: 'a' +80: 'p' +81: 'h' +82: 'S' +83: 'u' +84: 'b' +85: 'g' +86: 'r' +87: 'a' +88: 'p' +89: 'h' +90: 's' +91: 'u' +92: 'b' +93: 'G' +94: 'r' +95: 'a' +96: 'p' +97: 'h' +98: 'S' +99: 'u' +100: 'b' +101: 'G' +102: 'r' +103: 'a' +104: 'p' +105: 'h' +106: 'S' +107: 'U' +108: 'B' +109: 'G' +110: 'R' +111: 'A' +112: 'P' +113: 'H' +114: 's' +115: 't' +116: 'r' +117: 'i' +118: 'c' +119: 't' +120: 'S' +121: 't' +122: 'r' +123: 'i' +124: 'c' +125: 't' +126: 'S' +127: 'T' +128: 'R' +129: 'I' +130: 'C' +131: 'T' +132: '{' +133: '}' +134: ';' +135: '-' +136: '-' +137: '-' +138: '>' +139: '[' +140: ']' +141: ',' +142: '=' +143: ':' +144: '_' +145: '-' +146: '.' +147: '-' +148: '.' +149: '\' +150: '"' +151: '\' +152: '"' +153: '"' +154: '=' +155: '<' +156: '>' +157: '<' +158: '>' +159: '/' +160: '/' +161: '\n' +162: '#' +163: '\n' +164: '/' +165: '*' +166: '*' +167: '*' +168: '/' +169: ' ' +170: '\t' +171: '\r' +172: '\n' +173: \u0001-'!' +174: '#'-'[' +175: ']'-\u007f +176: 'a'-'z' +177: 'A'-'Z' +178: '0'-'9' +179: \u0080-\ufffc +180: \ufffe-\U0010ffff +181: \u0001-';' +182: '?'-\u00ff +183: . + +*/ diff --git a/graph/formats/dot/internal/lexer/lexer_test.go b/graph/formats/dot/internal/lexer/lexer_test.go new file mode 100644 index 00000000..fedf5e8d --- /dev/null +++ b/graph/formats/dot/internal/lexer/lexer_test.go @@ -0,0 +1,54 @@ +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. 
+ +package lexer_test + +import ( + "bytes" + "io/ioutil" + "testing" + + "github.com/gonum/graph/formats/dot" +) + +func TestParseFile(t *testing.T) { + golden := []struct { + in string + out string + }{ + { + in: "testdata/tokens.dot", + out: "testdata/tokens.golden", + }, + } + for _, g := range golden { + file, err := dot.ParseFile(g.in) + if err != nil { + t.Errorf("%q: unable to parse file; %v", g.in, err) + continue + } + // If no output path is specified, the input is already golden. + out := g.in + if len(g.out) > 0 { + out = g.out + } + buf, err := ioutil.ReadFile(out) + if err != nil { + t.Errorf("%q: unable to read file; %v", g.in, err) + continue + } + got := file.String() + // Remove trailing newline. + want := string(bytes.TrimSpace(buf)) + if got != want { + t.Errorf("%q: graph mismatch; expected %q, got %q", g.in, want, got) + } + } +} diff --git a/graph/formats/dot/internal/lexer/testdata/tokens.dot b/graph/formats/dot/internal/lexer/testdata/tokens.dot new file mode 100644 index 00000000..3a161951 --- /dev/null +++ b/graph/formats/dot/internal/lexer/testdata/tokens.dot @@ -0,0 +1,39 @@ +# C preprocessing directives act as comments. +/* block comment */ +// keywords are case-insensitive. +graph { + node [] + Node [] + NODE [] + edge [] + Edge [] + EDGE [] + subgraph {} + subGraph {} + Subgraph {} + SubGraph {} + SUBGRAPH S {} + A; B [style=filled, fillcolor=red] + C:nw -- D:se + "foo" + .10 + -20 + 3.14 + F [label=<
foo
>] + _foo + a10 +} +Graph { +} +GRAPH { +} +digraph { +} +Digraph { +} +diGraph { +} +DiGraph { +} +DIGRAPH { +} diff --git a/graph/formats/dot/internal/lexer/testdata/tokens.golden b/graph/formats/dot/internal/lexer/testdata/tokens.golden new file mode 100644 index 00000000..f64bdefc --- /dev/null +++ b/graph/formats/dot/internal/lexer/testdata/tokens.golden @@ -0,0 +1,37 @@ +graph { + node [] + node [] + node [] + edge [] + edge [] + edge [] + {} + {} + {} + {} + subgraph S {} + A + B [style=filled fillcolor=red] + C:nw -- D:se + "foo" + .10 + -20 + 3.14 + F [label=<
foo
>] + _foo + a10 +} +graph { +} +graph { +} +digraph { +} +digraph { +} +digraph { +} +digraph { +} +digraph { +} diff --git a/graph/formats/dot/internal/lexer/transitiontable.go b/graph/formats/dot/internal/lexer/transitiontable.go new file mode 100644 index 00000000..98c1d051 --- /dev/null +++ b/graph/formats/dot/internal/lexer/transitiontable.go @@ -0,0 +1,3027 @@ +// Code generated by gocc; DO NOT EDIT. + +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. + +package lexer + +/* +Let s be the current state +Let r be the current input rune +transitionTable[s](r) returns the next state. +*/ +type TransitionTable [NumStates]func(rune) int + +var TransTab = TransitionTable{ + + // S0 + func(r rune) int { + switch { + case r == 9: // ['\t','\t'] + return 1 + case r == 10: // ['\n','\n'] + return 1 + case r == 13: // ['\r','\r'] + return 1 + case r == 32: // [' ',' '] + return 1 + case r == 34: // ['"','"'] + return 2 + case r == 35: // ['#','#'] + return 3 + case r == 44: // [',',','] + return 4 + case r == 45: // ['-','-'] + return 5 + case r == 46: // ['.','.'] + return 6 + case r == 47: // ['/','/'] + return 7 + case 48 <= r && r <= 57: // ['0','9'] + return 8 + case r == 58: // [':',':'] + return 9 + case r == 59: // [';',';'] + return 10 + case r == 60: // ['<','<'] + return 11 + case r == 61: // ['=','='] + return 12 + case 65 <= r && r <= 67: // ['A','C'] + return 13 + case r == 68: // ['D','D'] + return 14 + case r == 69: // ['E','E'] + return 15 + case r == 70: // ['F','F'] + return 13 + case r == 71: // ['G','G'] + return 16 + case 72 <= r && r <= 77: // ['H','M'] + return 13 + case r == 78: // ['N','N'] + return 17 + case 79 <= r && r <= 82: // ['O','R'] + return 13 + case r == 83: // ['S','S'] + return 18 + case 84 <= r && r <= 90: // ['T','Z'] + return 13 + case r == 91: // ['[','['] + return 19 + case r == 93: // [']',']'] + return 20 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 99: // ['a','c'] + return 13 + case r == 100: // ['d','d'] + return 22 + case r == 101: // ['e','e'] + return 23 + case r == 102: // ['f','f'] + return 13 + case r == 103: // ['g','g'] + return 24 + case 104 <= r && r <= 109: // ['h','m'] + return 13 + case r == 110: // ['n','n'] + return 25 + case 111 <= r && r <= 114: // ['o','r'] + return 13 + case r == 115: // ['s','s'] + return 26 + case 116 <= r && r <= 122: // ['t','z'] + return 13 + case r == 123: // ['{','{'] + return 27 + case r == 125: // ['}','}'] + return 28 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S1 + func(r rune) int { + switch { + + } + return NoState + }, + + // S2 + func(r rune) int { + switch { + case 1 <= r && r <= 33: // [\u0001,'!'] + return 30 + case r == 34: // ['"','"'] + return 31 + case 35 <= r && r <= 91: // ['#','['] + return 30 + case r == 92: // ['\','\'] + return 32 + case 93 <= r && r <= 127: // [']',\u007f] + return 30 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 33 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 33 + + } + return NoState + }, + + // S3 + func(r rune) int { + switch { + case r == 10: // ['\n','\n'] + return 
34 + + default: + return 3 + } + + }, + + // S4 + func(r rune) int { + switch { + + } + return NoState + }, + + // S5 + func(r rune) int { + switch { + case r == 45: // ['-','-'] + return 35 + case r == 46: // ['.','.'] + return 6 + case 48 <= r && r <= 57: // ['0','9'] + return 8 + case r == 62: // ['>','>'] + return 36 + + } + return NoState + }, + + // S6 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 37 + + } + return NoState + }, + + // S7 + func(r rune) int { + switch { + case r == 42: // ['*','*'] + return 38 + case r == 47: // ['/','/'] + return 39 + + } + return NoState + }, + + // S8 + func(r rune) int { + switch { + case r == 46: // ['.','.'] + return 40 + case 48 <= r && r <= 57: // ['0','9'] + return 8 + + } + return NoState + }, + + // S9 + func(r rune) int { + switch { + + } + return NoState + }, + + // S10 + func(r rune) int { + switch { + + } + return NoState + }, + + // S11 + func(r rune) int { + switch { + case 1 <= r && r <= 59: // [\u0001,';'] + return 41 + case r == 60: // ['<','<'] + return 42 + case r == 61: // ['=','='] + return 41 + case r == 62: // ['>','>'] + return 43 + case 63 <= r && r <= 255: // ['?',\u00ff] + return 41 + + } + return NoState + }, + + // S12 + func(r rune) int { + switch { + + } + return NoState + }, + + // S13 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S14 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 72: // ['A','H'] + return 13 + case r == 73: // ['I','I'] + return 45 + case 74 <= r && r <= 90: // ['J','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 104: // ['a','h'] + return 13 + case r == 105: // ['i','i'] + return 46 + case 106 <= r && r <= 122: // ['j','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S15 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 67: // ['A','C'] + return 13 + case r == 68: // ['D','D'] + return 47 + case 69 <= r && r <= 90: // ['E','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 99: // ['a','c'] + return 13 + case r == 100: // ['d','d'] + return 48 + case 101 <= r && r <= 122: // ['e','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S16 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 81: // ['A','Q'] + return 13 + case r == 82: // ['R','R'] + return 49 + case 83 <= r && r <= 90: // ['S','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 113: // ['a','q'] + return 13 + case r == 114: // ['r','r'] + return 50 + case 115 <= r && r <= 122: // ['s','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S17 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // 
['0','9'] + return 44 + case 65 <= r && r <= 78: // ['A','N'] + return 13 + case r == 79: // ['O','O'] + return 51 + case 80 <= r && r <= 90: // ['P','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 110: // ['a','n'] + return 13 + case r == 111: // ['o','o'] + return 52 + case 112 <= r && r <= 122: // ['p','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S18 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 83: // ['A','S'] + return 13 + case r == 84: // ['T','T'] + return 53 + case r == 85: // ['U','U'] + return 54 + case 86 <= r && r <= 90: // ['V','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 115: // ['a','s'] + return 13 + case r == 116: // ['t','t'] + return 55 + case r == 117: // ['u','u'] + return 56 + case 118 <= r && r <= 122: // ['v','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S19 + func(r rune) int { + switch { + + } + return NoState + }, + + // S20 + func(r rune) int { + switch { + + } + return NoState + }, + + // S21 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S22 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 104: // ['a','h'] + return 13 + case r == 105: // ['i','i'] + return 57 + case 106 <= r && r <= 122: // ['j','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S23 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 99: // ['a','c'] + return 13 + case r == 100: // ['d','d'] + return 58 + case 101 <= r && r <= 122: // ['e','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S24 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 113: // ['a','q'] + return 13 + case r == 114: // ['r','r'] + return 59 + case 115 <= r && r <= 122: // ['s','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S25 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 110: // ['a','n'] + return 13 + case r == 111: // ['o','o'] + return 60 + case 112 <= r && r <= 122: // ['p','z'] + 
return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S26 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 115: // ['a','s'] + return 13 + case r == 116: // ['t','t'] + return 61 + case r == 117: // ['u','u'] + return 62 + case 118 <= r && r <= 122: // ['v','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S27 + func(r rune) int { + switch { + + } + return NoState + }, + + // S28 + func(r rune) int { + switch { + + } + return NoState + }, + + // S29 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S30 + func(r rune) int { + switch { + case 1 <= r && r <= 33: // [\u0001,'!'] + return 30 + case r == 34: // ['"','"'] + return 31 + case 35 <= r && r <= 91: // ['#','['] + return 30 + case r == 92: // ['\','\'] + return 32 + case 93 <= r && r <= 127: // [']',\u007f] + return 30 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 33 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 33 + + } + return NoState + }, + + // S31 + func(r rune) int { + switch { + + } + return NoState + }, + + // S32 + func(r rune) int { + switch { + case 1 <= r && r <= 33: // [\u0001,'!'] + return 63 + case r == 34: // ['"','"'] + return 64 + case 35 <= r && r <= 91: // ['#','['] + return 63 + case r == 92: // ['\','\'] + return 64 + case 93 <= r && r <= 127: // [']',\u007f] + return 63 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 65 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 65 + + } + return NoState + }, + + // S33 + func(r rune) int { + switch { + case 1 <= r && r <= 33: // [\u0001,'!'] + return 30 + case r == 34: // ['"','"'] + return 31 + case 35 <= r && r <= 91: // ['#','['] + return 30 + case r == 92: // ['\','\'] + return 32 + case 93 <= r && r <= 127: // [']',\u007f] + return 30 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 33 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 33 + + } + return NoState + }, + + // S34 + func(r rune) int { + switch { + + } + return NoState + }, + + // S35 + func(r rune) int { + switch { + + } + return NoState + }, + + // S36 + func(r rune) int { + switch { + + } + return NoState + }, + + // S37 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 37 + + } + return NoState + }, + + // S38 + func(r rune) int { + switch { + case r == 42: // ['*','*'] + return 66 + + default: + return 38 + } + + }, + + // S39 + func(r rune) int { + switch { + case r == 10: // ['\n','\n'] + return 34 + + default: + return 39 + } + + }, + + // S40 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 67 + + } + return NoState + }, + + // S41 + func(r rune) int { + switch { + case 1 <= r && r <= 59: // [\u0001,';'] + return 41 + case r == 60: // ['<','<'] + return 42 + case r == 61: // ['=','='] + return 41 + case r 
== 62: // ['>','>'] + return 43 + case 63 <= r && r <= 255: // ['?',\u00ff] + return 41 + + } + return NoState + }, + + // S42 + func(r rune) int { + switch { + case 1 <= r && r <= 59: // [\u0001,';'] + return 68 + case r == 61: // ['=','='] + return 68 + case 63 <= r && r <= 255: // ['?',\u00ff] + return 68 + + } + return NoState + }, + + // S43 + func(r rune) int { + switch { + + } + return NoState + }, + + // S44 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S45 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 70: // ['A','F'] + return 13 + case r == 71: // ['G','G'] + return 69 + case 72 <= r && r <= 90: // ['H','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S46 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 70: // ['A','F'] + return 13 + case r == 71: // ['G','G'] + return 70 + case 72 <= r && r <= 90: // ['H','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 102: // ['a','f'] + return 13 + case r == 103: // ['g','g'] + return 71 + case 104 <= r && r <= 122: // ['h','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S47 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 70: // ['A','F'] + return 13 + case r == 71: // ['G','G'] + return 72 + case 72 <= r && r <= 90: // ['H','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S48 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 102: // ['a','f'] + return 13 + case r == 103: // ['g','g'] + return 73 + case 104 <= r && r <= 122: // ['h','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S49 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case r == 65: // ['A','A'] + return 74 + case 66 <= r && r <= 90: // ['B','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S50 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case r == 97: // ['a','a'] + return 
75 + case 98 <= r && r <= 122: // ['b','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S51 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 67: // ['A','C'] + return 13 + case r == 68: // ['D','D'] + return 76 + case 69 <= r && r <= 90: // ['E','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S52 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 99: // ['a','c'] + return 13 + case r == 100: // ['d','d'] + return 77 + case 101 <= r && r <= 122: // ['e','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S53 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 81: // ['A','Q'] + return 13 + case r == 82: // ['R','R'] + return 78 + case 83 <= r && r <= 90: // ['S','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S54 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case r == 65: // ['A','A'] + return 13 + case r == 66: // ['B','B'] + return 79 + case 67 <= r && r <= 90: // ['C','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S55 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 113: // ['a','q'] + return 13 + case r == 114: // ['r','r'] + return 80 + case 115 <= r && r <= 122: // ['s','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S56 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case r == 97: // ['a','a'] + return 13 + case r == 98: // ['b','b'] + return 81 + case 99 <= r && r <= 122: // ['c','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S57 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 70: // ['A','F'] + return 13 + case r == 71: // ['G','G'] + return 82 + case 72 <= r && r <= 90: // ['H','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 102: // ['a','f'] + return 13 + case r == 103: // ['g','g'] + return 83 + case 104 <= r 
&& r <= 122: // ['h','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S58 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 102: // ['a','f'] + return 13 + case r == 103: // ['g','g'] + return 84 + case 104 <= r && r <= 122: // ['h','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S59 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case r == 97: // ['a','a'] + return 85 + case 98 <= r && r <= 122: // ['b','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S60 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 99: // ['a','c'] + return 13 + case r == 100: // ['d','d'] + return 86 + case 101 <= r && r <= 122: // ['e','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S61 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 113: // ['a','q'] + return 13 + case r == 114: // ['r','r'] + return 87 + case 115 <= r && r <= 122: // ['s','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S62 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case r == 97: // ['a','a'] + return 13 + case r == 98: // ['b','b'] + return 88 + case 99 <= r && r <= 122: // ['c','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S63 + func(r rune) int { + switch { + case 1 <= r && r <= 33: // [\u0001,'!'] + return 30 + case r == 34: // ['"','"'] + return 31 + case 35 <= r && r <= 91: // ['#','['] + return 30 + case r == 92: // ['\','\'] + return 32 + case 93 <= r && r <= 127: // [']',\u007f] + return 30 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 33 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 33 + + } + return NoState + }, + + // S64 + func(r rune) int { + switch { + case 1 <= r && r <= 33: // [\u0001,'!'] + return 30 + case r == 34: // ['"','"'] + return 31 + case 35 <= r && r <= 91: // ['#','['] + return 30 + case r == 92: // ['\','\'] + return 32 + case 93 <= r && r <= 127: // [']',\u007f] + return 30 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 33 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 33 + + } + return NoState + }, + + // S65 + func(r rune) int { + switch { + 
case 1 <= r && r <= 33: // [\u0001,'!'] + return 30 + case r == 34: // ['"','"'] + return 31 + case 35 <= r && r <= 91: // ['#','['] + return 30 + case r == 92: // ['\','\'] + return 32 + case 93 <= r && r <= 127: // [']',\u007f] + return 30 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 33 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 33 + + } + return NoState + }, + + // S66 + func(r rune) int { + switch { + case r == 42: // ['*','*'] + return 66 + case r == 47: // ['/','/'] + return 89 + + default: + return 38 + } + + }, + + // S67 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 67 + + } + return NoState + }, + + // S68 + func(r rune) int { + switch { + case 1 <= r && r <= 59: // [\u0001,';'] + return 68 + case r == 61: // ['=','='] + return 68 + case r == 62: // ['>','>'] + return 90 + case 63 <= r && r <= 255: // ['?',\u00ff] + return 68 + + } + return NoState + }, + + // S69 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 81: // ['A','Q'] + return 13 + case r == 82: // ['R','R'] + return 91 + case 83 <= r && r <= 90: // ['S','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S70 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 113: // ['a','q'] + return 13 + case r == 114: // ['r','r'] + return 92 + case 115 <= r && r <= 122: // ['s','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S71 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 113: // ['a','q'] + return 13 + case r == 114: // ['r','r'] + return 93 + case 115 <= r && r <= 122: // ['s','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S72 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 68: // ['A','D'] + return 13 + case r == 69: // ['E','E'] + return 94 + case 70 <= r && r <= 90: // ['F','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S73 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 100: // ['a','d'] + return 13 + case r == 101: // ['e','e'] + return 94 + case 102 <= r && r <= 122: // ['f','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S74 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 79: // 
['A','O'] + return 13 + case r == 80: // ['P','P'] + return 95 + case 81 <= r && r <= 90: // ['Q','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S75 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 111: // ['a','o'] + return 13 + case r == 112: // ['p','p'] + return 96 + case 113 <= r && r <= 122: // ['q','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S76 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 68: // ['A','D'] + return 13 + case r == 69: // ['E','E'] + return 97 + case 70 <= r && r <= 90: // ['F','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S77 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 100: // ['a','d'] + return 13 + case r == 101: // ['e','e'] + return 97 + case 102 <= r && r <= 122: // ['f','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S78 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 72: // ['A','H'] + return 13 + case r == 73: // ['I','I'] + return 98 + case 74 <= r && r <= 90: // ['J','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S79 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 70: // ['A','F'] + return 13 + case r == 71: // ['G','G'] + return 99 + case 72 <= r && r <= 90: // ['H','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S80 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 104: // ['a','h'] + return 13 + case r == 105: // ['i','i'] + return 100 + case 106 <= r && r <= 122: // ['j','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S81 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 70: // ['A','F'] + return 13 + case r == 71: // ['G','G'] + return 101 + case 72 <= r && r <= 
90: // ['H','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 102: // ['a','f'] + return 13 + case r == 103: // ['g','g'] + return 102 + case 104 <= r && r <= 122: // ['h','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S82 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 113: // ['a','q'] + return 13 + case r == 114: // ['r','r'] + return 103 + case 115 <= r && r <= 122: // ['s','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S83 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 113: // ['a','q'] + return 13 + case r == 114: // ['r','r'] + return 104 + case 115 <= r && r <= 122: // ['s','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S84 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 100: // ['a','d'] + return 13 + case r == 101: // ['e','e'] + return 94 + case 102 <= r && r <= 122: // ['f','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S85 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 111: // ['a','o'] + return 13 + case r == 112: // ['p','p'] + return 105 + case 113 <= r && r <= 122: // ['q','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S86 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 100: // ['a','d'] + return 13 + case r == 101: // ['e','e'] + return 97 + case 102 <= r && r <= 122: // ['f','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S87 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 104: // ['a','h'] + return 13 + case r == 105: // ['i','i'] + return 106 + case 106 <= r && r <= 122: // ['j','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S88 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 70: // ['A','F'] + return 13 + case r == 71: // ['G','G'] + return 107 
+ case 72 <= r && r <= 90: // ['H','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 102: // ['a','f'] + return 13 + case r == 103: // ['g','g'] + return 108 + case 104 <= r && r <= 122: // ['h','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S89 + func(r rune) int { + switch { + + } + return NoState + }, + + // S90 + func(r rune) int { + switch { + case 1 <= r && r <= 59: // [\u0001,';'] + return 41 + case r == 60: // ['<','<'] + return 42 + case r == 61: // ['=','='] + return 41 + case r == 62: // ['>','>'] + return 43 + case 63 <= r && r <= 255: // ['?',\u00ff] + return 41 + + } + return NoState + }, + + // S91 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case r == 65: // ['A','A'] + return 109 + case 66 <= r && r <= 90: // ['B','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S92 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case r == 97: // ['a','a'] + return 110 + case 98 <= r && r <= 122: // ['b','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S93 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case r == 97: // ['a','a'] + return 111 + case 98 <= r && r <= 122: // ['b','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S94 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S95 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 71: // ['A','G'] + return 13 + case r == 72: // ['H','H'] + return 112 + case 73 <= r && r <= 90: // ['I','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S96 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 103: // ['a','g'] + return 13 + case r == 104: // ['h','h'] + return 112 + case 105 <= r && r <= 122: // ['i','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S97 + func(r rune) int { + switch { + case 48 
<= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S98 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 66: // ['A','B'] + return 13 + case r == 67: // ['C','C'] + return 113 + case 68 <= r && r <= 90: // ['D','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S99 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 81: // ['A','Q'] + return 13 + case r == 82: // ['R','R'] + return 114 + case 83 <= r && r <= 90: // ['S','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S100 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 98: // ['a','b'] + return 13 + case r == 99: // ['c','c'] + return 115 + case 100 <= r && r <= 122: // ['d','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S101 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 113: // ['a','q'] + return 13 + case r == 114: // ['r','r'] + return 116 + case 115 <= r && r <= 122: // ['s','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S102 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 113: // ['a','q'] + return 13 + case r == 114: // ['r','r'] + return 117 + case 115 <= r && r <= 122: // ['s','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S103 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case r == 97: // ['a','a'] + return 118 + case 98 <= r && r <= 122: // ['b','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S104 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case r == 97: // ['a','a'] + return 119 + case 98 <= r && r <= 122: // ['b','z'] + 
return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S105 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 103: // ['a','g'] + return 13 + case r == 104: // ['h','h'] + return 112 + case 105 <= r && r <= 122: // ['i','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S106 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 98: // ['a','b'] + return 13 + case r == 99: // ['c','c'] + return 120 + case 100 <= r && r <= 122: // ['d','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S107 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 113: // ['a','q'] + return 13 + case r == 114: // ['r','r'] + return 121 + case 115 <= r && r <= 122: // ['s','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S108 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 113: // ['a','q'] + return 13 + case r == 114: // ['r','r'] + return 122 + case 115 <= r && r <= 122: // ['s','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S109 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 79: // ['A','O'] + return 13 + case r == 80: // ['P','P'] + return 123 + case 81 <= r && r <= 90: // ['Q','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S110 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 111: // ['a','o'] + return 13 + case r == 112: // ['p','p'] + return 124 + case 113 <= r && r <= 122: // ['q','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S111 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 111: // ['a','o'] + return 13 + case r == 112: // ['p','p'] + return 125 + case 113 <= r && r <= 122: // ['q','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + 
return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S112 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S113 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 83: // ['A','S'] + return 13 + case r == 84: // ['T','T'] + return 126 + case 85 <= r && r <= 90: // ['U','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S114 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case r == 65: // ['A','A'] + return 127 + case 66 <= r && r <= 90: // ['B','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S115 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 115: // ['a','s'] + return 13 + case r == 116: // ['t','t'] + return 126 + case 117 <= r && r <= 122: // ['u','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S116 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case r == 97: // ['a','a'] + return 128 + case 98 <= r && r <= 122: // ['b','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S117 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case r == 97: // ['a','a'] + return 129 + case 98 <= r && r <= 122: // ['b','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S118 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 111: // ['a','o'] + return 13 + case r == 112: // ['p','p'] + return 130 + case 113 <= r && r <= 122: // ['q','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S119 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 111: 
// ['a','o'] + return 13 + case r == 112: // ['p','p'] + return 131 + case 113 <= r && r <= 122: // ['q','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S120 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 115: // ['a','s'] + return 13 + case r == 116: // ['t','t'] + return 126 + case 117 <= r && r <= 122: // ['u','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S121 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case r == 97: // ['a','a'] + return 132 + case 98 <= r && r <= 122: // ['b','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S122 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case r == 97: // ['a','a'] + return 133 + case 98 <= r && r <= 122: // ['b','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S123 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 71: // ['A','G'] + return 13 + case r == 72: // ['H','H'] + return 134 + case 73 <= r && r <= 90: // ['I','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S124 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 103: // ['a','g'] + return 13 + case r == 104: // ['h','h'] + return 134 + case 105 <= r && r <= 122: // ['i','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S125 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 103: // ['a','g'] + return 13 + case r == 104: // ['h','h'] + return 134 + case 105 <= r && r <= 122: // ['i','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S126 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return 
NoState + }, + + // S127 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 79: // ['A','O'] + return 13 + case r == 80: // ['P','P'] + return 135 + case 81 <= r && r <= 90: // ['Q','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S128 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 111: // ['a','o'] + return 13 + case r == 112: // ['p','p'] + return 136 + case 113 <= r && r <= 122: // ['q','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S129 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 111: // ['a','o'] + return 13 + case r == 112: // ['p','p'] + return 137 + case 113 <= r && r <= 122: // ['q','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S130 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 103: // ['a','g'] + return 13 + case r == 104: // ['h','h'] + return 134 + case 105 <= r && r <= 122: // ['i','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S131 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 103: // ['a','g'] + return 13 + case r == 104: // ['h','h'] + return 134 + case 105 <= r && r <= 122: // ['i','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S132 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 111: // ['a','o'] + return 13 + case r == 112: // ['p','p'] + return 138 + case 113 <= r && r <= 122: // ['q','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S133 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 111: // ['a','o'] + return 13 + case r == 112: // ['p','p'] + return 139 + case 113 <= r && r <= 122: // ['q','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S134 + func(r rune) int { + switch { + case 
48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S135 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 71: // ['A','G'] + return 13 + case r == 72: // ['H','H'] + return 140 + case 73 <= r && r <= 90: // ['I','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S136 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 103: // ['a','g'] + return 13 + case r == 104: // ['h','h'] + return 140 + case 105 <= r && r <= 122: // ['i','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S137 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 103: // ['a','g'] + return 13 + case r == 104: // ['h','h'] + return 140 + case 105 <= r && r <= 122: // ['i','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S138 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 103: // ['a','g'] + return 13 + case r == 104: // ['h','h'] + return 140 + case 105 <= r && r <= 122: // ['i','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S139 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 103: // ['a','g'] + return 13 + case r == 104: // ['h','h'] + return 140 + case 105 <= r && r <= 122: // ['i','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, + + // S140 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 44 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + + } + return NoState + }, +} diff --git a/graph/formats/dot/internal/parser/action.go b/graph/formats/dot/internal/parser/action.go new file mode 100644 index 00000000..10d7eccb --- /dev/null +++ b/graph/formats/dot/internal/parser/action.go @@ -0,0 +1,61 @@ +// Code generated by gocc; DO NOT EDIT. 
+ +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. + +package parser + +import ( + "fmt" +) + +type action interface { + act() + String() string +} + +type ( + accept bool + shift int // value is next state index + reduce int // value is production index +) + +func (this accept) act() {} +func (this shift) act() {} +func (this reduce) act() {} + +func (this accept) Equal(that action) bool { + if _, ok := that.(accept); ok { + return true + } + return false +} + +func (this reduce) Equal(that action) bool { + that1, ok := that.(reduce) + if !ok { + return false + } + return this == that1 +} + +func (this shift) Equal(that action) bool { + that1, ok := that.(shift) + if !ok { + return false + } + return this == that1 +} + +func (this accept) String() string { return "accept(0)" } +func (this shift) String() string { return fmt.Sprintf("shift:%d", this) } +func (this reduce) String() string { + return fmt.Sprintf("reduce:%d(%s)", this, productionsTable[this].String) +} diff --git a/graph/formats/dot/internal/parser/actiontable.go b/graph/formats/dot/internal/parser/actiontable.go new file mode 100644 index 00000000..bffc8f3c --- /dev/null +++ b/graph/formats/dot/internal/parser/actiontable.go @@ -0,0 +1,2286 @@ +// Code generated by gocc; DO NOT EDIT. + +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. 
+ +package parser + +type ( + actionTable [numStates]actionRow + actionRow struct { + canRecover bool + actions [numSymbols]action + } +) + +var actionTab = actionTable{ + actionRow{ // S0 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + shift(4), /* strict */ + reduce(4), /* graphx, reduce: OptStrict */ + reduce(4), /* digraph, reduce: OptStrict */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + + }, + }, + actionRow{ // S1 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + accept(true), /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + shift(4), /* strict */ + reduce(4), /* graphx, reduce: OptStrict */ + reduce(4), /* digraph, reduce: OptStrict */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + + }, + }, + actionRow{ // S2 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + reduce(1), /* $, reduce: File */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + reduce(1), /* strict, reduce: File */ + reduce(1), /* graphx, reduce: File */ + reduce(1), /* digraph, reduce: File */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + + }, + }, + actionRow{ // S3 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + shift(7), /* graphx */ + shift(8), /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + + }, + }, + actionRow{ // S4 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + reduce(5), /* graphx, reduce: OptStrict */ + reduce(5), /* digraph, reduce: OptStrict */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + + }, + }, + actionRow{ // S5 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + reduce(2), /* $, reduce: File */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + reduce(2), /* strict, reduce: File */ + reduce(2), /* graphx, reduce: File */ + reduce(2), /* digraph, reduce: File */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + + }, + }, + actionRow{ // S6 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(53), /* {, reduce: OptID */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + shift(11), /* id */ + + }, + }, + actionRow{ // S7 + canRecover: false, + 
actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(6), /* {, reduce: DirectedGraph */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + reduce(6), /* id, reduce: DirectedGraph */ + + }, + }, + actionRow{ // S8 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(7), /* {, reduce: DirectedGraph */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + reduce(7), /* id, reduce: DirectedGraph */ + + }, + }, + actionRow{ // S9 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + shift(12), /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + + }, + }, + actionRow{ // S10 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(54), /* {, reduce: OptID */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + + }, + }, + actionRow{ // S11 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(52), /* {, reduce: ID */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + + }, + }, + actionRow{ // S12 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(43), /* {, reduce: OptSubgraphID */ + reduce(10), /* }, reduce: OptStmtList */ + nil, /* empty */ + nil, /* strict */ + shift(14), /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + shift(25), /* node */ + shift(26), /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + shift(29), /* subgraph */ + nil, /* : */ + shift(30), /* id */ + + }, + }, + actionRow{ // S13 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + shift(31), /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + + }, + }, + actionRow{ // S14 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + reduce(27), /* [, reduce: Component */ + nil, /* ] */ + 
nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + + }, + }, + actionRow{ // S15 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(43), /* {, reduce: OptSubgraphID */ + reduce(11), /* }, reduce: OptStmtList */ + nil, /* empty */ + nil, /* strict */ + shift(14), /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + shift(25), /* node */ + shift(26), /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + shift(29), /* subgraph */ + nil, /* : */ + shift(30), /* id */ + + }, + }, + actionRow{ // S16 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(17), /* {, reduce: OptSemi */ + reduce(17), /* }, reduce: OptSemi */ + nil, /* empty */ + nil, /* strict */ + reduce(17), /* graphx, reduce: OptSemi */ + nil, /* digraph */ + shift(34), /* ; */ + nil, /* -- */ + nil, /* -> */ + reduce(17), /* node, reduce: OptSemi */ + reduce(17), /* edge, reduce: OptSemi */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(17), /* subgraph, reduce: OptSemi */ + nil, /* : */ + reduce(17), /* id, reduce: OptSemi */ + + }, + }, + actionRow{ // S17 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(12), /* {, reduce: Stmt */ + reduce(12), /* }, reduce: Stmt */ + nil, /* empty */ + nil, /* strict */ + reduce(12), /* graphx, reduce: Stmt */ + nil, /* digraph */ + reduce(12), /* ;, reduce: Stmt */ + nil, /* -- */ + nil, /* -> */ + reduce(12), /* node, reduce: Stmt */ + reduce(12), /* edge, reduce: Stmt */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(12), /* subgraph, reduce: Stmt */ + nil, /* : */ + reduce(12), /* id, reduce: Stmt */ + + }, + }, + actionRow{ // S18 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(13), /* {, reduce: Stmt */ + reduce(13), /* }, reduce: Stmt */ + nil, /* empty */ + nil, /* strict */ + reduce(13), /* graphx, reduce: Stmt */ + nil, /* digraph */ + reduce(13), /* ;, reduce: Stmt */ + nil, /* -- */ + nil, /* -> */ + reduce(13), /* node, reduce: Stmt */ + reduce(13), /* edge, reduce: Stmt */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(13), /* subgraph, reduce: Stmt */ + nil, /* : */ + reduce(13), /* id, reduce: Stmt */ + + }, + }, + actionRow{ // S19 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(14), /* {, reduce: Stmt */ + reduce(14), /* }, reduce: Stmt */ + nil, /* empty */ + nil, /* strict */ + reduce(14), /* graphx, reduce: Stmt */ + nil, /* digraph */ + reduce(14), /* ;, reduce: Stmt */ + nil, /* -- */ + nil, /* -> */ + reduce(14), /* node, reduce: Stmt */ + reduce(14), /* edge, reduce: Stmt */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(14), /* subgraph, reduce: Stmt */ + nil, /* : */ + reduce(14), /* id, reduce: Stmt */ + + }, + }, + actionRow{ // S20 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(15), /* {, reduce: Stmt */ + reduce(15), /* }, reduce: Stmt */ + nil, /* empty */ + nil, /* strict */ + reduce(15), /* graphx, reduce: Stmt */ + nil, /* digraph */ + reduce(15), /* ;, reduce: Stmt */ + nil, /* -- */ + nil, /* -> */ + reduce(15), /* node, reduce: Stmt */ + reduce(15), /* edge, reduce: Stmt */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(15), /* subgraph, reduce: Stmt */ + nil, /* : */ + reduce(15), /* id, 
reduce: Stmt */ + + }, + }, + actionRow{ // S21 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(16), /* {, reduce: Stmt */ + reduce(16), /* }, reduce: Stmt */ + nil, /* empty */ + nil, /* strict */ + reduce(16), /* graphx, reduce: Stmt */ + nil, /* digraph */ + reduce(16), /* ;, reduce: Stmt */ + reduce(46), /* --, reduce: Vertex */ + reduce(46), /* ->, reduce: Vertex */ + reduce(16), /* node, reduce: Stmt */ + reduce(16), /* edge, reduce: Stmt */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(16), /* subgraph, reduce: Stmt */ + nil, /* : */ + reduce(16), /* id, reduce: Stmt */ + + }, + }, + actionRow{ // S22 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(32), /* {, reduce: OptAttrList */ + reduce(32), /* }, reduce: OptAttrList */ + nil, /* empty */ + nil, /* strict */ + reduce(32), /* graphx, reduce: OptAttrList */ + nil, /* digraph */ + reduce(32), /* ;, reduce: OptAttrList */ + reduce(45), /* --, reduce: Vertex */ + reduce(45), /* ->, reduce: Vertex */ + reduce(32), /* node, reduce: OptAttrList */ + reduce(32), /* edge, reduce: OptAttrList */ + shift(37), /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(32), /* subgraph, reduce: OptAttrList */ + nil, /* : */ + reduce(32), /* id, reduce: OptAttrList */ + + }, + }, + actionRow{ // S23 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + shift(40), /* -- */ + shift(41), /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + + }, + }, + actionRow{ // S24 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + shift(37), /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + + }, + }, + actionRow{ // S25 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + reduce(28), /* [, reduce: Component */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + + }, + }, + actionRow{ // S26 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + reduce(29), /* [, reduce: Component */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + + }, + }, + actionRow{ // S27 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(50), /* {, reduce: OptPort */ + reduce(50), /* }, reduce: OptPort */ + nil, /* empty */ + nil, /* strict */ + reduce(50), /* graphx, reduce: OptPort */ + nil, /* digraph */ + reduce(50), /* ;, reduce: OptPort */ + reduce(50), /* --, reduce: OptPort */ + reduce(50), /* ->, reduce: OptPort */ + 
reduce(50), /* node, reduce: OptPort */ + reduce(50), /* edge, reduce: OptPort */ + reduce(50), /* [, reduce: OptPort */ + nil, /* ] */ + nil, /* , */ + shift(43), /* = */ + reduce(50), /* subgraph, reduce: OptPort */ + shift(46), /* : */ + reduce(50), /* id, reduce: OptPort */ + + }, + }, + actionRow{ // S28 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + shift(47), /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + + }, + }, + actionRow{ // S29 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(53), /* {, reduce: OptID */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + shift(11), /* id */ + + }, + }, + actionRow{ // S30 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(52), /* {, reduce: ID */ + reduce(52), /* }, reduce: ID */ + nil, /* empty */ + nil, /* strict */ + reduce(52), /* graphx, reduce: ID */ + nil, /* digraph */ + reduce(52), /* ;, reduce: ID */ + reduce(52), /* --, reduce: ID */ + reduce(52), /* ->, reduce: ID */ + reduce(52), /* node, reduce: ID */ + reduce(52), /* edge, reduce: ID */ + reduce(52), /* [, reduce: ID */ + nil, /* ] */ + nil, /* , */ + reduce(52), /* =, reduce: ID */ + reduce(52), /* subgraph, reduce: ID */ + reduce(52), /* :, reduce: ID */ + reduce(52), /* id, reduce: ID */ + + }, + }, + actionRow{ // S31 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + reduce(3), /* $, reduce: Graph */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + reduce(3), /* strict, reduce: Graph */ + reduce(3), /* graphx, reduce: Graph */ + reduce(3), /* digraph, reduce: Graph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + + }, + }, + actionRow{ // S32 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(17), /* {, reduce: OptSemi */ + reduce(17), /* }, reduce: OptSemi */ + nil, /* empty */ + nil, /* strict */ + reduce(17), /* graphx, reduce: OptSemi */ + nil, /* digraph */ + shift(34), /* ; */ + nil, /* -- */ + nil, /* -> */ + reduce(17), /* node, reduce: OptSemi */ + reduce(17), /* edge, reduce: OptSemi */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(17), /* subgraph, reduce: OptSemi */ + nil, /* : */ + reduce(17), /* id, reduce: OptSemi */ + + }, + }, + actionRow{ // S33 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(8), /* {, reduce: StmtList */ + reduce(8), /* }, reduce: StmtList */ + nil, /* empty */ + nil, /* strict */ + reduce(8), /* graphx, reduce: StmtList */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + reduce(8), /* node, reduce: StmtList */ + reduce(8), /* edge, reduce: StmtList */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(8), /* subgraph, reduce: StmtList */ + nil, /* : */ + reduce(8), /* id, reduce: StmtList */ + + }, + }, 
+ actionRow{ // S34 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(18), /* {, reduce: OptSemi */ + reduce(18), /* }, reduce: OptSemi */ + nil, /* empty */ + nil, /* strict */ + reduce(18), /* graphx, reduce: OptSemi */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + reduce(18), /* node, reduce: OptSemi */ + reduce(18), /* edge, reduce: OptSemi */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(18), /* subgraph, reduce: OptSemi */ + nil, /* : */ + reduce(18), /* id, reduce: OptSemi */ + + }, + }, + actionRow{ // S35 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(19), /* {, reduce: NodeStmt */ + reduce(19), /* }, reduce: NodeStmt */ + nil, /* empty */ + nil, /* strict */ + reduce(19), /* graphx, reduce: NodeStmt */ + nil, /* digraph */ + reduce(19), /* ;, reduce: NodeStmt */ + nil, /* -- */ + nil, /* -> */ + reduce(19), /* node, reduce: NodeStmt */ + reduce(19), /* edge, reduce: NodeStmt */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(19), /* subgraph, reduce: NodeStmt */ + nil, /* : */ + reduce(19), /* id, reduce: NodeStmt */ + + }, + }, + actionRow{ // S36 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(33), /* {, reduce: OptAttrList */ + reduce(33), /* }, reduce: OptAttrList */ + nil, /* empty */ + nil, /* strict */ + reduce(33), /* graphx, reduce: OptAttrList */ + nil, /* digraph */ + reduce(33), /* ;, reduce: OptAttrList */ + nil, /* -- */ + nil, /* -> */ + reduce(33), /* node, reduce: OptAttrList */ + reduce(33), /* edge, reduce: OptAttrList */ + shift(50), /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(33), /* subgraph, reduce: OptAttrList */ + nil, /* : */ + reduce(33), /* id, reduce: OptAttrList */ + + }, + }, + actionRow{ // S37 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + reduce(36), /* ], reduce: OptAList */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + shift(55), /* id */ + + }, + }, + actionRow{ // S38 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(32), /* {, reduce: OptAttrList */ + reduce(32), /* }, reduce: OptAttrList */ + nil, /* empty */ + nil, /* strict */ + reduce(32), /* graphx, reduce: OptAttrList */ + nil, /* digraph */ + reduce(32), /* ;, reduce: OptAttrList */ + nil, /* -- */ + nil, /* -> */ + reduce(32), /* node, reduce: OptAttrList */ + reduce(32), /* edge, reduce: OptAttrList */ + shift(37), /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(32), /* subgraph, reduce: OptAttrList */ + nil, /* : */ + reduce(32), /* id, reduce: OptAttrList */ + + }, + }, + actionRow{ // S39 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(43), /* {, reduce: OptSubgraphID */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + shift(29), /* subgraph */ + nil, /* : */ + shift(62), /* id */ + + }, + }, + actionRow{ // S40 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + 
nil, /* $ */ + reduce(22), /* {, reduce: DirectedEdge */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(22), /* subgraph, reduce: DirectedEdge */ + nil, /* : */ + reduce(22), /* id, reduce: DirectedEdge */ + + }, + }, + actionRow{ // S41 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(23), /* {, reduce: DirectedEdge */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(23), /* subgraph, reduce: DirectedEdge */ + nil, /* : */ + reduce(23), /* id, reduce: DirectedEdge */ + + }, + }, + actionRow{ // S42 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(26), /* {, reduce: AttrStmt */ + reduce(26), /* }, reduce: AttrStmt */ + nil, /* empty */ + nil, /* strict */ + reduce(26), /* graphx, reduce: AttrStmt */ + nil, /* digraph */ + reduce(26), /* ;, reduce: AttrStmt */ + nil, /* -- */ + nil, /* -> */ + reduce(26), /* node, reduce: AttrStmt */ + reduce(26), /* edge, reduce: AttrStmt */ + shift(50), /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(26), /* subgraph, reduce: AttrStmt */ + nil, /* : */ + reduce(26), /* id, reduce: AttrStmt */ + + }, + }, + actionRow{ // S43 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + shift(64), /* id */ + + }, + }, + actionRow{ // S44 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(47), /* {, reduce: Node */ + reduce(47), /* }, reduce: Node */ + nil, /* empty */ + nil, /* strict */ + reduce(47), /* graphx, reduce: Node */ + nil, /* digraph */ + reduce(47), /* ;, reduce: Node */ + reduce(47), /* --, reduce: Node */ + reduce(47), /* ->, reduce: Node */ + reduce(47), /* node, reduce: Node */ + reduce(47), /* edge, reduce: Node */ + reduce(47), /* [, reduce: Node */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(47), /* subgraph, reduce: Node */ + nil, /* : */ + reduce(47), /* id, reduce: Node */ + + }, + }, + actionRow{ // S45 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(51), /* {, reduce: OptPort */ + reduce(51), /* }, reduce: OptPort */ + nil, /* empty */ + nil, /* strict */ + reduce(51), /* graphx, reduce: OptPort */ + nil, /* digraph */ + reduce(51), /* ;, reduce: OptPort */ + reduce(51), /* --, reduce: OptPort */ + reduce(51), /* ->, reduce: OptPort */ + reduce(51), /* node, reduce: OptPort */ + reduce(51), /* edge, reduce: OptPort */ + reduce(51), /* [, reduce: OptPort */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(51), /* subgraph, reduce: OptPort */ + nil, /* : */ + reduce(51), /* id, reduce: OptPort */ + + }, + }, + actionRow{ // S46 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* 
digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + shift(62), /* id */ + + }, + }, + actionRow{ // S47 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(43), /* {, reduce: OptSubgraphID */ + reduce(10), /* }, reduce: OptStmtList */ + nil, /* empty */ + nil, /* strict */ + shift(14), /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + shift(25), /* node */ + shift(26), /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + shift(29), /* subgraph */ + nil, /* : */ + shift(30), /* id */ + + }, + }, + actionRow{ // S48 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(44), /* {, reduce: OptSubgraphID */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + + }, + }, + actionRow{ // S49 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(9), /* {, reduce: StmtList */ + reduce(9), /* }, reduce: StmtList */ + nil, /* empty */ + nil, /* strict */ + reduce(9), /* graphx, reduce: StmtList */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + reduce(9), /* node, reduce: StmtList */ + reduce(9), /* edge, reduce: StmtList */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(9), /* subgraph, reduce: StmtList */ + nil, /* : */ + reduce(9), /* id, reduce: StmtList */ + + }, + }, + actionRow{ // S50 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + reduce(36), /* ], reduce: OptAList */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + shift(55), /* id */ + + }, + }, + actionRow{ // S51 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + shift(68), /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + reduce(38), /* ], reduce: OptSep */ + shift(70), /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + reduce(38), /* id, reduce: OptSep */ + + }, + }, + actionRow{ // S52 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + shift(71), /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + + }, + }, + actionRow{ // S53 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + reduce(37), /* ], reduce: OptAList */ + nil, /* , */ + nil, /* = */ + nil, 
/* subgraph */ + nil, /* : */ + shift(55), /* id */ + + }, + }, + actionRow{ // S54 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + shift(73), /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + + }, + }, + actionRow{ // S55 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + reduce(52), /* =, reduce: ID */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + + }, + }, + actionRow{ // S56 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(20), /* {, reduce: EdgeStmt */ + reduce(20), /* }, reduce: EdgeStmt */ + nil, /* empty */ + nil, /* strict */ + reduce(20), /* graphx, reduce: EdgeStmt */ + nil, /* digraph */ + reduce(20), /* ;, reduce: EdgeStmt */ + nil, /* -- */ + nil, /* -> */ + reduce(20), /* node, reduce: EdgeStmt */ + reduce(20), /* edge, reduce: EdgeStmt */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(20), /* subgraph, reduce: EdgeStmt */ + nil, /* : */ + reduce(20), /* id, reduce: EdgeStmt */ + + }, + }, + actionRow{ // S57 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(46), /* {, reduce: Vertex */ + reduce(46), /* }, reduce: Vertex */ + nil, /* empty */ + nil, /* strict */ + reduce(46), /* graphx, reduce: Vertex */ + nil, /* digraph */ + reduce(46), /* ;, reduce: Vertex */ + reduce(46), /* --, reduce: Vertex */ + reduce(46), /* ->, reduce: Vertex */ + reduce(46), /* node, reduce: Vertex */ + reduce(46), /* edge, reduce: Vertex */ + reduce(46), /* [, reduce: Vertex */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(46), /* subgraph, reduce: Vertex */ + nil, /* : */ + reduce(46), /* id, reduce: Vertex */ + + }, + }, + actionRow{ // S58 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(45), /* {, reduce: Vertex */ + reduce(45), /* }, reduce: Vertex */ + nil, /* empty */ + nil, /* strict */ + reduce(45), /* graphx, reduce: Vertex */ + nil, /* digraph */ + reduce(45), /* ;, reduce: Vertex */ + reduce(45), /* --, reduce: Vertex */ + reduce(45), /* ->, reduce: Vertex */ + reduce(45), /* node, reduce: Vertex */ + reduce(45), /* edge, reduce: Vertex */ + reduce(45), /* [, reduce: Vertex */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(45), /* subgraph, reduce: Vertex */ + nil, /* : */ + reduce(45), /* id, reduce: Vertex */ + + }, + }, + actionRow{ // S59 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(24), /* {, reduce: OptEdge */ + reduce(24), /* }, reduce: OptEdge */ + nil, /* empty */ + nil, /* strict */ + reduce(24), /* graphx, reduce: OptEdge */ + nil, /* digraph */ + reduce(24), /* ;, reduce: OptEdge */ + shift(40), /* -- */ + shift(41), /* -> */ + reduce(24), /* node, reduce: OptEdge */ + reduce(24), /* edge, reduce: OptEdge */ + reduce(24), /* [, reduce: OptEdge */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(24), /* subgraph, reduce: OptEdge */ + nil, /* : */ + reduce(24), /* id, 
reduce: OptEdge */ + + }, + }, + actionRow{ // S60 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(50), /* {, reduce: OptPort */ + reduce(50), /* }, reduce: OptPort */ + nil, /* empty */ + nil, /* strict */ + reduce(50), /* graphx, reduce: OptPort */ + nil, /* digraph */ + reduce(50), /* ;, reduce: OptPort */ + reduce(50), /* --, reduce: OptPort */ + reduce(50), /* ->, reduce: OptPort */ + reduce(50), /* node, reduce: OptPort */ + reduce(50), /* edge, reduce: OptPort */ + reduce(50), /* [, reduce: OptPort */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(50), /* subgraph, reduce: OptPort */ + shift(46), /* : */ + reduce(50), /* id, reduce: OptPort */ + + }, + }, + actionRow{ // S61 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + shift(76), /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + + }, + }, + actionRow{ // S62 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(52), /* {, reduce: ID */ + reduce(52), /* }, reduce: ID */ + nil, /* empty */ + nil, /* strict */ + reduce(52), /* graphx, reduce: ID */ + nil, /* digraph */ + reduce(52), /* ;, reduce: ID */ + reduce(52), /* --, reduce: ID */ + reduce(52), /* ->, reduce: ID */ + reduce(52), /* node, reduce: ID */ + reduce(52), /* edge, reduce: ID */ + reduce(52), /* [, reduce: ID */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(52), /* subgraph, reduce: ID */ + reduce(52), /* :, reduce: ID */ + reduce(52), /* id, reduce: ID */ + + }, + }, + actionRow{ // S63 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(41), /* {, reduce: Attr */ + reduce(41), /* }, reduce: Attr */ + nil, /* empty */ + nil, /* strict */ + reduce(41), /* graphx, reduce: Attr */ + nil, /* digraph */ + reduce(41), /* ;, reduce: Attr */ + nil, /* -- */ + nil, /* -> */ + reduce(41), /* node, reduce: Attr */ + reduce(41), /* edge, reduce: Attr */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(41), /* subgraph, reduce: Attr */ + nil, /* : */ + reduce(41), /* id, reduce: Attr */ + + }, + }, + actionRow{ // S64 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(52), /* {, reduce: ID */ + reduce(52), /* }, reduce: ID */ + nil, /* empty */ + nil, /* strict */ + reduce(52), /* graphx, reduce: ID */ + nil, /* digraph */ + reduce(52), /* ;, reduce: ID */ + nil, /* -- */ + nil, /* -> */ + reduce(52), /* node, reduce: ID */ + reduce(52), /* edge, reduce: ID */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(52), /* subgraph, reduce: ID */ + nil, /* : */ + reduce(52), /* id, reduce: ID */ + + }, + }, + actionRow{ // S65 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(48), /* {, reduce: Port */ + reduce(48), /* }, reduce: Port */ + nil, /* empty */ + nil, /* strict */ + reduce(48), /* graphx, reduce: Port */ + nil, /* digraph */ + reduce(48), /* ;, reduce: Port */ + reduce(48), /* --, reduce: Port */ + reduce(48), /* ->, reduce: Port */ + reduce(48), /* node, reduce: Port */ + reduce(48), /* edge, reduce: Port */ + reduce(48), /* [, reduce: Port */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(48), /* 
subgraph, reduce: Port */ + shift(77), /* : */ + reduce(48), /* id, reduce: Port */ + + }, + }, + actionRow{ // S66 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + shift(78), /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + + }, + }, + actionRow{ // S67 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + shift(79), /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + + }, + }, + actionRow{ // S68 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + reduce(39), /* ], reduce: OptSep */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + reduce(39), /* id, reduce: OptSep */ + + }, + }, + actionRow{ // S69 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + reduce(34), /* ], reduce: AList */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + reduce(34), /* id, reduce: AList */ + + }, + }, + actionRow{ // S70 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + reduce(40), /* ], reduce: OptSep */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + reduce(40), /* id, reduce: OptSep */ + + }, + }, + actionRow{ // S71 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(30), /* {, reduce: AttrList */ + reduce(30), /* }, reduce: AttrList */ + nil, /* empty */ + nil, /* strict */ + reduce(30), /* graphx, reduce: AttrList */ + nil, /* digraph */ + reduce(30), /* ;, reduce: AttrList */ + nil, /* -- */ + nil, /* -> */ + reduce(30), /* node, reduce: AttrList */ + reduce(30), /* edge, reduce: AttrList */ + reduce(30), /* [, reduce: AttrList */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(30), /* subgraph, reduce: AttrList */ + nil, /* : */ + reduce(30), /* id, reduce: AttrList */ + + }, + }, + actionRow{ // S72 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + shift(68), /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + reduce(38), /* ], reduce: OptSep */ + shift(70), /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + reduce(38), /* id, reduce: OptSep */ + + }, + }, + actionRow{ // S73 + canRecover: false, + 
actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + shift(82), /* id */ + + }, + }, + actionRow{ // S74 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(25), /* {, reduce: OptEdge */ + reduce(25), /* }, reduce: OptEdge */ + nil, /* empty */ + nil, /* strict */ + reduce(25), /* graphx, reduce: OptEdge */ + nil, /* digraph */ + reduce(25), /* ;, reduce: OptEdge */ + nil, /* -- */ + nil, /* -> */ + reduce(25), /* node, reduce: OptEdge */ + reduce(25), /* edge, reduce: OptEdge */ + reduce(25), /* [, reduce: OptEdge */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(25), /* subgraph, reduce: OptEdge */ + nil, /* : */ + reduce(25), /* id, reduce: OptEdge */ + + }, + }, + actionRow{ // S75 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(21), /* {, reduce: Edge */ + reduce(21), /* }, reduce: Edge */ + nil, /* empty */ + nil, /* strict */ + reduce(21), /* graphx, reduce: Edge */ + nil, /* digraph */ + reduce(21), /* ;, reduce: Edge */ + nil, /* -- */ + nil, /* -> */ + reduce(21), /* node, reduce: Edge */ + reduce(21), /* edge, reduce: Edge */ + reduce(21), /* [, reduce: Edge */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(21), /* subgraph, reduce: Edge */ + nil, /* : */ + reduce(21), /* id, reduce: Edge */ + + }, + }, + actionRow{ // S76 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(43), /* {, reduce: OptSubgraphID */ + reduce(10), /* }, reduce: OptStmtList */ + nil, /* empty */ + nil, /* strict */ + shift(14), /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + shift(25), /* node */ + shift(26), /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + shift(29), /* subgraph */ + nil, /* : */ + shift(30), /* id */ + + }, + }, + actionRow{ // S77 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + shift(85), /* id */ + + }, + }, + actionRow{ // S78 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(42), /* {, reduce: Subgraph */ + reduce(42), /* }, reduce: Subgraph */ + nil, /* empty */ + nil, /* strict */ + reduce(42), /* graphx, reduce: Subgraph */ + nil, /* digraph */ + reduce(42), /* ;, reduce: Subgraph */ + reduce(42), /* --, reduce: Subgraph */ + reduce(42), /* ->, reduce: Subgraph */ + reduce(42), /* node, reduce: Subgraph */ + reduce(42), /* edge, reduce: Subgraph */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(42), /* subgraph, reduce: Subgraph */ + nil, /* : */ + reduce(42), /* id, reduce: Subgraph */ + + }, + }, + actionRow{ // S79 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(31), /* {, reduce: AttrList */ + reduce(31), /* }, reduce: AttrList */ + nil, /* empty */ + nil, /* strict */ + reduce(31), /* graphx, reduce: AttrList */ + nil, /* 
digraph */ + reduce(31), /* ;, reduce: AttrList */ + nil, /* -- */ + nil, /* -> */ + reduce(31), /* node, reduce: AttrList */ + reduce(31), /* edge, reduce: AttrList */ + reduce(31), /* [, reduce: AttrList */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(31), /* subgraph, reduce: AttrList */ + nil, /* : */ + reduce(31), /* id, reduce: AttrList */ + + }, + }, + actionRow{ // S80 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + reduce(35), /* ], reduce: AList */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + reduce(35), /* id, reduce: AList */ + + }, + }, + actionRow{ // S81 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + reduce(41), /* ;, reduce: Attr */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + reduce(41), /* ], reduce: Attr */ + reduce(41), /* ,, reduce: Attr */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + reduce(41), /* id, reduce: Attr */ + + }, + }, + actionRow{ // S82 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + reduce(52), /* ;, reduce: ID */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + reduce(52), /* ], reduce: ID */ + reduce(52), /* ,, reduce: ID */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + reduce(52), /* id, reduce: ID */ + + }, + }, + actionRow{ // S83 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + shift(86), /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + + }, + }, + actionRow{ // S84 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(49), /* {, reduce: Port */ + reduce(49), /* }, reduce: Port */ + nil, /* empty */ + nil, /* strict */ + reduce(49), /* graphx, reduce: Port */ + nil, /* digraph */ + reduce(49), /* ;, reduce: Port */ + reduce(49), /* --, reduce: Port */ + reduce(49), /* ->, reduce: Port */ + reduce(49), /* node, reduce: Port */ + reduce(49), /* edge, reduce: Port */ + reduce(49), /* [, reduce: Port */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(49), /* subgraph, reduce: Port */ + nil, /* : */ + reduce(49), /* id, reduce: Port */ + + }, + }, + actionRow{ // S85 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(52), /* {, reduce: ID */ + reduce(52), /* }, reduce: ID */ + nil, /* empty */ + nil, /* strict */ + reduce(52), /* graphx, reduce: ID */ + nil, /* digraph */ + reduce(52), /* ;, reduce: ID */ + reduce(52), /* --, reduce: ID */ + reduce(52), /* ->, reduce: ID */ + reduce(52), /* node, reduce: ID */ + reduce(52), /* edge, reduce: ID */ + reduce(52), /* [, reduce: ID */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(52), /* subgraph, reduce: ID */ + nil, /* : */ + reduce(52), /* id, 
reduce: ID */ + + }, + }, + actionRow{ // S86 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(42), /* {, reduce: Subgraph */ + reduce(42), /* }, reduce: Subgraph */ + nil, /* empty */ + nil, /* strict */ + reduce(42), /* graphx, reduce: Subgraph */ + nil, /* digraph */ + reduce(42), /* ;, reduce: Subgraph */ + reduce(42), /* --, reduce: Subgraph */ + reduce(42), /* ->, reduce: Subgraph */ + reduce(42), /* node, reduce: Subgraph */ + reduce(42), /* edge, reduce: Subgraph */ + reduce(42), /* [, reduce: Subgraph */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(42), /* subgraph, reduce: Subgraph */ + nil, /* : */ + reduce(42), /* id, reduce: Subgraph */ + + }, + }, +} diff --git a/graph/formats/dot/internal/parser/gototable.go b/graph/formats/dot/internal/parser/gototable.go new file mode 100644 index 00000000..b4539576 --- /dev/null +++ b/graph/formats/dot/internal/parser/gototable.go @@ -0,0 +1,2894 @@ +// Code generated by gocc; DO NOT EDIT. + +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. + +package parser + +const numNTSymbols = 30 + +type ( + gotoTable [numStates]gotoRow + gotoRow [numNTSymbols]int +) + +var gotoTab = gotoTable{ + gotoRow{ // S0 + -1, // S' + 1, // File + 2, // Graph + 3, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S1 + -1, // S' + -1, // File + 5, // Graph + 3, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S2 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S3 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + 6, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, 
// OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S4 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S5 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S6 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + 10, // ID + 9, // OptID + + }, + gotoRow{ // S7 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S8 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S9 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S10 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // 
DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S11 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S12 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + 15, // StmtList + 13, // OptStmtList + 16, // Stmt + -1, // OptSemi + 17, // NodeStmt + 18, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + 19, // AttrStmt + 24, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + 20, // Attr + 21, // Subgraph + 28, // OptSubgraphID + 23, // Vertex + 22, // Node + -1, // Port + -1, // OptPort + 27, // ID + -1, // OptID + + }, + gotoRow{ // S13 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S14 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S15 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + 32, // Stmt + -1, // OptSemi + 17, // NodeStmt + 18, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + 19, // AttrStmt + 24, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + 20, // Attr + 21, // Subgraph + 28, // OptSubgraphID + 23, // Vertex + 22, // Node + -1, // Port + -1, // OptPort + 27, // ID + -1, // OptID + + }, + gotoRow{ // S16 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + 33, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S17 + -1, // S' + -1, // 
File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S18 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S19 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S20 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S21 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S22 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + 36, // AttrList + 35, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S23 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + 38, // Edge + 39, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // 
OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S24 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + 42, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S25 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S26 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S27 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + 45, // Port + 44, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S28 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S29 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + 10, // ID + 48, // OptID + + }, + gotoRow{ // S30 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // 
NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S31 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S32 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + 49, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S33 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S34 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S35 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S36 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // 
OptID + + }, + gotoRow{ // S37 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + 53, // AList + 52, // OptAList + -1, // OptSep + 51, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + 54, // ID + -1, // OptID + + }, + gotoRow{ // S38 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + 36, // AttrList + 56, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S39 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + 57, // Subgraph + 61, // OptSubgraphID + 59, // Vertex + 58, // Node + -1, // Port + -1, // OptPort + 60, // ID + -1, // OptID + + }, + gotoRow{ // S40 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S41 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S42 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S43 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // 
OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + 63, // ID + -1, // OptID + + }, + gotoRow{ // S44 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S45 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S46 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + 65, // ID + -1, // OptID + + }, + gotoRow{ // S47 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + 15, // StmtList + 66, // OptStmtList + 16, // Stmt + -1, // OptSemi + 17, // NodeStmt + 18, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + 19, // AttrStmt + 24, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + 20, // Attr + 21, // Subgraph + 28, // OptSubgraphID + 23, // Vertex + 22, // Node + -1, // Port + -1, // OptPort + 27, // ID + -1, // OptID + + }, + gotoRow{ // S48 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S49 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S50 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // 
OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + 53, // AList + 67, // OptAList + -1, // OptSep + 51, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + 54, // ID + -1, // OptID + + }, + gotoRow{ // S51 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + 69, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S52 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S53 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + 72, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + 54, // ID + -1, // OptID + + }, + gotoRow{ // S54 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S55 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S56 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + 
-1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S57 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S58 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S59 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + 74, // Edge + 39, // DirectedEdge + 75, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S60 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + 45, // Port + 44, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S61 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S62 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S63 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + 
-1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S64 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S65 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S66 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S67 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S68 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S69 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S70 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // 
DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S71 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S72 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + 80, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S73 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + 81, // ID + -1, // OptID + + }, + gotoRow{ // S74 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S75 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S76 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + 15, // StmtList + 83, // OptStmtList + 16, // Stmt + -1, // OptSemi + 17, // NodeStmt + 18, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + 19, // AttrStmt + 24, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + 20, // Attr + 21, // Subgraph + 28, // 
OptSubgraphID + 23, // Vertex + 22, // Node + -1, // Port + -1, // OptPort + 27, // ID + -1, // OptID + + }, + gotoRow{ // S77 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + 84, // ID + -1, // OptID + + }, + gotoRow{ // S78 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S79 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S80 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S81 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S82 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S83 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // 
DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S84 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S85 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, + gotoRow{ // S86 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + + }, +} diff --git a/graph/formats/dot/internal/parser/parser.go b/graph/formats/dot/internal/parser/parser.go new file mode 100644 index 00000000..b63555cf --- /dev/null +++ b/graph/formats/dot/internal/parser/parser.go @@ -0,0 +1,222 @@ +// Code generated by gocc; DO NOT EDIT. + +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. 
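// A brief orientation to the generated parser in this file: the grammar's
// parse tables live in actionTab and gotoTab (defined elsewhere in this
// package), and Parse below runs a conventional table-driven LR loop. For the
// current state and lookahead token it looks up an action and either shifts
// the token onto the stack, reduces by a production (calling that production's
// ReduceFunc from productionstable.go and pushing the state found in gotoTab),
// or accepts. A nil action triggers the error-recovery path in Error, which
// unwinds the stack to the nearest recoverable state before giving up.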
+ +package parser + +import ( + "bytes" + "fmt" + + parseError "github.com/gonum/graph/formats/dot/internal/errors" + "github.com/gonum/graph/formats/dot/internal/token" +) + +const ( + numProductions = 55 + numStates = 87 + numSymbols = 50 +) + +// Stack + +type stack struct { + state []int + attrib []Attrib +} + +const iNITIAL_STACK_SIZE = 100 + +func newStack() *stack { + return &stack{state: make([]int, 0, iNITIAL_STACK_SIZE), + attrib: make([]Attrib, 0, iNITIAL_STACK_SIZE), + } +} + +func (this *stack) reset() { + this.state = this.state[0:0] + this.attrib = this.attrib[0:0] +} + +func (this *stack) push(s int, a Attrib) { + this.state = append(this.state, s) + this.attrib = append(this.attrib, a) +} + +func (this *stack) top() int { + return this.state[len(this.state)-1] +} + +func (this *stack) peek(pos int) int { + return this.state[pos] +} + +func (this *stack) topIndex() int { + return len(this.state) - 1 +} + +func (this *stack) popN(items int) []Attrib { + lo, hi := len(this.state)-items, len(this.state) + + attrib := this.attrib[lo:hi] + + this.state = this.state[:lo] + this.attrib = this.attrib[:lo] + + return attrib +} + +func (S *stack) String() string { + w := new(bytes.Buffer) + fmt.Fprintf(w, "stack:\n") + for i, st := range S.state { + fmt.Fprintf(w, "\t%d:%d , ", i, st) + if S.attrib[i] == nil { + fmt.Fprintf(w, "nil") + } else { + fmt.Fprintf(w, "%v", S.attrib[i]) + } + fmt.Fprintf(w, "\n") + } + return w.String() +} + +// Parser + +type Parser struct { + stack *stack + nextToken *token.Token + pos int +} + +type Scanner interface { + Scan() (tok *token.Token) +} + +func NewParser() *Parser { + p := &Parser{stack: newStack()} + p.Reset() + return p +} + +func (P *Parser) Reset() { + P.stack.reset() + P.stack.push(0, nil) +} + +func (P *Parser) Error(err error, scanner Scanner) (recovered bool, errorAttrib *parseError.Error) { + errorAttrib = &parseError.Error{ + Err: err, + ErrorToken: P.nextToken, + ErrorSymbols: P.popNonRecoveryStates(), + ExpectedTokens: make([]string, 0, 8), + } + for t, action := range actionTab[P.stack.top()].actions { + if action != nil { + errorAttrib.ExpectedTokens = append(errorAttrib.ExpectedTokens, token.TokMap.Id(token.Type(t))) + } + } + + if action := actionTab[P.stack.top()].actions[token.TokMap.Type("error")]; action != nil { + P.stack.push(int(action.(shift)), errorAttrib) // action can only be shift + } else { + return + } + + if action := actionTab[P.stack.top()].actions[P.nextToken.Type]; action != nil { + recovered = true + } + for !recovered && P.nextToken.Type != token.EOF { + P.nextToken = scanner.Scan() + if action := actionTab[P.stack.top()].actions[P.nextToken.Type]; action != nil { + recovered = true + } + } + + return +} + +func (P *Parser) popNonRecoveryStates() (removedAttribs []parseError.ErrorSymbol) { + if rs, ok := P.firstRecoveryState(); ok { + errorSymbols := P.stack.popN(int(P.stack.topIndex() - rs)) + removedAttribs = make([]parseError.ErrorSymbol, len(errorSymbols)) + for i, e := range errorSymbols { + removedAttribs[i] = e + } + } else { + removedAttribs = []parseError.ErrorSymbol{} + } + return +} + +// recoveryState points to the highest state on the stack, which can recover +func (P *Parser) firstRecoveryState() (recoveryState int, canRecover bool) { + recoveryState, canRecover = P.stack.topIndex(), actionTab[P.stack.top()].canRecover + for recoveryState > 0 && !canRecover { + recoveryState-- + canRecover = actionTab[P.stack.peek(recoveryState)].canRecover + } + return +} + +func (P *Parser) newError(err 
error) error { + e := &parseError.Error{ + Err: err, + StackTop: P.stack.top(), + ErrorToken: P.nextToken, + } + actRow := actionTab[P.stack.top()] + for i, t := range actRow.actions { + if t != nil { + e.ExpectedTokens = append(e.ExpectedTokens, token.TokMap.Id(token.Type(i))) + } + } + return e +} + +func (this *Parser) Parse(scanner Scanner) (res interface{}, err error) { + this.Reset() + this.nextToken = scanner.Scan() + for acc := false; !acc; { + action := actionTab[this.stack.top()].actions[this.nextToken.Type] + if action == nil { + if recovered, errAttrib := this.Error(nil, scanner); !recovered { + this.nextToken = errAttrib.ErrorToken + return nil, this.newError(nil) + } + if action = actionTab[this.stack.top()].actions[this.nextToken.Type]; action == nil { + panic("Error recovery led to invalid action") + } + } + + // fmt.Printf("S%d %s %s\n", this.stack.top(), token.TokMap.TokenString(this.nextToken), action.String()) + + switch act := action.(type) { + case accept: + res = this.stack.popN(1)[0] + acc = true + case shift: + this.stack.push(int(act), this.nextToken) + this.nextToken = scanner.Scan() + case reduce: + prod := productionsTable[int(act)] + attrib, err := prod.ReduceFunc(this.stack.popN(prod.NumSymbols)) + if err != nil { + return nil, this.newError(err) + } else { + this.stack.push(gotoTab[this.stack.top()][prod.NTType], attrib) + } + default: + panic("unknown action: " + action.String()) + } + } + return res, nil +} diff --git a/graph/formats/dot/internal/parser/parser_test.go b/graph/formats/dot/internal/parser/parser_test.go new file mode 100644 index 00000000..6b37df76 --- /dev/null +++ b/graph/formats/dot/internal/parser/parser_test.go @@ -0,0 +1,114 @@ +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. + +package parser_test + +import ( + "bytes" + "io/ioutil" + "testing" + + "github.com/gonum/graph/formats/dot" +) + +func TestParseFile(t *testing.T) { + golden := []struct { + in string + out string + }{ + {in: "../testdata/empty.dot"}, + {in: "../testdata/graph.dot"}, + {in: "../testdata/digraph.dot"}, + {in: "../testdata/strict.dot"}, + {in: "../testdata/multi.dot"}, + {in: "../testdata/named_graph.dot"}, + {in: "../testdata/node_stmt.dot"}, + {in: "../testdata/edge_stmt.dot"}, + {in: "../testdata/attr_stmt.dot"}, + {in: "../testdata/attr.dot"}, + { + in: "../testdata/subgraph.dot", + out: "../testdata/subgraph.golden", + }, + { + in: "../testdata/semi.dot", + out: "../testdata/semi.golden", + }, + { + in: "../testdata/empty_attr.dot", + out: "../testdata/empty_attr.golden", + }, + { + in: "../testdata/attr_lists.dot", + out: "../testdata/attr_lists.golden", + }, + { + in: "../testdata/attr_sep.dot", + out: "../testdata/attr_sep.golden", + }, + {in: "../testdata/subgraph_vertex.dot"}, + { + in: "../testdata/port.dot", + out: "../testdata/port.golden", + }, + {in: "../testdata/quoted_id.dot"}, + { + in: "../testdata/backslash_newline_id.dot", + out: "../testdata/backslash_newline_id.golden", + }, + } + for _, g := range golden { + file, err := dot.ParseFile(g.in) + if err != nil { + t.Errorf("%q: unable to parse file; %v", g.in, err) + continue + } + // If no output path is specified, the input is already golden. 
+ out := g.in + if len(g.out) > 0 { + out = g.out + } + buf, err := ioutil.ReadFile(out) + if err != nil { + t.Errorf("%q: unable to read file; %v", g.in, err) + continue + } + got := file.String() + // Remove trailing newline. + want := string(bytes.TrimSpace(buf)) + if got != want { + t.Errorf("%q: graph mismatch; expected `%s`, got `%s`", g.in, want, got) + } + } +} + +func TestParseError(t *testing.T) { + golden := []struct { + path string + want string + }{ + { + path: "../testdata/error.dot", + want: `Error in S30: INVALID(0,~), Pos(offset=13, line=2, column=7), expected one of: { } graphx ; -- -> node edge [ = subgraph : id `, + }, + } + for _, g := range golden { + _, err := dot.ParseFile(g.path) + if err == nil { + t.Errorf("%q: expected error, got nil", g.path) + continue + } + got := err.Error() + if got != g.want { + t.Errorf("%q: error mismatch; expected `%v`, got `%v`", g.path, g.want, got) + continue + } + } +} diff --git a/graph/formats/dot/internal/parser/productionstable.go b/graph/formats/dot/internal/parser/productionstable.go new file mode 100644 index 00000000..5b05144c --- /dev/null +++ b/graph/formats/dot/internal/parser/productionstable.go @@ -0,0 +1,586 @@ +// Code generated by gocc; DO NOT EDIT. + +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. + +package parser + +import ( + "github.com/gonum/graph/formats/dot/ast" + "github.com/gonum/graph/formats/dot/internal/astx" +) + +type ( + //TODO: change type and variable names to be consistent with other tables + ProdTab [numProductions]ProdTabEntry + ProdTabEntry struct { + String string + Id string + NTType int + Index int + NumSymbols int + ReduceFunc func([]Attrib) (Attrib, error) + } + Attrib interface { + } +) + +var productionsTable = ProdTab{ + ProdTabEntry{ + String: `S' : File << >>`, + Id: "S'", + NTType: 0, + Index: 0, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, + ProdTabEntry{ + String: `File : Graph << astx.NewFile(X[0]) >>`, + Id: "File", + NTType: 1, + Index: 1, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.NewFile(X[0]) + }, + }, + ProdTabEntry{ + String: `File : File Graph << astx.AppendGraph(X[0], X[1]) >>`, + Id: "File", + NTType: 1, + Index: 2, + NumSymbols: 2, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.AppendGraph(X[0], X[1]) + }, + }, + ProdTabEntry{ + String: `Graph : OptStrict DirectedGraph OptID "{" OptStmtList "}" << astx.NewGraph(X[0], X[1], X[2], X[4]) >>`, + Id: "Graph", + NTType: 2, + Index: 3, + NumSymbols: 6, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.NewGraph(X[0], X[1], X[2], X[4]) + }, + }, + ProdTabEntry{ + String: `OptStrict : empty << false, nil >>`, + Id: "OptStrict", + NTType: 3, + Index: 4, + NumSymbols: 0, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return false, nil + }, + }, + ProdTabEntry{ + String: `OptStrict : strict << true, nil >>`, + Id: "OptStrict", + NTType: 3, + Index: 5, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return true, nil + }, + }, + ProdTabEntry{ + String: `DirectedGraph : graphx << false, nil >>`, + Id: "DirectedGraph", + NTType: 4, + Index: 6, + NumSymbols: 1, + ReduceFunc: 
func(X []Attrib) (Attrib, error) { + return false, nil + }, + }, + ProdTabEntry{ + String: `DirectedGraph : digraph << true, nil >>`, + Id: "DirectedGraph", + NTType: 4, + Index: 7, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return true, nil + }, + }, + ProdTabEntry{ + String: `StmtList : Stmt OptSemi << astx.NewStmtList(X[0]) >>`, + Id: "StmtList", + NTType: 5, + Index: 8, + NumSymbols: 2, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.NewStmtList(X[0]) + }, + }, + ProdTabEntry{ + String: `StmtList : StmtList Stmt OptSemi << astx.AppendStmt(X[0], X[1]) >>`, + Id: "StmtList", + NTType: 5, + Index: 9, + NumSymbols: 3, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.AppendStmt(X[0], X[1]) + }, + }, + ProdTabEntry{ + String: `OptStmtList : empty << >>`, + Id: "OptStmtList", + NTType: 6, + Index: 10, + NumSymbols: 0, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return nil, nil + }, + }, + ProdTabEntry{ + String: `OptStmtList : StmtList << >>`, + Id: "OptStmtList", + NTType: 6, + Index: 11, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, + ProdTabEntry{ + String: `Stmt : NodeStmt << >>`, + Id: "Stmt", + NTType: 7, + Index: 12, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, + ProdTabEntry{ + String: `Stmt : EdgeStmt << >>`, + Id: "Stmt", + NTType: 7, + Index: 13, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, + ProdTabEntry{ + String: `Stmt : AttrStmt << >>`, + Id: "Stmt", + NTType: 7, + Index: 14, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, + ProdTabEntry{ + String: `Stmt : Attr << >>`, + Id: "Stmt", + NTType: 7, + Index: 15, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, + ProdTabEntry{ + String: `Stmt : Subgraph << >>`, + Id: "Stmt", + NTType: 7, + Index: 16, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, + ProdTabEntry{ + String: `OptSemi : empty << >>`, + Id: "OptSemi", + NTType: 8, + Index: 17, + NumSymbols: 0, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return nil, nil + }, + }, + ProdTabEntry{ + String: `OptSemi : ";" << >>`, + Id: "OptSemi", + NTType: 8, + Index: 18, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, + ProdTabEntry{ + String: `NodeStmt : Node OptAttrList << astx.NewNodeStmt(X[0], X[1]) >>`, + Id: "NodeStmt", + NTType: 9, + Index: 19, + NumSymbols: 2, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.NewNodeStmt(X[0], X[1]) + }, + }, + ProdTabEntry{ + String: `EdgeStmt : Vertex Edge OptAttrList << astx.NewEdgeStmt(X[0], X[1], X[2]) >>`, + Id: "EdgeStmt", + NTType: 10, + Index: 20, + NumSymbols: 3, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.NewEdgeStmt(X[0], X[1], X[2]) + }, + }, + ProdTabEntry{ + String: `Edge : DirectedEdge Vertex OptEdge << astx.NewEdge(X[0], X[1], X[2]) >>`, + Id: "Edge", + NTType: 11, + Index: 21, + NumSymbols: 3, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.NewEdge(X[0], X[1], X[2]) + }, + }, + ProdTabEntry{ + String: `DirectedEdge : "--" << false, nil >>`, + Id: "DirectedEdge", + NTType: 12, + Index: 22, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return false, nil + }, + }, + ProdTabEntry{ + String: `DirectedEdge : "->" << true, nil >>`, + Id: "DirectedEdge", + NTType: 12, + 
Index: 23, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return true, nil + }, + }, + ProdTabEntry{ + String: `OptEdge : empty << >>`, + Id: "OptEdge", + NTType: 13, + Index: 24, + NumSymbols: 0, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return nil, nil + }, + }, + ProdTabEntry{ + String: `OptEdge : Edge << >>`, + Id: "OptEdge", + NTType: 13, + Index: 25, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, + ProdTabEntry{ + String: `AttrStmt : Component AttrList << astx.NewAttrStmt(X[0], X[1]) >>`, + Id: "AttrStmt", + NTType: 14, + Index: 26, + NumSymbols: 2, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.NewAttrStmt(X[0], X[1]) + }, + }, + ProdTabEntry{ + String: `Component : graphx << ast.KindGraph, nil >>`, + Id: "Component", + NTType: 15, + Index: 27, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return ast.KindGraph, nil + }, + }, + ProdTabEntry{ + String: `Component : node << ast.KindNode, nil >>`, + Id: "Component", + NTType: 15, + Index: 28, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return ast.KindNode, nil + }, + }, + ProdTabEntry{ + String: `Component : edge << ast.KindEdge, nil >>`, + Id: "Component", + NTType: 15, + Index: 29, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return ast.KindEdge, nil + }, + }, + ProdTabEntry{ + String: `AttrList : "[" OptAList "]" << X[1], nil >>`, + Id: "AttrList", + NTType: 16, + Index: 30, + NumSymbols: 3, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[1], nil + }, + }, + ProdTabEntry{ + String: `AttrList : AttrList "[" OptAList "]" << astx.AppendAttrList(X[0], X[2]) >>`, + Id: "AttrList", + NTType: 16, + Index: 31, + NumSymbols: 4, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.AppendAttrList(X[0], X[2]) + }, + }, + ProdTabEntry{ + String: `OptAttrList : empty << >>`, + Id: "OptAttrList", + NTType: 17, + Index: 32, + NumSymbols: 0, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return nil, nil + }, + }, + ProdTabEntry{ + String: `OptAttrList : AttrList << >>`, + Id: "OptAttrList", + NTType: 17, + Index: 33, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, + ProdTabEntry{ + String: `AList : Attr OptSep << astx.NewAttrList(X[0]) >>`, + Id: "AList", + NTType: 18, + Index: 34, + NumSymbols: 2, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.NewAttrList(X[0]) + }, + }, + ProdTabEntry{ + String: `AList : AList Attr OptSep << astx.AppendAttr(X[0], X[1]) >>`, + Id: "AList", + NTType: 18, + Index: 35, + NumSymbols: 3, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.AppendAttr(X[0], X[1]) + }, + }, + ProdTabEntry{ + String: `OptAList : empty << >>`, + Id: "OptAList", + NTType: 19, + Index: 36, + NumSymbols: 0, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return nil, nil + }, + }, + ProdTabEntry{ + String: `OptAList : AList << >>`, + Id: "OptAList", + NTType: 19, + Index: 37, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, + ProdTabEntry{ + String: `OptSep : empty << >>`, + Id: "OptSep", + NTType: 20, + Index: 38, + NumSymbols: 0, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return nil, nil + }, + }, + ProdTabEntry{ + String: `OptSep : ";" << >>`, + Id: "OptSep", + NTType: 20, + Index: 39, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, + ProdTabEntry{ + String: `OptSep : "," << >>`, + 
Id: "OptSep", + NTType: 20, + Index: 40, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, + ProdTabEntry{ + String: `Attr : ID "=" ID << astx.NewAttr(X[0], X[2]) >>`, + Id: "Attr", + NTType: 21, + Index: 41, + NumSymbols: 3, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.NewAttr(X[0], X[2]) + }, + }, + ProdTabEntry{ + String: `Subgraph : OptSubgraphID "{" OptStmtList "}" << astx.NewSubgraph(X[0], X[2]) >>`, + Id: "Subgraph", + NTType: 22, + Index: 42, + NumSymbols: 4, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.NewSubgraph(X[0], X[2]) + }, + }, + ProdTabEntry{ + String: `OptSubgraphID : empty << >>`, + Id: "OptSubgraphID", + NTType: 23, + Index: 43, + NumSymbols: 0, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return nil, nil + }, + }, + ProdTabEntry{ + String: `OptSubgraphID : subgraph OptID << X[1], nil >>`, + Id: "OptSubgraphID", + NTType: 23, + Index: 44, + NumSymbols: 2, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[1], nil + }, + }, + ProdTabEntry{ + String: `Vertex : Node << >>`, + Id: "Vertex", + NTType: 24, + Index: 45, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, + ProdTabEntry{ + String: `Vertex : Subgraph << >>`, + Id: "Vertex", + NTType: 24, + Index: 46, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, + ProdTabEntry{ + String: `Node : ID OptPort << astx.NewNode(X[0], X[1]) >>`, + Id: "Node", + NTType: 25, + Index: 47, + NumSymbols: 2, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.NewNode(X[0], X[1]) + }, + }, + ProdTabEntry{ + String: `Port : ":" ID << astx.NewPort(X[1], nil) >>`, + Id: "Port", + NTType: 26, + Index: 48, + NumSymbols: 2, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.NewPort(X[1], nil) + }, + }, + ProdTabEntry{ + String: `Port : ":" ID ":" ID << astx.NewPort(X[1], X[3]) >>`, + Id: "Port", + NTType: 26, + Index: 49, + NumSymbols: 4, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.NewPort(X[1], X[3]) + }, + }, + ProdTabEntry{ + String: `OptPort : empty << >>`, + Id: "OptPort", + NTType: 27, + Index: 50, + NumSymbols: 0, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return nil, nil + }, + }, + ProdTabEntry{ + String: `OptPort : Port << >>`, + Id: "OptPort", + NTType: 27, + Index: 51, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, + ProdTabEntry{ + String: `ID : id << astx.NewID(X[0]) >>`, + Id: "ID", + NTType: 28, + Index: 52, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.NewID(X[0]) + }, + }, + ProdTabEntry{ + String: `OptID : empty << "", nil >>`, + Id: "OptID", + NTType: 29, + Index: 53, + NumSymbols: 0, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return "", nil + }, + }, + ProdTabEntry{ + String: `OptID : ID << >>`, + Id: "OptID", + NTType: 29, + Index: 54, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, +} diff --git a/graph/formats/dot/internal/paste_copyright.bash b/graph/formats/dot/internal/paste_copyright.bash new file mode 100755 index 00000000..83319810 --- /dev/null +++ b/graph/formats/dot/internal/paste_copyright.bash @@ -0,0 +1,17 @@ +#!/usr/bin/env bash + +find . 
-type f -name '*.go' \ +| xargs sed -i -e "s|// Code generated by gocc; DO NOT EDIT.|\ +// Code generated by gocc; DO NOT EDIT.\n\ +\n\ +// This file is dual licensed under CC0 and The gonum license.\n\ +//\n\ +// Copyright ©2017 The gonum Authors. All rights reserved.\n\ +// Use of this source code is governed by a BSD-style\n\ +// license that can be found in the LICENSE file.\n\ +//\n\ +// Copyright ©2017 Robin Eklind.\n\ +// This file is made available under a Creative Commons CC0 1.0\n\ +// Universal Public Domain Dedication.\n\ +|" + diff --git a/graph/formats/dot/internal/testdata/attr.dot b/graph/formats/dot/internal/testdata/attr.dot new file mode 100644 index 00000000..dabe9734 --- /dev/null +++ b/graph/formats/dot/internal/testdata/attr.dot @@ -0,0 +1,4 @@ +digraph { + bgcolor=transparent + A +} diff --git a/graph/formats/dot/internal/testdata/attr_lists.dot b/graph/formats/dot/internal/testdata/attr_lists.dot new file mode 100644 index 00000000..b71411cd --- /dev/null +++ b/graph/formats/dot/internal/testdata/attr_lists.dot @@ -0,0 +1,3 @@ +digraph { + A [style=filled] [fillcolor=red] +} diff --git a/graph/formats/dot/internal/testdata/attr_lists.golden b/graph/formats/dot/internal/testdata/attr_lists.golden new file mode 100644 index 00000000..14316d93 --- /dev/null +++ b/graph/formats/dot/internal/testdata/attr_lists.golden @@ -0,0 +1,3 @@ +digraph { + A [style=filled fillcolor=red] +} diff --git a/graph/formats/dot/internal/testdata/attr_sep.dot b/graph/formats/dot/internal/testdata/attr_sep.dot new file mode 100644 index 00000000..ef47a120 --- /dev/null +++ b/graph/formats/dot/internal/testdata/attr_sep.dot @@ -0,0 +1,3 @@ +digraph { + A [style=filled, fillcolor=red; color=blue] +} diff --git a/graph/formats/dot/internal/testdata/attr_sep.golden b/graph/formats/dot/internal/testdata/attr_sep.golden new file mode 100644 index 00000000..95bfd972 --- /dev/null +++ b/graph/formats/dot/internal/testdata/attr_sep.golden @@ -0,0 +1,3 @@ +digraph { + A [style=filled fillcolor=red color=blue] +} diff --git a/graph/formats/dot/internal/testdata/attr_stmt.dot b/graph/formats/dot/internal/testdata/attr_stmt.dot new file mode 100644 index 00000000..e87d70a4 --- /dev/null +++ b/graph/formats/dot/internal/testdata/attr_stmt.dot @@ -0,0 +1,6 @@ +digraph { + graph [bgcolor=transparent] + node [style=filled fillcolor=white] + edge [minlen=2] + A -> B +} diff --git a/graph/formats/dot/internal/testdata/backslash_newline_id.dot b/graph/formats/dot/internal/testdata/backslash_newline_id.dot new file mode 100644 index 00000000..81051384 --- /dev/null +++ b/graph/formats/dot/internal/testdata/backslash_newline_id.dot @@ -0,0 +1,4 @@ +digraph { + A [name="hello \ +world"] +} diff --git a/graph/formats/dot/internal/testdata/backslash_newline_id.golden b/graph/formats/dot/internal/testdata/backslash_newline_id.golden new file mode 100644 index 00000000..449c3a3e --- /dev/null +++ b/graph/formats/dot/internal/testdata/backslash_newline_id.golden @@ -0,0 +1,3 @@ +digraph { + A [name="hello world"] +} diff --git a/graph/formats/dot/internal/testdata/digraph.dot b/graph/formats/dot/internal/testdata/digraph.dot new file mode 100644 index 00000000..1063ebfa --- /dev/null +++ b/graph/formats/dot/internal/testdata/digraph.dot @@ -0,0 +1,3 @@ +digraph { + A -> B +} diff --git a/graph/formats/dot/internal/testdata/edge_stmt.dot b/graph/formats/dot/internal/testdata/edge_stmt.dot new file mode 100644 index 00000000..019d23d7 --- /dev/null +++ b/graph/formats/dot/internal/testdata/edge_stmt.dot @@ -0,0 +1,4 @@ 
+digraph { + A -> B -> C + D -> E [color=red minlen=2] +} diff --git a/graph/formats/dot/internal/testdata/empty.dot b/graph/formats/dot/internal/testdata/empty.dot new file mode 100644 index 00000000..418b7bca --- /dev/null +++ b/graph/formats/dot/internal/testdata/empty.dot @@ -0,0 +1,2 @@ +graph { +} diff --git a/graph/formats/dot/internal/testdata/empty_attr.dot b/graph/formats/dot/internal/testdata/empty_attr.dot new file mode 100644 index 00000000..b92d60e5 --- /dev/null +++ b/graph/formats/dot/internal/testdata/empty_attr.dot @@ -0,0 +1,3 @@ +digraph { + A [] +} diff --git a/graph/formats/dot/internal/testdata/empty_attr.golden b/graph/formats/dot/internal/testdata/empty_attr.golden new file mode 100644 index 00000000..369e4495 --- /dev/null +++ b/graph/formats/dot/internal/testdata/empty_attr.golden @@ -0,0 +1,3 @@ +digraph { + A +} diff --git a/graph/formats/dot/internal/testdata/error.dot b/graph/formats/dot/internal/testdata/error.dot new file mode 100644 index 00000000..ad77f61f --- /dev/null +++ b/graph/formats/dot/internal/testdata/error.dot @@ -0,0 +1,3 @@ +digraph { + A ~ B +} diff --git a/graph/formats/dot/internal/testdata/graph.dot b/graph/formats/dot/internal/testdata/graph.dot new file mode 100644 index 00000000..0524c144 --- /dev/null +++ b/graph/formats/dot/internal/testdata/graph.dot @@ -0,0 +1,3 @@ +graph { + A -- B +} diff --git a/graph/formats/dot/internal/testdata/multi.dot b/graph/formats/dot/internal/testdata/multi.dot new file mode 100644 index 00000000..21945188 --- /dev/null +++ b/graph/formats/dot/internal/testdata/multi.dot @@ -0,0 +1,6 @@ +digraph { + A -> B +} +digraph { + C -> D +} diff --git a/graph/formats/dot/internal/testdata/named_graph.dot b/graph/formats/dot/internal/testdata/named_graph.dot new file mode 100644 index 00000000..3fc680da --- /dev/null +++ b/graph/formats/dot/internal/testdata/named_graph.dot @@ -0,0 +1,3 @@ +graph G { + A +} diff --git a/graph/formats/dot/internal/testdata/node_stmt.dot b/graph/formats/dot/internal/testdata/node_stmt.dot new file mode 100644 index 00000000..369e4495 --- /dev/null +++ b/graph/formats/dot/internal/testdata/node_stmt.dot @@ -0,0 +1,3 @@ +digraph { + A +} diff --git a/graph/formats/dot/internal/testdata/port.dot b/graph/formats/dot/internal/testdata/port.dot new file mode 100644 index 00000000..1d4bf415 --- /dev/null +++ b/graph/formats/dot/internal/testdata/port.dot @@ -0,0 +1,11 @@ +digraph { + A:ne -> B:sw + C:foo -> D:bar:se + E:_ -> F + G:n + H:e + I:s + J:w + K:nw + L:c +} diff --git a/graph/formats/dot/internal/testdata/port.golden b/graph/formats/dot/internal/testdata/port.golden new file mode 100644 index 00000000..18ca1f30 --- /dev/null +++ b/graph/formats/dot/internal/testdata/port.golden @@ -0,0 +1,11 @@ +digraph { + A:ne -> B:sw + C:foo -> D:bar:se + E -> F + G:n + H:e + I:s + J:w + K:nw + L:c +} diff --git a/graph/formats/dot/internal/testdata/quoted_id.dot b/graph/formats/dot/internal/testdata/quoted_id.dot new file mode 100644 index 00000000..0c3b5652 --- /dev/null +++ b/graph/formats/dot/internal/testdata/quoted_id.dot @@ -0,0 +1,3 @@ +digraph { + "A" -> "B" ["color"="red"] +} diff --git a/graph/formats/dot/internal/testdata/semi.dot b/graph/formats/dot/internal/testdata/semi.dot new file mode 100644 index 00000000..c8bcbf34 --- /dev/null +++ b/graph/formats/dot/internal/testdata/semi.dot @@ -0,0 +1,3 @@ +digraph { + A -> B; C +} diff --git a/graph/formats/dot/internal/testdata/semi.golden b/graph/formats/dot/internal/testdata/semi.golden new file mode 100644 index 00000000..61bca268 
--- /dev/null +++ b/graph/formats/dot/internal/testdata/semi.golden @@ -0,0 +1,4 @@ +digraph { + A -> B + C +} diff --git a/graph/formats/dot/internal/testdata/strict.dot b/graph/formats/dot/internal/testdata/strict.dot new file mode 100644 index 00000000..bf222ad9 --- /dev/null +++ b/graph/formats/dot/internal/testdata/strict.dot @@ -0,0 +1,4 @@ +strict digraph { + A -> B + A -> B +} diff --git a/graph/formats/dot/internal/testdata/subgraph.dot b/graph/formats/dot/internal/testdata/subgraph.dot new file mode 100644 index 00000000..a03667dd --- /dev/null +++ b/graph/formats/dot/internal/testdata/subgraph.dot @@ -0,0 +1,5 @@ +digraph { + {A} + subgraph {B} + subgraph S {C} +} diff --git a/graph/formats/dot/internal/testdata/subgraph.golden b/graph/formats/dot/internal/testdata/subgraph.golden new file mode 100644 index 00000000..0cd6fe72 --- /dev/null +++ b/graph/formats/dot/internal/testdata/subgraph.golden @@ -0,0 +1,5 @@ +digraph { + {A} + {B} + subgraph S {C} +} diff --git a/graph/formats/dot/internal/testdata/subgraph_vertex.dot b/graph/formats/dot/internal/testdata/subgraph_vertex.dot new file mode 100644 index 00000000..b414ad84 --- /dev/null +++ b/graph/formats/dot/internal/testdata/subgraph_vertex.dot @@ -0,0 +1,3 @@ +digraph { + {A B} -> C +} diff --git a/graph/formats/dot/internal/token/token.go b/graph/formats/dot/internal/token/token.go new file mode 100644 index 00000000..31db6035 --- /dev/null +++ b/graph/formats/dot/internal/token/token.go @@ -0,0 +1,116 @@ +// Code generated by gocc; DO NOT EDIT. + +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. 
+ +package token + +import ( + "fmt" +) + +type Token struct { + Type + Lit []byte + Pos +} + +type Type int + +const ( + INVALID Type = iota + EOF +) + +type Pos struct { + Offset int + Line int + Column int +} + +func (this Pos) String() string { + return fmt.Sprintf("Pos(offset=%d, line=%d, column=%d)", this.Offset, this.Line, this.Column) +} + +type TokenMap struct { + typeMap []string + idMap map[string]Type +} + +func (this TokenMap) Id(tok Type) string { + if int(tok) < len(this.typeMap) { + return this.typeMap[tok] + } + return "unknown" +} + +func (this TokenMap) Type(tok string) Type { + if typ, exist := this.idMap[tok]; exist { + return typ + } + return INVALID +} + +func (this TokenMap) TokenString(tok *Token) string { + //TODO: refactor to print pos & token string properly + return fmt.Sprintf("%s(%d,%s)", this.Id(tok.Type), tok.Type, tok.Lit) +} + +func (this TokenMap) StringType(typ Type) string { + return fmt.Sprintf("%s(%d)", this.Id(typ), typ) +} + +var TokMap = TokenMap{ + typeMap: []string{ + "INVALID", + "$", + "{", + "}", + "empty", + "strict", + "graphx", + "digraph", + ";", + "--", + "->", + "node", + "edge", + "[", + "]", + ",", + "=", + "subgraph", + ":", + "id", + }, + + idMap: map[string]Type{ + "INVALID": 0, + "$": 1, + "{": 2, + "}": 3, + "empty": 4, + "strict": 5, + "graphx": 6, + "digraph": 7, + ";": 8, + "--": 9, + "->": 10, + "node": 11, + "edge": 12, + "[": 13, + "]": 14, + ",": 15, + "=": 16, + "subgraph": 17, + ":": 18, + "id": 19, + }, +} diff --git a/graph/formats/dot/internal/util/litconv.go b/graph/formats/dot/internal/util/litconv.go new file mode 100644 index 00000000..ed6e348b --- /dev/null +++ b/graph/formats/dot/internal/util/litconv.go @@ -0,0 +1,118 @@ +// Code generated by gocc; DO NOT EDIT. + +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. + +package util + +import ( + "fmt" + "strconv" + "unicode" + "unicode/utf8" +) + +/* Interface */ + +/* +Convert the literal value of a scanned token to rune +*/ +func RuneValue(lit []byte) rune { + if lit[1] == '\\' { + return escapeCharVal(lit) + } + r, size := utf8.DecodeRune(lit[1:]) + if size != len(lit)-2 { + panic(fmt.Sprintf("Error decoding rune. 
Lit: %s, rune: %d, size%d\n", lit, r, size)) + } + return r +} + +/* +Convert the literal value of a scanned token to int64 +*/ +func IntValue(lit []byte) (int64, error) { + return strconv.ParseInt(string(lit), 10, 64) +} + +/* +Convert the literal value of a scanned token to uint64 +*/ +func UintValue(lit []byte) (uint64, error) { + return strconv.ParseUint(string(lit), 10, 64) +} + +/* Util */ + +func escapeCharVal(lit []byte) rune { + var i, base, max uint32 + offset := 2 + switch lit[offset] { + case 'a': + return '\a' + case 'b': + return '\b' + case 'f': + return '\f' + case 'n': + return '\n' + case 'r': + return '\r' + case 't': + return '\t' + case 'v': + return '\v' + case '\\': + return '\\' + case '\'': + return '\'' + case '0', '1', '2', '3', '4', '5', '6', '7': + i, base, max = 3, 8, 255 + case 'x': + i, base, max = 2, 16, 255 + offset++ + case 'u': + i, base, max = 4, 16, unicode.MaxRune + offset++ + case 'U': + i, base, max = 8, 16, unicode.MaxRune + offset++ + default: + panic(fmt.Sprintf("Error decoding character literal: %s\n", lit)) + } + + var x uint32 + for ; i > 0 && offset < len(lit)-1; i-- { + ch, size := utf8.DecodeRune(lit[offset:]) + offset += size + d := uint32(digitVal(ch)) + if d >= base { + panic(fmt.Sprintf("charVal(%s): illegal character (%c) in escape sequence. size=%d, offset=%d", lit, ch, size, offset)) + } + x = x*base + d + } + if x > max || 0xD800 <= x && x < 0xE000 { + panic(fmt.Sprintf("Error decoding escape char value. Lit:%s, offset:%d, escape sequence is invalid Unicode code point\n", lit, offset)) + } + + return rune(x) +} + +func digitVal(ch rune) int { + switch { + case '0' <= ch && ch <= '9': + return int(ch) - '0' + case 'a' <= ch && ch <= 'f': + return int(ch) - 'a' + 10 + case 'A' <= ch && ch <= 'F': + return int(ch) - 'A' + 10 + } + return 16 // larger than any legal digit val +} diff --git a/graph/formats/dot/internal/util/rune.go b/graph/formats/dot/internal/util/rune.go new file mode 100644 index 00000000..583b1e06 --- /dev/null +++ b/graph/formats/dot/internal/util/rune.go @@ -0,0 +1,49 @@ +// Code generated by gocc; DO NOT EDIT. + +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. + +package util + +import ( + "fmt" +) + +func RuneToString(r rune) string { + if r >= 0x20 && r < 0x7f { + return fmt.Sprintf("'%c'", r) + } + switch r { + case 0x07: + return "'\\a'" + case 0x08: + return "'\\b'" + case 0x0C: + return "'\\f'" + case 0x0A: + return "'\\n'" + case 0x0D: + return "'\\r'" + case 0x09: + return "'\\t'" + case 0x0b: + return "'\\v'" + case 0x5c: + return "'\\\\\\'" + case 0x27: + return "'\\''" + case 0x22: + return "'\\\"'" + } + if r < 0x10000 { + return fmt.Sprintf("\\u%04x", r) + } + return fmt.Sprintf("\\U%08x", r) +} diff --git a/graph/formats/dot/sem.go b/graph/formats/dot/sem.go new file mode 100644 index 00000000..d4643e03 --- /dev/null +++ b/graph/formats/dot/sem.go @@ -0,0 +1,160 @@ +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. 
+// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. + +package dot + +import ( + "fmt" + + "github.com/gonum/graph/formats/dot/ast" +) + +// check validates the semantics of the given DOT file. +func check(file *ast.File) error { + for _, graph := range file.Graphs { + // TODO: Check graph.ID for duplicates? + if err := checkGraph(graph); err != nil { + return err + } + } + return nil +} + +// check validates the semantics of the given graph. +func checkGraph(graph *ast.Graph) error { + for _, stmt := range graph.Stmts { + if err := checkStmt(graph, stmt); err != nil { + return err + } + } + return nil +} + +// check validates the semantics of the given statement. +func checkStmt(graph *ast.Graph, stmt ast.Stmt) error { + switch stmt := stmt.(type) { + case *ast.NodeStmt: + return checkNodeStmt(graph, stmt) + case *ast.EdgeStmt: + return checkEdgeStmt(graph, stmt) + case *ast.AttrStmt: + return checkAttrStmt(graph, stmt) + case *ast.Attr: + // TODO: Verify that the attribute is indeed of graph component kind. + return checkAttr(graph, ast.KindGraph, stmt) + case *ast.Subgraph: + return checkSubgraph(graph, stmt) + default: + panic(fmt.Sprintf("support for statement of type %T not yet implemented", stmt)) + } +} + +// checkNodeStmt validates the semantics of the given node statement. +func checkNodeStmt(graph *ast.Graph, stmt *ast.NodeStmt) error { + if err := checkNode(graph, stmt.Node); err != nil { + return err + } + for _, attr := range stmt.Attrs { + // TODO: Verify that the attribute is indeed of node component kind. + if err := checkAttr(graph, ast.KindNode, attr); err != nil { + return err + } + } + return nil +} + +// checkEdgeStmt validates the semantics of the given edge statement. +func checkEdgeStmt(graph *ast.Graph, stmt *ast.EdgeStmt) error { + // TODO: if graph.Strict, check for multi-edges. + if err := checkVertex(graph, stmt.From); err != nil { + return err + } + for _, attr := range stmt.Attrs { + // TODO: Verify that the attribute is indeed of edge component kind. + if err := checkAttr(graph, ast.KindEdge, attr); err != nil { + return err + } + } + return checkEdge(graph, stmt.From, stmt.To) +} + +// checkEdge validates the semantics of the given edge. +func checkEdge(graph *ast.Graph, from ast.Vertex, to *ast.Edge) error { + if !graph.Directed && to.Directed { + return fmt.Errorf("undirected graph %q contains directed edge from %q to %q", graph.ID, from, to.Vertex) + } + if err := checkVertex(graph, to.Vertex); err != nil { + return err + } + if to.To != nil { + return checkEdge(graph, to.Vertex, to.To) + } + return nil +} + +// checkAttrStmt validates the semantics of the given attribute statement. +func checkAttrStmt(graph *ast.Graph, stmt *ast.AttrStmt) error { + for _, attr := range stmt.Attrs { + if err := checkAttr(graph, stmt.Kind, attr); err != nil { + return err + } + } + return nil +} + +// checkAttr validates the semantics of the given attribute for the given +// component kind. +func checkAttr(graph *ast.Graph, kind ast.Kind, attr *ast.Attr) error { + switch kind { + case ast.KindGraph: + // TODO: Validate key-value pairs for graphs. + return nil + case ast.KindNode: + // TODO: Validate key-value pairs for nodes. + return nil + case ast.KindEdge: + // TODO: Validate key-value pairs for edges. + return nil + default: + panic(fmt.Sprintf("support for component kind %v not yet supported", kind)) + } +} + +// checkSubgraph validates the semantics of the given subgraph. 
+func checkSubgraph(graph *ast.Graph, subgraph *ast.Subgraph) error { + // TODO: Check subgraph.ID for duplicates? + for _, stmt := range subgraph.Stmts { + // TODO: Refine handling of subgraph statements? + // checkSubgraphStmt(graph, subgraph, stmt) + if err := checkStmt(graph, stmt); err != nil { + return err + } + } + return nil +} + +// checkVertex validates the semantics of the given vertex. +func checkVertex(graph *ast.Graph, vertex ast.Vertex) error { + switch vertex := vertex.(type) { + case *ast.Node: + return checkNode(graph, vertex) + case *ast.Subgraph: + return checkSubgraph(graph, vertex) + default: + panic(fmt.Sprintf("support for vertex of type %T not yet supported", vertex)) + } +} + +// checkNode validates the semantics of the given node. +func checkNode(graph *ast.Graph, node *ast.Node) error { + // TODO: Check node.ID for duplicates? + // TODO: Validate node.Port. + return nil +} diff --git a/graph/formats/dot/testdata/.gitignore b/graph/formats/dot/testdata/.gitignore new file mode 100644 index 00000000..90453be2 --- /dev/null +++ b/graph/formats/dot/testdata/.gitignore @@ -0,0 +1,5 @@ +*.dot +*.png +graphviz +input +output diff --git a/graph/formats/dot/testdata/Makefile b/graph/formats/dot/testdata/Makefile new file mode 100644 index 00000000..c3bfcb9b --- /dev/null +++ b/graph/formats/dot/testdata/Makefile @@ -0,0 +1,106 @@ +# Dependencies: +# +# * imgcmp +# go get github.com/mewkiz/cmd/imgcmp +# * dotfmt +# go get github.com/graphism/dot/cmd/dotfmt +# * dot +# sudo pacman -S graphviz +# * recode +# sudo pacman -S recode + +DOT=$(wildcard *.dot) + +# Skip DOT files for which the generated PNG images mismatch. +# +# ref: https://github.com/graphism/dot/issues/2 +# +# pixel colors differ at x=550, y=1885 +DOT:=$(filter-out b51.dot, $(DOT)) +# pixel colors differ at x=5395, y=1920 +DOT:=$(filter-out b106.dot, $(DOT)) + +# Skip segfaulting files. +# +# Segmentation fault (core dumped) +DOT:=$(filter-out b15.dot, $(DOT)) +# Segmentation fault (core dumped) +DOT:=$(filter-out b81.dot, $(DOT)) +# *** stack smashing detected ***: dot terminated +DOT:=$(filter-out sides.dot, $(DOT)) +# *** stack smashing detected ***: dot terminated +DOT:=$(filter-out tee.dot, $(DOT)) + +# Skip DOT files above 100 kB. +DOT:=$(filter-out 4elt.dot, $(DOT)) +DOT:=$(filter-out b29.dot, $(DOT)) +DOT:=$(filter-out b81.dot, $(DOT)) +DOT:=$(filter-out b100.dot, $(DOT)) +DOT:=$(filter-out b102.dot, $(DOT)) +DOT:=$(filter-out b103.dot, $(DOT)) +DOT:=$(filter-out b104.dot, $(DOT)) +DOT:=$(filter-out root.dot, $(DOT)) +DOT:=$(filter-out root_circo.dot, $(DOT)) +DOT:=$(filter-out root_twopi.dot, $(DOT)) + +# Skip invalid DOT file. +# +# Error: No or improper image file="eqn.png" +# in label of node struct1 +DOT:=$(filter-out html4.dot, $(DOT)) + +# Skip multi-graph DOT file which outputs to standard output. +DOT:=$(filter-out multi.dot, $(DOT)) + +# *.dot -> *.png +PNG=$(DOT:.dot=.png) + +INPUT_PNG=$(addprefix input/,$(PNG)) +OUTPUT_PNG=$(addprefix output/,$(PNG)) + +all: + +test: input $(INPUT_PNG) output $(OUTPUT_PNG) + @echo "PASS" + +input: + mkdir -p $@ + dot -V + +input/%.png: %.dot + dot -Tpng -o $@ $< + +output: + mkdir -p $@ + +output/%.png: %.dot + dotfmt -o "output/$<" $< + dot -Tpng -o $@ "output/$<" + imgcmp "input/$(notdir $@)" $@ + +fetch: graphviz + # Copy *.gv and *.dot files. + find graphviz -type f -name '*.gv' -not -wholename "graphviz/rtest/share/b545.gv" -not -name "base.gv" | xargs -I '{}' cp "{}" . + find graphviz -type f -name '*.dot' | xargs -I '{}' cp "{}" . 
+ + # Rename *.gv to *.dot. + #rename .gv .dot *.gv + ls *.gv | xargs -I '{}' basename "{}" .gv | xargs -I '{}' mv "{}.gv" "{}.dot" + + # Remove execute permissions. + chmod 0644 *.dot + + # Convert Latin1 encoded files to UTF-8. + grep -l "charset=latin1" *.dot | xargs -I '{}' recode ISO-8859-1..UTF8 "{}" + recode ISO-8859-1..UTF8 Latin1.dot + + # Clean up. + rm -rf graphviz + +graphviz: + git clone https://github.com/ellson/graphviz.git + +clean: + rm -rf *.dot input output + +.PHONY: all test fetch clean diff --git a/graph/graph.go b/graph/graph.go new file mode 100644 index 00000000..adade5d7 --- /dev/null +++ b/graph/graph.go @@ -0,0 +1,153 @@ +// Copyright ©2014 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package graph + +// Node is a graph node. It returns a graph-unique integer ID. +type Node interface { + ID() int +} + +// Edge is a graph edge. In directed graphs, the direction of the +// edge is given from -> to, otherwise the edge is semantically +// unordered. +type Edge interface { + From() Node + To() Node + Weight() float64 +} + +// Graph is a generalized graph. +type Graph interface { + // Has returns whether the node exists within the graph. + Has(Node) bool + + // Nodes returns all the nodes in the graph. + Nodes() []Node + + // From returns all nodes that can be reached directly + // from the given node. + From(Node) []Node + + // HasEdgeBeteen returns whether an edge exists between + // nodes x and y without considering direction. + HasEdgeBetween(x, y Node) bool + + // Edge returns the edge from u to v if such an edge + // exists and nil otherwise. The node v must be directly + // reachable from u as defined by the From method. + Edge(u, v Node) Edge +} + +// Undirected is an undirected graph. +type Undirected interface { + Graph + + // EdgeBetween returns the edge between nodes x and y. + EdgeBetween(x, y Node) Edge +} + +// Directed is a directed graph. +type Directed interface { + Graph + + // HasEdgeFromTo returns whether an edge exists + // in the graph from u to v. + HasEdgeFromTo(u, v Node) bool + + // To returns all nodes that can reach directly + // to the given node. + To(Node) []Node +} + +// Weighter defines graphs that can report edge weights. +type Weighter interface { + // Weight returns the weight for the edge between + // x and y if Edge(x, y) returns a non-nil Edge. + // If x and y are the same node or there is no + // joining edge between the two nodes the weight + // value returned is implementation dependent. + // Weight returns true if an edge exists between + // x and y or if x and y have the same ID, false + // otherwise. + Weight(x, y Node) (w float64, ok bool) +} + +// NodeAdder is an interface for adding arbitrary nodes to a graph. +type NodeAdder interface { + // NewNodeID returns a new unique arbitrary ID. + NewNodeID() int + + // Adds a node to the graph. AddNode panics if + // the added node ID matches an existing node ID. + AddNode(Node) +} + +// NodeRemover is an interface for removing nodes from a graph. +type NodeRemover interface { + // RemoveNode removes a node from the graph, as + // well as any edges attached to it. If the node + // is not in the graph it is a no-op. + RemoveNode(Node) +} + +// EdgeSetter is an interface for adding edges to a graph. +type EdgeSetter interface { + // SetEdge adds an edge from one node to another. 
+ // If the graph supports node addition the nodes + // will be added if they do not exist, otherwise + // SetEdge will panic. + // If the IDs returned by e.From and e.To are + // equal, SetEdge will panic. + SetEdge(e Edge) +} + +// EdgeRemover is an interface for removing nodes from a graph. +type EdgeRemover interface { + // RemoveEdge removes the given edge, leaving the + // terminal nodes. If the edge does not exist it + // is a no-op. + RemoveEdge(Edge) +} + +// Builder is a graph that can have nodes and edges added. +type Builder interface { + NodeAdder + EdgeSetter +} + +// UndirectedBuilder is an undirected graph builder. +type UndirectedBuilder interface { + Undirected + Builder +} + +// DirectedBuilder is a directed graph builder. +type DirectedBuilder interface { + Directed + Builder +} + +// Copy copies nodes and edges as undirected edges from the source to the destination +// without first clearing the destination. Copy will panic if a node ID in the source +// graph matches a node ID in the destination. +// +// If the source is undirected and the destination is directed both directions will +// be present in the destination after the copy is complete. +// +// If the source is a directed graph, the destination is undirected, and a fundamental +// cycle exists with two nodes where the edge weights differ, the resulting destination +// graph's edge weight between those nodes is undefined. If there is a defined function +// to resolve such conflicts, an Undirect may be used to do this. +func Copy(dst Builder, src Graph) { + nodes := src.Nodes() + for _, n := range nodes { + dst.AddNode(n) + } + for _, u := range nodes { + for _, v := range src.From(u) { + dst.SetEdge(src.Edge(u, v)) + } + } +} diff --git a/graph/graphs/gen/batagelj_brandes.go b/graph/graphs/gen/batagelj_brandes.go new file mode 100644 index 00000000..942954de --- /dev/null +++ b/graph/graphs/gen/batagelj_brandes.go @@ -0,0 +1,357 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// The functions in this file are random graph generators from the paper +// by Batagelj and Brandes http://algo.uni-konstanz.de/publications/bb-eglrn-05.pdf + +package gen + +import ( + "fmt" + "math" + "math/rand" + + "github.com/gonum/graph" + "github.com/gonum/graph/simple" +) + +// Gnp constructs a Gilbert’s model graph in the destination, dst, of order n. Edges +// between nodes are formed with the probability, p. If src is not nil it is used +// as the random source, otherwise rand.Float64 is used. The graph is constructed +// in O(n+m) time where m is the number of edges added. +func Gnp(dst GraphBuilder, n int, p float64, src *rand.Rand) error { + if p == 0 { + return nil + } + if p < 0 || p > 1 { + return fmt.Errorf("gen: bad probability: p=%v", p) + } + var r func() float64 + if src == nil { + r = rand.Float64 + } else { + r = src.Float64 + } + + for i := 0; i < n; i++ { + if !dst.Has(simple.Node(i)) { + dst.AddNode(simple.Node(i)) + } + } + + lp := math.Log(1 - p) + + // Add forward edges for all graphs. + for v, w := 1, -1; v < n; { + w += 1 + int(math.Log(1-r())/lp) + for w >= v && v < n { + w -= v + v++ + } + if v < n { + dst.SetEdge(simple.Edge{F: simple.Node(w), T: simple.Node(v), W: 1}) + } + } + + // Add backward edges for directed graphs. 
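+	// For an undirected destination the forward pass above already gives
+	// each unordered node pair one chance with probability p, so we can
+	// return. For a directed destination the same geometric skipping is
+	// repeated with the edge orientation reversed so that both directions
+	// are sampled independently.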
+ if _, ok := dst.(graph.Directed); !ok { + return nil + } + for v, w := 1, -1; v < n; { + w += 1 + int(math.Log(1-r())/lp) + for w >= v && v < n { + w -= v + v++ + } + if v < n { + dst.SetEdge(simple.Edge{F: simple.Node(v), T: simple.Node(w), W: 1}) + } + } + + return nil +} + +// edgeNodesFor returns the pair of nodes for the ith edge in a simple +// undirected graph. The pair is returned such that w.ID < v.ID. +func edgeNodesFor(i int) (v, w simple.Node) { + // This is an algebraic simplification of the expressions described + // on p3 of http://algo.uni-konstanz.de/publications/bb-eglrn-05.pdf + v = simple.Node(0.5 + math.Sqrt(float64(1+8*i))/2) + w = simple.Node(i) - v*(v-1)/2 + return v, w +} + +// Gnm constructs a Erdős-Rényi model graph in the destination, dst, of +// order n and size m. If src is not nil it is used as the random source, +// otherwise rand.Intn is used. The graph is constructed in O(m) expected +// time for m ≤ (n choose 2)/2. +func Gnm(dst GraphBuilder, n, m int, src *rand.Rand) error { + if m == 0 { + return nil + } + + hasEdge := dst.HasEdgeBetween + d, isDirected := dst.(graph.Directed) + if isDirected { + m /= 2 + hasEdge = d.HasEdgeFromTo + } + + nChoose2 := (n - 1) * n / 2 + if m < 0 || m > nChoose2 { + return fmt.Errorf("gen: bad size: m=%d", m) + } + + var rnd func(int) int + if src == nil { + rnd = rand.Intn + } else { + rnd = src.Intn + } + + for i := 0; i < n; i++ { + if !dst.Has(simple.Node(i)) { + dst.AddNode(simple.Node(i)) + } + } + + // Add forward edges for all graphs. + for i := 0; i < m; i++ { + for { + v, w := edgeNodesFor(rnd(nChoose2)) + e := simple.Edge{F: w, T: v, W: 1} + if !hasEdge(e.F, e.T) { + dst.SetEdge(e) + break + } + } + } + + // Add backward edges for directed graphs. + if !isDirected { + return nil + } + for i := 0; i < m; i++ { + for { + v, w := edgeNodesFor(rnd(nChoose2)) + e := simple.Edge{F: v, T: w, W: 1} + if !hasEdge(e.F, e.T) { + dst.SetEdge(e) + break + } + } + } + + return nil +} + +// SmallWorldsBB constructs a small worlds graph of order n in the destination, dst. +// Node degree is specified by d and edge replacement by the probability, p. +// If src is not nil it is used as the random source, otherwise rand.Float64 is used. +// The graph is constructed in O(nd) time. +// +// The algorithm used is described in http://algo.uni-konstanz.de/publications/bb-eglrn-05.pdf +func SmallWorldsBB(dst GraphBuilder, n, d int, p float64, src *rand.Rand) error { + if d < 1 || d > (n-1)/2 { + return fmt.Errorf("gen: bad degree: d=%d", d) + } + if p == 0 { + return nil + } + if p < 0 || p >= 1 { + return fmt.Errorf("gen: bad replacement: p=%v", p) + } + var ( + rnd func() float64 + rndN func(int) int + ) + if src == nil { + rnd = rand.Float64 + rndN = rand.Intn + } else { + rnd = src.Float64 + rndN = src.Intn + } + + hasEdge := dst.HasEdgeBetween + dg, isDirected := dst.(graph.Directed) + if isDirected { + hasEdge = dg.HasEdgeFromTo + } + + for i := 0; i < n; i++ { + if !dst.Has(simple.Node(i)) { + dst.AddNode(simple.Node(i)) + } + } + + nChoose2 := (n - 1) * n / 2 + + lp := math.Log(1 - p) + + // Add forward edges for all graphs. 
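+	// k, drawn from a geometric distribution in p, governs which of the
+	// ring-lattice edges (joining each node to its d nearest neighbours)
+	// are kept and which are skipped for random replacement in the loop
+	// that follows.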
+ k := int(math.Log(1-rnd()) / lp) + m := 0 + replace := make(map[int]int) + for v := 0; v < n; v++ { + for i := 1; i <= d; i++ { + if k > 0 { + j := v*(v-1)/2 + (v+i)%n + ej := simple.Edge{W: 1} + ej.T, ej.F = edgeNodesFor(j) + if !hasEdge(ej.From(), ej.To()) { + dst.SetEdge(ej) + } + k-- + m++ + em := simple.Edge{W: 1} + em.T, em.F = edgeNodesFor(m) + if !hasEdge(em.From(), em.To()) { + replace[j] = m + } else { + replace[j] = replace[m] + } + } else { + k = int(math.Log(1-rnd()) / lp) + } + } + } + for i := m + 1; i <= n*d && i < nChoose2; i++ { + r := rndN(nChoose2-i) + i + er := simple.Edge{W: 1} + er.T, er.F = edgeNodesFor(r) + if !hasEdge(er.From(), er.To()) { + dst.SetEdge(er) + } else { + er.T, er.F = edgeNodesFor(replace[r]) + if !hasEdge(er.From(), er.To()) { + dst.SetEdge(er) + } + } + ei := simple.Edge{W: 1} + ei.T, ei.F = edgeNodesFor(i) + if !hasEdge(ei.From(), ei.To()) { + replace[r] = i + } else { + replace[r] = replace[i] + } + } + + // Add backward edges for directed graphs. + if !isDirected { + return nil + } + k = int(math.Log(1-rnd()) / lp) + m = 0 + replace = make(map[int]int) + for v := 0; v < n; v++ { + for i := 1; i <= d; i++ { + if k > 0 { + j := v*(v-1)/2 + (v+i)%n + ej := simple.Edge{W: 1} + ej.F, ej.T = edgeNodesFor(j) + if !hasEdge(ej.From(), ej.To()) { + dst.SetEdge(ej) + } + k-- + m++ + if !hasEdge(edgeNodesFor(m)) { + replace[j] = m + } else { + replace[j] = replace[m] + } + } else { + k = int(math.Log(1-rnd()) / lp) + } + } + } + for i := m + 1; i <= n*d && i < nChoose2; i++ { + r := rndN(nChoose2-i) + i + er := simple.Edge{W: 1} + er.F, er.T = edgeNodesFor(r) + if !hasEdge(er.From(), er.To()) { + dst.SetEdge(er) + } else { + er.F, er.T = edgeNodesFor(replace[r]) + if !hasEdge(er.From(), er.To()) { + dst.SetEdge(er) + } + } + if !hasEdge(edgeNodesFor(i)) { + replace[r] = i + } else { + replace[r] = replace[i] + } + } + + return nil +} + +/* +// Multigraph generators. + +type EdgeAdder interface { + AddEdge(graph.Edge) +} + +func PreferentialAttachment(dst EdgeAdder, n, d int, src *rand.Rand) { + if d < 1 { + panic("gen: bad d") + } + var rnd func(int) int + if src == nil { + rnd = rand.Intn + } else { + rnd = src.Intn + } + + m := make([]simple.Node, 2*n*d) + for v := 0; v < n; v++ { + for i := 0; i < d; i++ { + m[2*(v*d+i)] = simple.Node(v) + m[2*(v*d+i)+1] = simple.Node(m[rnd(2*v*d+i+1)]) + } + } + for i := 0; i < n*d; i++ { + dst.AddEdge(simple.Edge{F: m[2*i], T: m[2*i+1], W: 1}) + } +} + +func BipartitePreferentialAttachment(dst EdgeAdder, n, d int, src *rand.Rand) { + if d < 1 { + panic("gen: bad d") + } + var rnd func(int) int + if src == nil { + rnd = rand.Intn + } else { + rnd = src.Intn + } + + m1 := make([]simple.Node, 2*n*d) + m2 := make([]simple.Node, 2*n*d) + for v := 0; v < n; v++ { + for i := 0; i < d; i++ { + m1[2*(v*d+i)] = simple.Node(v) + m2[2*(v*d+i)] = simple.Node(n + v) + + if r := rnd(2*v*d + i + 1); r&0x1 == 0 { + m1[2*(v*d+i)+1] = m2[r] + } else { + m1[2*(v*d+i)+1] = m1[r] + } + + if r := rnd(2*v*d + i + 1); r&0x1 == 0 { + m2[2*(v*d+i)+1] = m1[r] + } else { + m2[2*(v*d+i)+1] = m2[r] + } + } + } + for i := 0; i < n*d; i++ { + dst.AddEdge(simple.Edge{F: m1[2*i], T: m1[2*i+1], W: 1}) + dst.AddEdge(simple.Edge{F: m2[2*i], T: m2[2*i+1], W: 1}) + } +} +*/ diff --git a/graph/graphs/gen/batagelj_brandes_test.go b/graph/graphs/gen/batagelj_brandes_test.go new file mode 100644 index 00000000..21603b4f --- /dev/null +++ b/graph/graphs/gen/batagelj_brandes_test.go @@ -0,0 +1,175 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gen + +import ( + "math" + "testing" + + "github.com/gonum/graph" + "github.com/gonum/graph/simple" +) + +type gnUndirected struct { + graph.UndirectedBuilder + addBackwards bool + addSelfLoop bool + addMultipleEdge bool +} + +func (g *gnUndirected) SetEdge(e graph.Edge) { + switch { + case e.From().ID() == e.To().ID(): + g.addSelfLoop = true + return + case e.From().ID() > e.To().ID(): + g.addBackwards = true + case g.UndirectedBuilder.HasEdgeBetween(e.From(), e.To()): + g.addMultipleEdge = true + } + + g.UndirectedBuilder.SetEdge(e) +} + +type gnDirected struct { + graph.DirectedBuilder + addSelfLoop bool + addMultipleEdge bool +} + +func (g *gnDirected) SetEdge(e graph.Edge) { + switch { + case e.From().ID() == e.To().ID(): + g.addSelfLoop = true + return + case g.DirectedBuilder.HasEdgeFromTo(e.From(), e.To()): + g.addMultipleEdge = true + } + + g.DirectedBuilder.SetEdge(e) +} + +func TestGnpUndirected(t *testing.T) { + for n := 2; n <= 20; n++ { + for p := 0.; p <= 1; p += 0.1 { + g := &gnUndirected{UndirectedBuilder: simple.NewUndirectedGraph(0, math.Inf(1))} + err := Gnp(g, n, p, nil) + if err != nil { + t.Fatalf("unexpected error: n=%d, p=%v: %v", n, p, err) + } + if g.addBackwards { + t.Errorf("edge added with From.ID > To.ID: n=%d, p=%v", n, p) + } + if g.addSelfLoop { + t.Errorf("unexpected self edge: n=%d, p=%v", n, p) + } + if g.addMultipleEdge { + t.Errorf("unexpected multiple edge: n=%d, p=%v", n, p) + } + } + } +} + +func TestGnpDirected(t *testing.T) { + for n := 2; n <= 20; n++ { + for p := 0.; p <= 1; p += 0.1 { + g := &gnDirected{DirectedBuilder: simple.NewDirectedGraph(0, math.Inf(1))} + err := Gnp(g, n, p, nil) + if err != nil { + t.Fatalf("unexpected error: n=%d, p=%v: %v", n, p, err) + } + if g.addSelfLoop { + t.Errorf("unexpected self edge: n=%d, p=%v", n, p) + } + if g.addMultipleEdge { + t.Errorf("unexpected multiple edge: n=%d, p=%v", n, p) + } + } + } +} + +func TestGnmUndirected(t *testing.T) { + for n := 2; n <= 20; n++ { + nChoose2 := (n - 1) * n / 2 + for m := 0; m <= nChoose2; m++ { + g := &gnUndirected{UndirectedBuilder: simple.NewUndirectedGraph(0, math.Inf(1))} + err := Gnm(g, n, m, nil) + if err != nil { + t.Fatalf("unexpected error: n=%d, m=%d: %v", n, m, err) + } + if g.addBackwards { + t.Errorf("edge added with From.ID > To.ID: n=%d, m=%d", n, m) + } + if g.addSelfLoop { + t.Errorf("unexpected self edge: n=%d, m=%d", n, m) + } + if g.addMultipleEdge { + t.Errorf("unexpected multiple edge: n=%d, m=%d", n, m) + } + } + } +} + +func TestGnmDirected(t *testing.T) { + for n := 2; n <= 20; n++ { + nChoose2 := (n - 1) * n / 2 + for m := 0; m <= nChoose2*2; m++ { + g := &gnDirected{DirectedBuilder: simple.NewDirectedGraph(0, math.Inf(1))} + err := Gnm(g, n, m, nil) + if err != nil { + t.Fatalf("unexpected error: n=%d, m=%d: %v", n, m, err) + } + if g.addSelfLoop { + t.Errorf("unexpected self edge: n=%d, m=%d", n, m) + } + if g.addMultipleEdge { + t.Errorf("unexpected multiple edge: n=%d, m=%d", n, m) + } + } + } +} + +func TestSmallWorldsBBUndirected(t *testing.T) { + for n := 2; n <= 20; n++ { + for d := 1; d <= (n-1)/2; d++ { + for p := 0.; p < 1; p += 0.1 { + g := &gnUndirected{UndirectedBuilder: simple.NewUndirectedGraph(0, math.Inf(1))} + err := SmallWorldsBB(g, n, d, p, nil) + if err != nil { + t.Fatalf("unexpected error: n=%d, d=%d, p=%v: %v", n, d, p, err) + } + if g.addBackwards { + t.Errorf("edge added with From.ID > To.ID: n=%d, 
d=%d, p=%v", n, d, p) + } + if g.addSelfLoop { + t.Errorf("unexpected self edge: n=%d, d=%d, p=%v", n, d, p) + } + if g.addMultipleEdge { + t.Errorf("unexpected multiple edge: n=%d, d=%d, p=%v", n, d, p) + } + } + } + } +} + +func TestSmallWorldsBBDirected(t *testing.T) { + for n := 2; n <= 20; n++ { + for d := 1; d <= (n-1)/2; d++ { + for p := 0.; p < 1; p += 0.1 { + g := &gnDirected{DirectedBuilder: simple.NewDirectedGraph(0, math.Inf(1))} + err := SmallWorldsBB(g, n, d, p, nil) + if err != nil { + t.Fatalf("unexpected error: n=%d, d=%d, p=%v: %v", n, d, p, err) + } + if g.addSelfLoop { + t.Errorf("unexpected self edge: n=%d, d=%d, p=%v", n, d, p) + } + if g.addMultipleEdge { + t.Errorf("unexpected multiple edge: n=%d, d=%d, p=%v", n, d, p) + } + } + } + } +} diff --git a/graph/graphs/gen/duplication.go b/graph/graphs/gen/duplication.go new file mode 100644 index 00000000..d40cbedf --- /dev/null +++ b/graph/graphs/gen/duplication.go @@ -0,0 +1,125 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gen + +import ( + "fmt" + "math" + "math/rand" + "sort" + + "github.com/gonum/graph" + "github.com/gonum/graph/internal/ordered" + "github.com/gonum/graph/simple" +) + +// UndirectedMutator is an undirected graph builder that can remove edges. +type UndirectedMutator interface { + graph.UndirectedBuilder + graph.EdgeRemover +} + +// Duplication constructs a graph in the destination, dst, of order n. New nodes +// are created by duplicating an existing node and all its edges. Each new edge is +// deleted with probability delta. Additional edges are added between the new node +// and existing nodes with probability alpha/|V|. An exception to this addition +// rule is made for the parent node when sigma is not NaN; in this case an edge is +// created with probability sigma. With the exception of the sigma parameter, this +// corresponds to the completely correlated case in doi:10.1016/S0022-5193(03)00028-6. +// If src is not nil it is used as the random source, otherwise rand.Float64 is used. +func Duplication(dst UndirectedMutator, n int, delta, alpha, sigma float64, src *rand.Rand) error { + // As described in doi:10.1016/S0022-5193(03)00028-6 but + // also clarified in doi:10.1186/gb-2007-8-4-r51. + + if delta < 0 || delta > 1 { + return fmt.Errorf("gen: bad delta: delta=%v", delta) + } + if alpha <= 0 || alpha > 1 { + return fmt.Errorf("gen: bad alpha: alpha=%v", alpha) + } + if sigma < 0 || sigma > 1 { + return fmt.Errorf("gen: bad sigma: sigma=%v", sigma) + } + + var ( + rnd func() float64 + rndN func(int) int + ) + if src == nil { + rnd = rand.Float64 + rndN = rand.Intn + } else { + rnd = src.Float64 + rndN = src.Intn + } + + nodes := dst.Nodes() + sort.Sort(ordered.ByID(nodes)) + if len(nodes) == 0 { + n-- + dst.AddNode(simple.Node(0)) + nodes = append(nodes, simple.Node(0)) + } + for i := 0; i < n; i++ { + u := nodes[rndN(len(nodes))] + d := simple.Node(dst.NewNodeID()) + + // Add the duplicate node. + dst.AddNode(d) + + // Loop until we have connectivity + // into the rest of the graph. + for { + // Add edges to parent's neigbours. + to := dst.From(u) + sort.Sort(ordered.ByID(to)) + for _, v := range to { + if rnd() < delta || dst.HasEdgeBetween(v, d) { + continue + } + if v.ID() < d.ID() { + dst.SetEdge(simple.Edge{F: v, T: d, W: 1}) + } else { + dst.SetEdge(simple.Edge{F: d, T: v, W: 1}) + } + } + + // Add edges to old nodes. 
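+			// Each pre-existing node is offered an edge to the duplicate
+			// with probability alpha/|V|; the parent node u is handled
+			// separately below when sigma is not NaN.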
+ scaledAlpha := alpha / float64(len(nodes)) + for _, v := range nodes { + switch v.ID() { + case u.ID(): + if !math.IsNaN(sigma) { + if i == 0 || rnd() < sigma { + if v.ID() < d.ID() { + dst.SetEdge(simple.Edge{F: v, T: d, W: 1}) + } else { + dst.SetEdge(simple.Edge{F: d, T: v, W: 1}) + } + } + continue + } + fallthrough + default: + if rnd() < scaledAlpha && !dst.HasEdgeBetween(v, d) { + if v.ID() < d.ID() { + dst.SetEdge(simple.Edge{F: v, T: d, W: 1}) + } else { + dst.SetEdge(simple.Edge{F: d, T: v, W: 1}) + } + } + } + } + + if len(dst.From(d)) != 0 { + break + } + } + + nodes = append(nodes, d) + } + + return nil +} diff --git a/graph/graphs/gen/duplication_test.go b/graph/graphs/gen/duplication_test.go new file mode 100644 index 00000000..d82a1376 --- /dev/null +++ b/graph/graphs/gen/duplication_test.go @@ -0,0 +1,59 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gen + +import ( + "math" + "testing" + + "github.com/gonum/graph" + "github.com/gonum/graph/simple" +) + +type duplication struct { + UndirectedMutator + addBackwards bool + addSelfLoop bool + addMultipleEdge bool +} + +func (g *duplication) SetEdge(e graph.Edge) { + switch { + case e.From().ID() == e.To().ID(): + g.addSelfLoop = true + return + case e.From().ID() > e.To().ID(): + g.addBackwards = true + case g.UndirectedMutator.HasEdgeBetween(e.From(), e.To()): + g.addMultipleEdge = true + } + + g.UndirectedMutator.SetEdge(e) +} + +func TestDuplication(t *testing.T) { + for n := 2; n <= 50; n++ { + for alpha := 0.1; alpha <= 1; alpha += 0.1 { + for delta := 0.; delta <= 1; delta += 0.2 { + for sigma := 0.; sigma <= 1; sigma += 0.2 { + g := &duplication{UndirectedMutator: simple.NewUndirectedGraph(0, math.Inf(1))} + err := Duplication(g, n, delta, alpha, sigma, nil) + if err != nil { + t.Fatalf("unexpected error: n=%d, alpha=%v, delta=%v sigma=%v: %v", n, alpha, delta, sigma, err) + } + if g.addBackwards { + t.Errorf("edge added with From.ID > To.ID: n=%d, alpha=%v, delta=%v sigma=%v", n, alpha, delta, sigma) + } + if g.addSelfLoop { + t.Errorf("unexpected self edge: n=%d, alpha=%v, delta=%v sigma=%v", n, alpha, delta, sigma) + } + if g.addMultipleEdge { + t.Errorf("unexpected multiple edge: n=%d, alpha=%v, delta=%v sigma=%v", n, alpha, delta, sigma) + } + } + } + } + } +} diff --git a/graph/graphs/gen/gen.go b/graph/graphs/gen/gen.go new file mode 100644 index 00000000..33cb03d9 --- /dev/null +++ b/graph/graphs/gen/gen.go @@ -0,0 +1,22 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package gen provides random graph generation functions. +package gen + +import "github.com/gonum/graph" + +// GraphBuilder is a graph that can have nodes and edges added. +type GraphBuilder interface { + Has(graph.Node) bool + HasEdgeBetween(x, y graph.Node) bool + graph.Builder +} + +func abs(a int) int { + if a < 0 { + return -a + } + return a +} diff --git a/graph/graphs/gen/holme_kim.go b/graph/graphs/gen/holme_kim.go new file mode 100644 index 00000000..ded9abe7 --- /dev/null +++ b/graph/graphs/gen/holme_kim.go @@ -0,0 +1,160 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package gen + +import ( + "errors" + "fmt" + "math/rand" + + "github.com/gonum/graph" + "github.com/gonum/graph/simple" + "github.com/gonum/stat/sampleuv" +) + +// TunableClusteringScaleFree constructs a graph in the destination, dst, of order n. +// The graph is constructed successively starting from an m order graph with one node +// having degree m-1. At each iteration of graph addition, one node is added with m +// additional edges joining existing nodes with probability proportional to the nodes' +// degrees. The edges are formed as a triad with probability, p. +// If src is not nil it is used as the random source, otherwise rand.Float64 and +// rand.Intn are used. +// +// The algorithm is essentially as described in http://arxiv.org/abs/cond-mat/0110452. +func TunableClusteringScaleFree(dst graph.UndirectedBuilder, n, m int, p float64, src *rand.Rand) error { + if p < 0 || p > 1 { + return fmt.Errorf("gen: bad probability: p=%v", p) + } + if n <= m { + return fmt.Errorf("gen: n <= m: n=%v m=%d", n, m) + } + + var ( + rnd func() float64 + rndN func(int) int + ) + if src == nil { + rnd = rand.Float64 + rndN = rand.Intn + } else { + rnd = src.Float64 + rndN = src.Intn + } + + // Initial condition. + wt := make([]float64, n) + for u := 0; u < m; u++ { + if !dst.Has(simple.Node(u)) { + dst.AddNode(simple.Node(u)) + } + // We need to give equal probability for + // adding the first generation of edges. + wt[u] = 1 + } + ws := sampleuv.NewWeighted(wt, src) + for i := range wt { + // These weights will organically grow + // after the first growth iteration. + wt[i] = 0 + } + + // Growth. + for v := m; v < n; v++ { + var u int + pa: + for i := 0; i < m; i++ { + // Triad formation. + if i != 0 && rnd() < p { + for _, w := range permute(dst.From(simple.Node(u)), rndN) { + wid := w.ID() + if wid == v || dst.HasEdgeBetween(w, simple.Node(v)) { + continue + } + dst.SetEdge(simple.Edge{F: w, T: simple.Node(v), W: 1}) + wt[wid]++ + wt[v]++ + continue pa + } + } + + // Preferential attachment. + for { + var ok bool + u, ok = ws.Take() + if !ok { + return errors.New("gen: depleted distribution") + } + if u == v || dst.HasEdgeBetween(simple.Node(u), simple.Node(v)) { + continue + } + dst.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1}) + wt[u]++ + wt[v]++ + break + } + } + + ws.ReweightAll(wt) + } + + return nil +} + +func permute(n []graph.Node, rnd func(int) int) []graph.Node { + for i := range n[:len(n)-1] { + j := rnd(len(n)-i) + i + n[i], n[j] = n[j], n[i] + } + return n +} + +// PreferentialAttachment constructs a graph in the destination, dst, of order n. +// The graph is constructed successively starting from an m order graph with one +// node having degree m-1. At each iteration of graph addition, one node is added +// with m additional edges joining existing nodes with probability proportional +// to the nodes' degrees. If src is not nil it is used as the random source, +// otherwise rand.Float64 is used. +// +// The algorithm is essentially as described in http://arxiv.org/abs/cond-mat/0110452 +// after 10.1126/science.286.5439.509. +func PreferentialAttachment(dst graph.UndirectedBuilder, n, m int, src *rand.Rand) error { + if n <= m { + return fmt.Errorf("gen: n <= m: n=%v m=%d", n, m) + } + + // Initial condition. + wt := make([]float64, n) + for u := 0; u < m; u++ { + if !dst.Has(simple.Node(u)) { + dst.AddNode(simple.Node(u)) + } + // We need to give equal probability for + // adding the first generation of edges. 
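+		// (The weights are zeroed again just below; after that wt tracks
+		// node degree so sampling is proportional to degree.)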
+ wt[u] = 1 + } + ws := sampleuv.NewWeighted(wt, src) + for i := range wt { + // These weights will organically grow + // after the first growth iteration. + wt[i] = 0 + } + + // Growth. + for v := m; v < n; v++ { + for i := 0; i < m; i++ { + // Preferential attachment. + u, ok := ws.Take() + if !ok { + return errors.New("gen: depleted distribution") + } + dst.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1}) + wt[u]++ + wt[v]++ + } + ws.ReweightAll(wt) + } + + return nil +} diff --git a/graph/graphs/gen/holme_kim_test.go b/graph/graphs/gen/holme_kim_test.go new file mode 100644 index 00000000..567ddd52 --- /dev/null +++ b/graph/graphs/gen/holme_kim_test.go @@ -0,0 +1,56 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gen + +import ( + "math" + "testing" + + "github.com/gonum/graph/simple" +) + +func TestTunableClusteringScaleFree(t *testing.T) { + for n := 2; n <= 20; n++ { + for m := 0; m < n; m++ { + for p := 0.; p <= 1; p += 0.1 { + g := &gnUndirected{UndirectedBuilder: simple.NewUndirectedGraph(0, math.Inf(1))} + err := TunableClusteringScaleFree(g, n, m, p, nil) + if err != nil { + t.Fatalf("unexpected error: n=%d, m=%d, p=%v: %v", n, m, p, err) + } + if g.addBackwards { + t.Errorf("edge added with From.ID > To.ID: n=%d, m=%d, p=%v", n, m, p) + } + if g.addSelfLoop { + t.Errorf("unexpected self edge: n=%d, m=%d, p=%v", n, m, p) + } + if g.addMultipleEdge { + t.Errorf("unexpected multiple edge: n=%d, m=%d, p=%v", n, m, p) + } + } + } + } +} + +func TestPreferentialAttachment(t *testing.T) { + for n := 2; n <= 20; n++ { + for m := 0; m < n; m++ { + g := &gnUndirected{UndirectedBuilder: simple.NewUndirectedGraph(0, math.Inf(1))} + err := PreferentialAttachment(g, n, m, nil) + if err != nil { + t.Fatalf("unexpected error: n=%d, m=%d: %v", n, m, err) + } + if g.addBackwards { + t.Errorf("edge added with From.ID > To.ID: n=%d, m=%d", n, m) + } + if g.addSelfLoop { + t.Errorf("unexpected self edge: n=%d, m=%d", n, m) + } + if g.addMultipleEdge { + t.Errorf("unexpected multiple edge: n=%d, m=%d", n, m) + } + } + } +} diff --git a/graph/graphs/gen/small_world.go b/graph/graphs/gen/small_world.go new file mode 100644 index 00000000..baacca75 --- /dev/null +++ b/graph/graphs/gen/small_world.go @@ -0,0 +1,204 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gen + +import ( + "errors" + "fmt" + "math" + "math/rand" + + "github.com/gonum/graph" + "github.com/gonum/graph/simple" + "github.com/gonum/stat/sampleuv" +) + +// NavigableSmallWorld constructs an N-dimensional grid with guaranteed local connectivity +// and random long-range connectivity in the destination, dst. The dims parameters specifies +// the length of each of the N dimensions, p defines the Manhattan distance between local +// nodes, and q defines the number of out-going long-range connections from each node. Long- +// range connections are made with a probability proportional to |d(u,v)|^-r where d is the +// Manhattan distance between non-local nodes. +// +// The algorithm is essentially as described on p4 of http://www.cs.cornell.edu/home/kleinber/swn.pdf. 
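+//
+// A minimal usage sketch (illustrative only; the grid dimensions and the
+// values of p, q and r below are arbitrary):
+//
+//	g := simple.NewUndirectedGraph(0, math.Inf(1))
+//	if err := NavigableSmallWorld(g, []int{10, 10}, 1, 2, 2, nil); err != nil {
+//		// handle the error
+//	}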
+func NavigableSmallWorld(dst GraphBuilder, dims []int, p, q int, r float64, src *rand.Rand) (err error) { + if p < 1 { + return fmt.Errorf("gen: bad local distance: p=%v", p) + } + if q < 0 { + return fmt.Errorf("gen: bad distant link count: q=%v", q) + } + if r < 0 { + return fmt.Errorf("gen: bad decay constant: r=%v", r) + } + + n := 1 + for _, d := range dims { + n *= d + } + for i := 0; i < n; i++ { + if !dst.Has(simple.Node(i)) { + dst.AddNode(simple.Node(i)) + } + } + + hasEdge := dst.HasEdgeBetween + d, isDirected := dst.(graph.Directed) + if isDirected { + hasEdge = d.HasEdgeFromTo + } + + locality := make([]int, len(dims)) + for i := range locality { + locality[i] = p*2 + 1 + } + iterateOver(dims, func(u []int) { + uid := idFrom(u, dims) + iterateOver(locality, func(delta []int) { + d := manhattanDelta(u, delta, dims, -p) + if d == 0 || d > p { + return + } + vid := idFromDelta(u, delta, dims, -p) + e := simple.Edge{F: simple.Node(uid), T: simple.Node(vid), W: 1} + if uid > vid { + e.F, e.T = e.T, e.F + } + if !hasEdge(e.From(), e.To()) { + dst.SetEdge(e) + } + if !isDirected { + return + } + e.F, e.T = e.T, e.F + if !hasEdge(e.From(), e.To()) { + dst.SetEdge(e) + } + }) + }) + + defer func() { + r := recover() + if r != nil { + if r != "depleted distribution" { + panic(r) + } + err = errors.New("depleted distribution") + } + }() + w := make([]float64, n) + ws := sampleuv.NewWeighted(w, src) + iterateOver(dims, func(u []int) { + uid := idFrom(u, dims) + iterateOver(dims, func(v []int) { + d := manhattanBetween(u, v) + if d <= p { + return + } + w[idFrom(v, dims)] = math.Pow(float64(d), -r) + }) + ws.ReweightAll(w) + for i := 0; i < q; i++ { + vid, ok := ws.Take() + if !ok { + panic("depleted distribution") + } + e := simple.Edge{F: simple.Node(uid), T: simple.Node(vid), W: 1} + if !isDirected && uid > vid { + e.F, e.T = e.T, e.F + } + if !hasEdge(e.From(), e.To()) { + dst.SetEdge(e) + } + } + for i := range w { + w[i] = 0 + } + }) + + return nil +} + +// iterateOver performs an iteration over all dimensions of dims, calling fn +// for each state. The elements of state must not be mutated by fn. +func iterateOver(dims []int, fn func(state []int)) { + iterator(0, dims, make([]int, len(dims)), fn) +} + +func iterator(d int, dims, state []int, fn func(state []int)) { + if d >= len(dims) { + fn(state) + return + } + for i := 0; i < dims[d]; i++ { + state[d] = i + iterator(d+1, dims, state, fn) + } +} + +// manhattanBetween returns the Manhattan distance between a and b. +func manhattanBetween(a, b []int) int { + if len(a) != len(b) { + panic("gen: unexpected dimension") + } + var d int + for i, v := range a { + d += abs(v - b[i]) + } + return d +} + +// manhattanDelta returns the Manhattan norm of delta+translate. If a +// translated by delta+translate is out of the range given by dims, +// zero is returned. +func manhattanDelta(a, delta, dims []int, translate int) int { + if len(a) != len(dims) { + panic("gen: unexpected dimension") + } + if len(delta) != len(dims) { + panic("gen: unexpected dimension") + } + var d int + for i, v := range delta { + v += translate + t := a[i] + v + if t < 0 || t >= dims[i] { + return 0 + } + d += abs(v) + } + return d +} + +// idFrom returns a node id for the slice n over the given dimensions. 
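+// For example, with dims = []int{6, 5, 4} the coordinate {1, 2, 3}
+// maps to id 1 + 2*6 + 3*(6*5) = 103.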
+func idFrom(n, dims []int) int { + s := 1 + var id int + for d, m := range dims { + p := n[d] + if p < 0 || p >= m { + panic("gen: element out of range") + } + id += p * s + s *= m + } + return id +} + +// idFromDelta returns a node id for the slice base plus the delta over the given +// dimensions and applying the translation. +func idFromDelta(base, delta, dims []int, translate int) int { + s := 1 + var id int + for d, m := range dims { + n := base[d] + delta[d] + translate + if n < 0 || n >= m { + panic("gen: element out of range") + } + id += n * s + s *= m + } + return id +} diff --git a/graph/graphs/gen/small_world_test.go b/graph/graphs/gen/small_world_test.go new file mode 100644 index 00000000..4dfcd4d7 --- /dev/null +++ b/graph/graphs/gen/small_world_test.go @@ -0,0 +1,73 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gen + +import ( + "math" + "testing" + + "github.com/gonum/graph/simple" +) + +var smallWorldDimensionParameters = [][]int{ + {50}, + {10, 10}, + {6, 5, 4}, +} + +func TestNavigableSmallWorldUndirected(t *testing.T) { + for p := 1; p < 5; p++ { + for q := 0; q < 10; q++ { + for r := 0.5; r < 10; r++ { + for _, dims := range smallWorldDimensionParameters { + g := &gnUndirected{UndirectedBuilder: simple.NewUndirectedGraph(0, math.Inf(1))} + err := NavigableSmallWorld(g, dims, p, q, r, nil) + n := 1 + for _, d := range dims { + n *= d + } + if err != nil { + t.Fatalf("unexpected error: dims=%v n=%d, p=%d, q=%d, r=%v: %v", dims, n, p, q, r, err) + } + if g.addBackwards { + t.Errorf("edge added with From.ID > To.ID: dims=%v n=%d, p=%d, q=%d, r=%v", dims, n, p, q, r) + } + if g.addSelfLoop { + t.Errorf("unexpected self edge: dims=%v n=%d, p=%d, q=%d, r=%v", dims, n, p, q, r) + } + if g.addMultipleEdge { + t.Errorf("unexpected multiple edge: dims=%v n=%d, p=%d, q=%d, r=%v", dims, n, p, q, r) + } + } + } + } + } +} + +func TestNavigableSmallWorldDirected(t *testing.T) { + for p := 1; p < 5; p++ { + for q := 0; q < 10; q++ { + for r := 0.5; r < 10; r++ { + for _, dims := range smallWorldDimensionParameters { + g := &gnDirected{DirectedBuilder: simple.NewDirectedGraph(0, math.Inf(1))} + err := NavigableSmallWorld(g, dims, p, q, r, nil) + n := 1 + for _, d := range dims { + n *= d + } + if err != nil { + t.Fatalf("unexpected error: dims=%v n=%d, p=%d, q=%d, r=%v, r=%v: %v", dims, n, p, q, r, err) + } + if g.addSelfLoop { + t.Errorf("unexpected self edge: dims=%v n=%d, p=%d, q=%d, r=%v", dims, n, p, q, r) + } + if g.addMultipleEdge { + t.Errorf("unexpected multiple edge: dims=%v n=%d, p=%d, q=%d, r=%v", dims, n, p, q, r) + } + } + } + } + } +} diff --git a/graph/internal/linear/linear.go b/graph/internal/linear/linear.go new file mode 100644 index 00000000..532226bb --- /dev/null +++ b/graph/internal/linear/linear.go @@ -0,0 +1,74 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package linear provides common linear data structures. +package linear + +import ( + "github.com/gonum/graph" +) + +// NodeStack implements a LIFO stack of graph.Node. +type NodeStack []graph.Node + +// Len returns the number of graph.Nodes on the stack. +func (s *NodeStack) Len() int { return len(*s) } + +// Pop returns the last graph.Node on the stack and removes it +// from the stack. 
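+// Pop panics if the stack is empty.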
+func (s *NodeStack) Pop() graph.Node { + v := *s + v, n := v[:len(v)-1], v[len(v)-1] + *s = v + return n +} + +// Push adds the node n to the stack at the last position. +func (s *NodeStack) Push(n graph.Node) { *s = append(*s, n) } + +// NodeQueue implements a FIFO queue. +type NodeQueue struct { + head int + data []graph.Node +} + +// Len returns the number of graph.Nodes in the queue. +func (q *NodeQueue) Len() int { return len(q.data) - q.head } + +// Enqueue adds the node n to the back of the queue. +func (q *NodeQueue) Enqueue(n graph.Node) { + if len(q.data) == cap(q.data) && q.head > 0 { + l := q.Len() + copy(q.data, q.data[q.head:]) + q.head = 0 + q.data = append(q.data[:l], n) + } else { + q.data = append(q.data, n) + } +} + +// Dequeue returns the graph.Node at the front of the queue and +// removes it from the queue. +func (q *NodeQueue) Dequeue() graph.Node { + if q.Len() == 0 { + panic("queue: empty queue") + } + + var n graph.Node + n, q.data[q.head] = q.data[q.head], nil + q.head++ + + if q.Len() == 0 { + q.head = 0 + q.data = q.data[:0] + } + + return n +} + +// Reset clears the queue for reuse. +func (q *NodeQueue) Reset() { + q.head = 0 + q.data = q.data[:0] +} diff --git a/graph/internal/ordered/sort.go b/graph/internal/ordered/sort.go new file mode 100644 index 00000000..8cc9657e --- /dev/null +++ b/graph/internal/ordered/sort.go @@ -0,0 +1,62 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package ordered provides common sort ordering types. +package ordered + +import "github.com/gonum/graph" + +// ByID implements the sort.Interface sorting a slice of graph.Node +// by ID. +type ByID []graph.Node + +func (n ByID) Len() int { return len(n) } +func (n ByID) Less(i, j int) bool { return n[i].ID() < n[j].ID() } +func (n ByID) Swap(i, j int) { n[i], n[j] = n[j], n[i] } + +// BySliceValues implements the sort.Interface sorting a slice of +// []int lexically by the values of the []int. +type BySliceValues [][]int + +func (c BySliceValues) Len() int { return len(c) } +func (c BySliceValues) Less(i, j int) bool { + a, b := c[i], c[j] + l := len(a) + if len(b) < l { + l = len(b) + } + for k, v := range a[:l] { + if v < b[k] { + return true + } + if v > b[k] { + return false + } + } + return len(a) < len(b) +} +func (c BySliceValues) Swap(i, j int) { c[i], c[j] = c[j], c[i] } + +// BySliceIDs implements the sort.Interface sorting a slice of +// []graph.Node lexically by the IDs of the []graph.Node. +type BySliceIDs [][]graph.Node + +func (c BySliceIDs) Len() int { return len(c) } +func (c BySliceIDs) Less(i, j int) bool { + a, b := c[i], c[j] + l := len(a) + if len(b) < l { + l = len(b) + } + for k, v := range a[:l] { + if v.ID() < b[k].ID() { + return true + } + if v.ID() > b[k].ID() { + return false + } + } + return len(a) < len(b) +} +func (c BySliceIDs) Swap(i, j int) { c[i], c[j] = c[j], c[i] } diff --git a/graph/internal/set/same.go b/graph/internal/set/same.go new file mode 100644 index 00000000..d2555782 --- /dev/null +++ b/graph/internal/set/same.go @@ -0,0 +1,18 @@ +// Copyright ©2014 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//+build !appengine + +package set + +import "unsafe" + +// same determines whether two sets are backed by the same store. 
In the +// current implementation using hash maps it makes use of the fact that +// hash maps are passed as a pointer to a runtime Hmap struct. A map is +// not seen by the runtime as a pointer though, so we use unsafe to get +// the maps' pointer values to compare. +func same(a, b Nodes) bool { + return *(*uintptr)(unsafe.Pointer(&a)) == *(*uintptr)(unsafe.Pointer(&b)) +} diff --git a/graph/internal/set/same_appengine.go b/graph/internal/set/same_appengine.go new file mode 100644 index 00000000..53780411 --- /dev/null +++ b/graph/internal/set/same_appengine.go @@ -0,0 +1,18 @@ +// Copyright ©2014 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//+build appengine + +package set + +import "reflect" + +// same determines whether two sets are backed by the same store. In the +// current implementation using hash maps it makes use of the fact that +// hash maps are passed as a pointer to a runtime Hmap struct. A map is +// not seen by the runtime as a pointer though, so we use reflect to get +// the maps' pointer values to compare. +func same(a, b Nodes) bool { + return reflect.ValueOf(a).Pointer() == reflect.ValueOf(b).Pointer() +} diff --git a/graph/internal/set/set.go b/graph/internal/set/set.go new file mode 100644 index 00000000..f2a01ea4 --- /dev/null +++ b/graph/internal/set/set.go @@ -0,0 +1,190 @@ +// Copyright ©2014 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package set provides integer and graph.Node sets. +package set + +import "github.com/gonum/graph" + +// Ints is a set of integer identifiers. +type Ints map[int]struct{} + +// The simple accessor methods for Ints are provided to allow ease of +// implementation change should the need arise. + +// Add inserts an element into the set. +func (s Ints) Add(e int) { + s[e] = struct{}{} +} + +// Has reports the existence of the element in the set. +func (s Ints) Has(e int) bool { + _, ok := s[e] + return ok +} + +// Remove deletes the specified element from the set. +func (s Ints) Remove(e int) { + delete(s, e) +} + +// Count reports the number of elements stored in the set. +func (s Ints) Count() int { + return len(s) +} + +// Nodes is a set of nodes keyed in their integer identifiers. +type Nodes map[int]graph.Node + +// The simple accessor methods for Nodes are provided to allow ease of +// implementation change should the need arise. + +// Add inserts an element into the set. +func (s Nodes) Add(n graph.Node) { + s[n.ID()] = n +} + +// Remove deletes the specified element from the set. +func (s Nodes) Remove(e graph.Node) { + delete(s, e.ID()) +} + +// Has reports the existence of the element in the set. +func (s Nodes) Has(n graph.Node) bool { + _, ok := s[n.ID()] + return ok +} + +// clear clears the set, possibly using the same backing store. +func (s *Nodes) clear() { + if len(*s) != 0 { + *s = make(Nodes) + } +} + +// Copy performs a perfect copy from src to dst (meaning the sets will +// be equal). +func (dst Nodes) Copy(src Nodes) Nodes { + if same(src, dst) { + return dst + } + + if len(dst) > 0 { + dst = make(Nodes, len(src)) + } + + for e, n := range src { + dst[e] = n + } + + return dst +} + +// Equal reports set equality between the parameters. Sets are equal if +// and only if they have the same elements. 
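+// Only the integer keys (node IDs) are compared; the node values stored
+// against matching IDs are not examined.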
+func Equal(a, b Nodes) bool { + if same(a, b) { + return true + } + + if len(a) != len(b) { + return false + } + + for e := range a { + if _, ok := b[e]; !ok { + return false + } + } + + return true +} + +// Union takes the union of a and b, and stores it in dst. +// +// The union of two sets, a and b, is the set containing all the +// elements of each, for instance: +// +// {a,b,c} UNION {d,e,f} = {a,b,c,d,e,f} +// +// Since sets may not have repetition, unions of two sets that overlap +// do not contain repeat elements, that is: +// +// {a,b,c} UNION {b,c,d} = {a,b,c,d} +// +func (dst Nodes) Union(a, b Nodes) Nodes { + if same(a, b) { + return dst.Copy(a) + } + + if !same(a, dst) && !same(b, dst) { + dst.clear() + } + + if !same(dst, a) { + for e, n := range a { + dst[e] = n + } + } + + if !same(dst, b) { + for e, n := range b { + dst[e] = n + } + } + + return dst +} + +// Intersect takes the intersection of a and b, and stores it in dst. +// +// The intersection of two sets, a and b, is the set containing all +// the elements shared between the two sets, for instance: +// +// {a,b,c} INTERSECT {b,c,d} = {b,c} +// +// The intersection between a set and itself is itself, and thus +// effectively a copy operation: +// +// {a,b,c} INTERSECT {a,b,c} = {a,b,c} +// +// The intersection between two sets that share no elements is the empty +// set: +// +// {a,b,c} INTERSECT {d,e,f} = {} +// +func (dst Nodes) Intersect(a, b Nodes) Nodes { + var swap Nodes + + if same(a, b) { + return dst.Copy(a) + } + if same(a, dst) { + swap = b + } else if same(b, dst) { + swap = a + } else { + dst.clear() + + if len(a) > len(b) { + a, b = b, a + } + + for e, n := range a { + if _, ok := b[e]; ok { + dst[e] = n + } + } + + return dst + } + + for e := range dst { + if _, ok := swap[e]; !ok { + delete(dst, e) + } + } + + return dst +} diff --git a/graph/internal/set/set_test.go b/graph/internal/set/set_test.go new file mode 100644 index 00000000..bc8de46f --- /dev/null +++ b/graph/internal/set/set_test.go @@ -0,0 +1,413 @@ +// Copyright ©2014 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package set + +import "testing" + +type node int + +func (n node) ID() int { return int(n) } + +// count reports the number of elements stored in the node set. +func (s Nodes) count() int { + return len(s) +} + +// TestSame tests the assumption that pointer equality via unsafe conversion +// of a map[int]struct{} to uintptr is a valid test for perfect identity between +// set values. If any of the tests in TestSame fail, the package is broken and same +// must be reimplemented to conform to the runtime map implementation. The relevant +// code to look at (at least for gc) is in runtime/hashmap.{h,goc}. 
+func TestSame(t *testing.T) { + var ( + a = make(Nodes) + b = make(Nodes) + c = a + ) + + if same(a, b) { + t.Error("Independently created sets test as same") + } + if !same(a, c) { + t.Error("Set copy and original test as not same.") + } + a.Add(node(1)) + if !same(a, c) { + t.Error("Set copy and original test as not same after addition.") + } + if !same(nil, nil) { + t.Error("nil sets test as not same.") + } + if same(b, nil) { + t.Error("nil and empty sets test as same.") + } +} + +func TestAdd(t *testing.T) { + s := make(Nodes) + if s == nil { + t.Fatal("Set cannot be created successfully") + } + + if s.count() != 0 { + t.Error("Set somehow contains new elements upon creation") + } + + s.Add(node(1)) + s.Add(node(3)) + s.Add(node(5)) + + if s.count() != 3 { + t.Error("Incorrect number of set elements after adding") + } + + if !s.Has(node(1)) || !s.Has(node(3)) || !s.Has(node(5)) { + t.Error("Set doesn't contain element that was added") + } + + s.Add(node(1)) + + if s.count() > 3 { + t.Error("Set double-adds element (element not unique)") + } else if s.count() < 3 { + t.Error("Set double-add lowered len") + } + + if !s.Has(node(1)) { + t.Error("Set doesn't contain double-added element") + } + + if !s.Has(node(3)) || !s.Has(node(5)) { + t.Error("Set removes element on double-add") + } + + for e, n := range s { + if e != n.ID() { + t.Error("Element ID did not match key: %d != %d", e, n.ID()) + } + } +} + +func TestRemove(t *testing.T) { + s := make(Nodes) + + s.Add(node(1)) + s.Add(node(3)) + s.Add(node(5)) + + s.Remove(node(1)) + + if s.count() != 2 { + t.Error("Incorrect number of set elements after removing an element") + } + + if s.Has(node(1)) { + t.Error("Element present after removal") + } + + if !s.Has(node(3)) || !s.Has(node(5)) { + t.Error("Set remove removed wrong element") + } + + s.Remove(node(1)) + + if s.count() != 2 || s.Has(node(1)) { + t.Error("Double set remove does something strange") + } + + s.Add(node(1)) + + if s.count() != 3 || !s.Has(node(1)) { + t.Error("Cannot add element after removal") + } +} + +func TestClear(t *testing.T) { + s := make(Nodes) + + s.Add(node(8)) + s.Add(node(9)) + s.Add(node(10)) + + s.clear() + + if s.count() != 0 { + t.Error("clear did not properly reset set to size 0") + } +} + +func TestSelfEqual(t *testing.T) { + s := make(Nodes) + + if !Equal(s, s) { + t.Error("Set is not equal to itself") + } + + s.Add(node(1)) + + if !Equal(s, s) { + t.Error("Set ceases self equality after adding element") + } +} + +func TestEqual(t *testing.T) { + a := make(Nodes) + b := make(Nodes) + + if !Equal(a, b) { + t.Error("Two different empty sets not equal") + } + + a.Add(node(1)) + if Equal(a, b) { + t.Error("Two different sets with different elements not equal") + } + + b.Add(node(1)) + if !Equal(a, b) { + t.Error("Two sets with same element not equal") + } +} + +func TestCopy(t *testing.T) { + a := make(Nodes) + b := make(Nodes) + + a.Add(node(1)) + a.Add(node(2)) + a.Add(node(3)) + + b.Copy(a) + + if !Equal(a, b) { + t.Fatalf("Two sets not equal after copy") + } + + b.Remove(node(1)) + + if Equal(a, b) { + t.Errorf("Mutating one set mutated another after copy") + } +} + +func TestSelfCopy(t *testing.T) { + a := make(Nodes) + + a.Add(node(1)) + a.Add(node(2)) + + a.Copy(a) + + if a.count() != 2 { + t.Error("Something strange happened when copying into self") + } +} + +func TestUnionSame(t *testing.T) { + a := make(Nodes) + b := make(Nodes) + c := make(Nodes) + + a.Add(node(1)) + a.Add(node(2)) + + b.Add(node(1)) + b.Add(node(2)) + + c.Union(a, b) + + if 
c.count() != 2 { + t.Error("Union of same sets yields set with wrong len") + } + + if !c.Has(node(1)) || !c.Has(node(2)) { + t.Error("Union of same sets yields wrong elements") + } + + for i, s := range []Nodes{a, b, c} { + for e, n := range s { + if e != n.ID() { + t.Error("Element ID did not match key in s%d: %d != %d", i+1, e, n.ID()) + } + } + } +} + +func TestUnionDiff(t *testing.T) { + a := make(Nodes) + b := make(Nodes) + c := make(Nodes) + + a.Add(node(1)) + a.Add(node(2)) + + b.Add(node(3)) + + c.Union(a, b) + + if c.count() != 3 { + t.Error("Union of different sets yields set with wrong len") + } + + if !c.Has(node(1)) || !c.Has(node(2)) || !c.Has(node(3)) { + t.Error("Union of different sets yields set with wrong elements") + } + + if a.Has(node(3)) || !a.Has(node(2)) || !a.Has(node(1)) || a.count() != 2 { + t.Error("Union of sets mutates non-destination set (argument 1)") + } + + if !b.Has(node(3)) || b.Has(node(1)) || b.Has(node(2)) || b.count() != 1 { + t.Error("Union of sets mutates non-destination set (argument 2)") + } + + for i, s := range []Nodes{a, b, c} { + for e, n := range s { + if e != n.ID() { + t.Error("Element ID did not match key in s%d: %d != %d", i+1, e, n.ID()) + } + } + } +} + +func TestUnionOverlapping(t *testing.T) { + a := make(Nodes) + b := make(Nodes) + c := make(Nodes) + + a.Add(node(1)) + a.Add(node(2)) + + b.Add(node(2)) + b.Add(node(3)) + + c.Union(a, b) + + if c.count() != 3 { + t.Error("Union of overlapping sets yields set with wrong len") + } + + if !c.Has(node(1)) || !c.Has(node(2)) || !c.Has(node(3)) { + t.Error("Union of overlapping sets yields set with wrong elements") + } + + if a.Has(node(3)) || !a.Has(node(2)) || !a.Has(node(1)) || a.count() != 2 { + t.Error("Union of sets mutates non-destination set (argument 1)") + } + + if !b.Has(node(3)) || b.Has(node(1)) || !b.Has(node(2)) || b.count() != 2 { + t.Error("Union of sets mutates non-destination set (argument 2)") + } + + for i, s := range []Nodes{a, b, c} { + for e, n := range s { + if e != n.ID() { + t.Error("Element ID did not match key in s%d: %d != %d", i+1, e, n.ID()) + } + } + } +} + +func TestIntersectSame(t *testing.T) { + a := make(Nodes) + b := make(Nodes) + c := make(Nodes) + + a.Add(node(2)) + a.Add(node(3)) + + b.Add(node(2)) + b.Add(node(3)) + + c.Intersect(a, b) + + if card := c.count(); card != 2 { + t.Errorf("Intersection of identical sets yields set of wrong len %d", card) + } + + if !c.Has(node(2)) || !c.Has(node(3)) { + t.Error("Intersection of identical sets yields set of wrong elements") + } + + for i, s := range []Nodes{a, b, c} { + for e, n := range s { + if e != n.ID() { + t.Error("Element ID did not match key in s%d: %d != %d", i+1, e, n.ID()) + } + } + } +} + +func TestIntersectDiff(t *testing.T) { + a := make(Nodes) + b := make(Nodes) + c := make(Nodes) + + a.Add(node(2)) + a.Add(node(3)) + + b.Add(node(1)) + b.Add(node(4)) + + c.Intersect(a, b) + + if card := c.count(); card != 0 { + t.Errorf("Intersection of different yields non-empty set %d", card) + } + + if !a.Has(node(2)) || !a.Has(node(3)) || a.Has(node(1)) || a.Has(node(4)) || a.count() != 2 { + t.Error("Intersection of sets mutates non-destination set (argument 1)") + } + + if b.Has(node(2)) || b.Has(node(3)) || !b.Has(node(1)) || !b.Has(node(4)) || b.count() != 2 { + t.Error("Intersection of sets mutates non-destination set (argument 1)") + } + + for i, s := range []Nodes{a, b, c} { + for e, n := range s { + if e != n.ID() { + t.Error("Element ID did not match key in s%d: %d != %d", i+1, e, n.ID()) + 
} + } + } +} + +func TestIntersectOverlapping(t *testing.T) { + a := make(Nodes) + b := make(Nodes) + c := make(Nodes) + + a.Add(node(2)) + a.Add(node(3)) + + b.Add(node(3)) + b.Add(node(4)) + + c.Intersect(a, b) + + if card := c.count(); card != 1 { + t.Errorf("Intersection of overlapping sets yields set of incorrect len %d", card) + } + + if !c.Has(node(3)) { + t.Errorf("Intersection of overlapping sets yields set with wrong element") + } + + if !a.Has(node(2)) || !a.Has(node(3)) || a.Has(node(4)) || a.count() != 2 { + t.Error("Intersection of sets mutates non-destination set (argument 1)") + } + + if b.Has(node(2)) || !b.Has(node(3)) || !b.Has(node(4)) || b.count() != 2 { + t.Error("Intersection of sets mutates non-destination set (argument 1)") + } + + for i, s := range []Nodes{a, b, c} { + for e, n := range s { + if e != n.ID() { + t.Error("Element ID did not match key in s%d: %d != %d", i+1, e, n.ID()) + } + } + } +} diff --git a/graph/network/betweenness.go b/graph/network/betweenness.go new file mode 100644 index 00000000..c5154c9c --- /dev/null +++ b/graph/network/betweenness.go @@ -0,0 +1,256 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package network + +import ( + "math" + + "github.com/gonum/graph" + "github.com/gonum/graph/internal/linear" + "github.com/gonum/graph/path" +) + +// Betweenness returns the non-zero betweenness centrality for nodes in the unweighted graph g. +// +// C_B(v) = \sum_{s ≠ v ≠ t ∈ V} (\sigma_{st}(v) / \sigma_{st}) +// +// where \sigma_{st} and \sigma_{st}(v) are the number of shortest paths from s to t, +// and the subset of those paths containing v respectively. +func Betweenness(g graph.Graph) map[int]float64 { + // Brandes' algorithm for finding betweenness centrality for nodes in + // and unweighted graph: + // + // http://www.inf.uni-konstanz.de/algo/publications/b-fabc-01.pdf + + // TODO(kortschak): Consider using the parallel algorithm when + // GOMAXPROCS != 1. + // + // http://htor.inf.ethz.ch/publications/img/edmonds-hoefler-lumsdaine-bc.pdf + + // Also note special case for sparse networks: + // http://wwwold.iit.cnr.it/staff/marco.pellegrini/papiri/asonam-final.pdf + + cb := make(map[int]float64) + brandes(g, func(s graph.Node, stack linear.NodeStack, p map[int][]graph.Node, delta, sigma map[int]float64) { + for stack.Len() != 0 { + w := stack.Pop() + for _, v := range p[w.ID()] { + delta[v.ID()] += sigma[v.ID()] / sigma[w.ID()] * (1 + delta[w.ID()]) + } + if w.ID() != s.ID() { + if d := delta[w.ID()]; d != 0 { + cb[w.ID()] += d + } + } + } + }) + return cb +} + +// EdgeBetweenness returns the non-zero betweenness centrality for edges in the +// unweighted graph g. For an edge e the centrality C_B is computed as +// +// C_B(e) = \sum_{s ≠ t ∈ V} (\sigma_{st}(e) / \sigma_{st}), +// +// where \sigma_{st} and \sigma_{st}(e) are the number of shortest paths from s +// to t, and the subset of those paths containing e, respectively. +// +// If g is undirected, edges are retained such that u.ID < v.ID where u and v are +// the nodes of e. 
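+//
+// A minimal usage sketch (illustrative only):
+//
+//	cb := EdgeBetweenness(g)
+//	for e, c := range cb {
+//		fmt.Printf("edge (%d,%d): %.3f\n", e[0], e[1], c)
+//	}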
+func EdgeBetweenness(g graph.Graph) map[[2]int]float64 { + // Modified from Brandes' original algorithm as described in Algorithm 7 + // with the exception that node betweenness is not calculated: + // + // http://algo.uni-konstanz.de/publications/b-vspbc-08.pdf + + _, isUndirected := g.(graph.Undirected) + cb := make(map[[2]int]float64) + brandes(g, func(s graph.Node, stack linear.NodeStack, p map[int][]graph.Node, delta, sigma map[int]float64) { + for stack.Len() != 0 { + w := stack.Pop() + for _, v := range p[w.ID()] { + c := sigma[v.ID()] / sigma[w.ID()] * (1 + delta[w.ID()]) + vid := v.ID() + wid := w.ID() + if isUndirected && wid < vid { + vid, wid = wid, vid + } + cb[[2]int{vid, wid}] += c + delta[v.ID()] += c + } + } + }) + return cb +} + +// brandes is the common code for Betweenness and EdgeBetweenness. It corresponds +// to algorithm 1 in http://algo.uni-konstanz.de/publications/b-vspbc-08.pdf with +// the accumulation loop provided by the accumulate closure. +func brandes(g graph.Graph, accumulate func(s graph.Node, stack linear.NodeStack, p map[int][]graph.Node, delta, sigma map[int]float64)) { + var ( + nodes = g.Nodes() + stack linear.NodeStack + p = make(map[int][]graph.Node, len(nodes)) + sigma = make(map[int]float64, len(nodes)) + d = make(map[int]int, len(nodes)) + delta = make(map[int]float64, len(nodes)) + queue linear.NodeQueue + ) + for _, s := range nodes { + stack = stack[:0] + + for _, w := range nodes { + p[w.ID()] = p[w.ID()][:0] + } + + for _, t := range nodes { + sigma[t.ID()] = 0 + d[t.ID()] = -1 + } + sigma[s.ID()] = 1 + d[s.ID()] = 0 + + queue.Enqueue(s) + for queue.Len() != 0 { + v := queue.Dequeue() + stack.Push(v) + for _, w := range g.From(v) { + // w found for the first time? + if d[w.ID()] < 0 { + queue.Enqueue(w) + d[w.ID()] = d[v.ID()] + 1 + } + // shortest path to w via v? + if d[w.ID()] == d[v.ID()]+1 { + sigma[w.ID()] += sigma[v.ID()] + p[w.ID()] = append(p[w.ID()], v) + } + } + } + + for _, v := range nodes { + delta[v.ID()] = 0 + } + + // S returns vertices in order of non-increasing distance from s + accumulate(s, stack, p, delta, sigma) + } +} + +// WeightedGraph is a graph with edge weights. +type WeightedGraph interface { + graph.Graph + graph.Weighter +} + +// BetweennessWeighted returns the non-zero betweenness centrality for nodes in the weighted +// graph g used to construct the given shortest paths. +// +// C_B(v) = \sum_{s ≠ v ≠ t ∈ V} (\sigma_{st}(v) / \sigma_{st}) +// +// where \sigma_{st} and \sigma_{st}(v) are the number of shortest paths from s to t, +// and the subset of those paths containing v respectively. +func BetweennessWeighted(g WeightedGraph, p path.AllShortest) map[int]float64 { + cb := make(map[int]float64) + + nodes := g.Nodes() + for i, s := range nodes { + for j, t := range nodes { + if i == j { + continue + } + d := p.Weight(s, t) + if math.IsInf(d, 0) { + continue + } + + // If we have a unique path, don't do the + // extra work needed to get all paths. + path, _, unique := p.Between(s, t) + if unique { + for _, v := range path[1 : len(path)-1] { + // For undirected graphs we double count + // passage though nodes. This is consistent + // with Brandes' algorithm's behaviour. + cb[v.ID()]++ + } + continue + } + + // Otherwise iterate over all paths. 
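+			// Each of the σ_st shortest paths contributes 1/σ_st to
+			// every node interior to it, so over all paths a node v
+			// accumulates σ_st(v)/σ_st for this (s,t) pair.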
+ paths, _ := p.AllBetween(s, t) + stFrac := 1 / float64(len(paths)) + for _, path := range paths { + for _, v := range path[1 : len(path)-1] { + cb[v.ID()] += stFrac + } + } + } + } + + return cb +} + +// EdgeBetweennessWeighted returns the non-zero betweenness centrality for edges in +// the weighted graph g. For an edge e the centrality C_B is computed as +// +// C_B(e) = \sum_{s ≠ t ∈ V} (\sigma_{st}(e) / \sigma_{st}), +// +// where \sigma_{st} and \sigma_{st}(e) are the number of shortest paths from s +// to t, and the subset of those paths containing e, respectively. +// +// If g is undirected, edges are retained such that u.ID < v.ID where u and v are +// the nodes of e. +func EdgeBetweennessWeighted(g WeightedGraph, p path.AllShortest) map[[2]int]float64 { + cb := make(map[[2]int]float64) + + _, isUndirected := g.(graph.Undirected) + nodes := g.Nodes() + for i, s := range nodes { + for j, t := range nodes { + if i == j { + continue + } + d := p.Weight(s, t) + if math.IsInf(d, 0) { + continue + } + + // If we have a unique path, don't do the + // extra work needed to get all paths. + path, _, unique := p.Between(s, t) + if unique { + for k, v := range path[1:] { + // For undirected graphs we double count + // passage though edges. This is consistent + // with Brandes' algorithm's behaviour. + uid := path[k].ID() + vid := v.ID() + if isUndirected && vid < uid { + uid, vid = vid, uid + } + cb[[2]int{uid, vid}]++ + } + continue + } + + // Otherwise iterate over all paths. + paths, _ := p.AllBetween(s, t) + stFrac := 1 / float64(len(paths)) + for _, path := range paths { + for k, v := range path[1:] { + uid := path[k].ID() + vid := v.ID() + if isUndirected && vid < uid { + uid, vid = vid, uid + } + cb[[2]int{uid, vid}] += stFrac + } + } + } + } + + return cb +} diff --git a/graph/network/betweenness_test.go b/graph/network/betweenness_test.go new file mode 100644 index 00000000..0c5d6db1 --- /dev/null +++ b/graph/network/betweenness_test.go @@ -0,0 +1,340 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
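A minimal usage sketch for the betweenness functions above, assuming the gonum/graph packages used elsewhere in this patch (simple for graph construction, path for all-pairs shortest paths); the three-node path graph is illustrative only:

package main

import (
	"fmt"
	"math"

	"github.com/gonum/graph/network"
	"github.com/gonum/graph/path"
	"github.com/gonum/graph/simple"
)

func main() {
	// Path graph 0 - 1 - 2.
	g := simple.NewUndirectedGraph(0, math.Inf(1))
	g.SetEdge(simple.Edge{F: simple.Node(0), T: simple.Node(1), W: 1})
	g.SetEdge(simple.Edge{F: simple.Node(1), T: simple.Node(2), W: 1})

	// Node 1 is interior to the only 0-2 shortest path, so it is
	// the only node with non-zero betweenness: map[1:2].
	fmt.Println(network.Betweenness(g))
	fmt.Println(network.EdgeBetweenness(g))

	// The weighted variants take precomputed all-pairs shortest paths.
	if p, ok := path.FloydWarshall(g); ok {
		fmt.Println(network.BetweennessWeighted(g, p))
	}
}

The tests below drive the weighted variants the same way, reusing a single FloydWarshall result per graph.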
+ +package network + +import ( + "fmt" + "math" + "sort" + "testing" + + "github.com/gonum/floats" + "github.com/gonum/graph/path" + "github.com/gonum/graph/simple" +) + +var betweennessTests = []struct { + g []set + + wantTol float64 + want map[int]float64 + wantEdges map[[2]int]float64 +}{ + { + // Example graph from http://en.wikipedia.org/wiki/File:PageRanks-Example.svg 16:17, 8 July 2009 + g: []set{ + A: nil, + B: linksTo(C), + C: linksTo(B), + D: linksTo(A, B), + E: linksTo(D, B, F), + F: linksTo(B, E), + G: linksTo(B, E), + H: linksTo(B, E), + I: linksTo(B, E), + J: linksTo(E), + K: linksTo(E), + }, + + wantTol: 1e-1, + want: map[int]float64{ + B: 32, + D: 18, + E: 48, + }, + wantEdges: map[[2]int]float64{ + [2]int{A, D}: 20, + [2]int{B, C}: 20, + [2]int{B, D}: 16, + [2]int{B, E}: 12, + [2]int{B, F}: 9, + [2]int{B, G}: 9, + [2]int{B, H}: 9, + [2]int{B, I}: 9, + [2]int{D, E}: 20, + [2]int{E, F}: 11, + [2]int{E, G}: 11, + [2]int{E, H}: 11, + [2]int{E, I}: 11, + [2]int{E, J}: 20, + [2]int{E, K}: 20, + }, + }, + { + // Example graph from http://en.wikipedia.org/w/index.php?title=PageRank&oldid=659286279#Power_Method + g: []set{ + A: linksTo(B, C), + B: linksTo(D), + C: linksTo(D, E), + D: linksTo(E), + E: linksTo(A), + }, + + wantTol: 1e-3, + want: map[int]float64{ + A: 2, + B: 0.6667, + C: 0.6667, + D: 2, + E: 0.6667, + }, + wantEdges: map[[2]int]float64{ + [2]int{A, B}: 2 + 2/3. + 4/2., + [2]int{A, C}: 2 + 2/3. + 2/2., + [2]int{A, E}: 2 + 2/3. + 2/2., + [2]int{B, D}: 2 + 2/3. + 4/2., + [2]int{C, D}: 2 + 2/3. + 2/2., + [2]int{C, E}: 2, + [2]int{D, E}: 2 + 2/3. + 2/2., + }, + }, + { + g: []set{ + A: linksTo(B), + B: linksTo(C), + C: nil, + }, + + wantTol: 1e-3, + want: map[int]float64{ + B: 2, + }, + wantEdges: map[[2]int]float64{ + [2]int{A, B}: 4, + [2]int{B, C}: 4, + }, + }, + { + g: []set{ + A: linksTo(B), + B: linksTo(C), + C: linksTo(D), + D: linksTo(E), + E: nil, + }, + + wantTol: 1e-3, + want: map[int]float64{ + B: 6, + C: 8, + D: 6, + }, + wantEdges: map[[2]int]float64{ + [2]int{A, B}: 8, + [2]int{B, C}: 12, + [2]int{C, D}: 12, + [2]int{D, E}: 8, + }, + }, + { + g: []set{ + A: linksTo(C), + B: linksTo(C), + C: nil, + D: linksTo(C), + E: linksTo(C), + }, + + wantTol: 1e-3, + want: map[int]float64{ + C: 12, + }, + wantEdges: map[[2]int]float64{ + [2]int{A, C}: 8, + [2]int{B, C}: 8, + [2]int{C, D}: 8, + [2]int{C, E}: 8, + }, + }, + { + g: []set{ + A: linksTo(B, C, D, E), + B: linksTo(C, D, E), + C: linksTo(D, E), + D: linksTo(E), + E: nil, + }, + + wantTol: 1e-3, + want: map[int]float64{}, + wantEdges: map[[2]int]float64{ + [2]int{A, B}: 2, + [2]int{A, C}: 2, + [2]int{A, D}: 2, + [2]int{A, E}: 2, + [2]int{B, C}: 2, + [2]int{B, D}: 2, + [2]int{B, E}: 2, + [2]int{C, D}: 2, + [2]int{C, E}: 2, + [2]int{D, E}: 2, + }, + }, +} + +func TestBetweenness(t *testing.T) { + for i, test := range betweennessTests { + g := simple.NewUndirectedGraph(0, math.Inf(1)) + for u, e := range test.g { + // Add nodes that are not defined by an edge. + if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + // Weight omitted to show weight-independence. 
+ g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 0}) + } + } + got := Betweenness(g) + prec := 1 - int(math.Log10(test.wantTol)) + for n := range test.g { + wantN, gotOK := got[n] + gotN, wantOK := test.want[n] + if gotOK != wantOK { + t.Errorf("unexpected betweenness result for test %d, node %c", i, n+'A') + } + if !floats.EqualWithinAbsOrRel(gotN, wantN, test.wantTol, test.wantTol) { + t.Errorf("unexpected betweenness result for test %d:\ngot: %v\nwant:%v", + i, orderedFloats(got, prec), orderedFloats(test.want, prec)) + break + } + } + } +} + +func TestEdgeBetweenness(t *testing.T) { + for i, test := range betweennessTests { + g := simple.NewUndirectedGraph(0, math.Inf(1)) + for u, e := range test.g { + // Add nodes that are not defined by an edge. + if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + // Weight omitted to show weight-independence. + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 0}) + } + } + got := EdgeBetweenness(g) + prec := 1 - int(math.Log10(test.wantTol)) + outer: + for u := range test.g { + for v := range test.g { + wantQ, gotOK := got[[2]int{u, v}] + gotQ, wantOK := test.wantEdges[[2]int{u, v}] + if gotOK != wantOK { + t.Errorf("unexpected betweenness result for test %d, edge (%c,%c)", i, u+'A', v+'A') + } + if !floats.EqualWithinAbsOrRel(gotQ, wantQ, test.wantTol, test.wantTol) { + t.Errorf("unexpected betweenness result for test %d:\ngot: %v\nwant:%v", + i, orderedPairFloats(got, prec), orderedPairFloats(test.wantEdges, prec)) + break outer + } + } + } + } +} + +func TestBetweennessWeighted(t *testing.T) { + for i, test := range betweennessTests { + g := simple.NewUndirectedGraph(0, math.Inf(1)) + for u, e := range test.g { + // Add nodes that are not defined by an edge. + if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1}) + } + } + + p, ok := path.FloydWarshall(g) + if !ok { + t.Errorf("unexpected negative cycle in test %d", i) + continue + } + + got := BetweennessWeighted(g, p) + prec := 1 - int(math.Log10(test.wantTol)) + for n := range test.g { + gotN, gotOK := got[n] + wantN, wantOK := test.want[n] + if gotOK != wantOK { + t.Errorf("unexpected betweenness existence for test %d, node %c", i, n+'A') + } + if !floats.EqualWithinAbsOrRel(gotN, wantN, test.wantTol, test.wantTol) { + t.Errorf("unexpected betweenness result for test %d:\ngot: %v\nwant:%v", + i, orderedFloats(got, prec), orderedFloats(test.want, prec)) + break + } + } + } +} + +func TestEdgeBetweennessWeighted(t *testing.T) { + for i, test := range betweennessTests { + g := simple.NewUndirectedGraph(0, math.Inf(1)) + for u, e := range test.g { + // Add nodes that are not defined by an edge. 
+ if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1}) + } + } + + p, ok := path.FloydWarshall(g) + if !ok { + t.Errorf("unexpected negative cycle in test %d", i) + continue + } + + got := EdgeBetweennessWeighted(g, p) + prec := 1 - int(math.Log10(test.wantTol)) + outer: + for u := range test.g { + for v := range test.g { + wantQ, gotOK := got[[2]int{u, v}] + gotQ, wantOK := test.wantEdges[[2]int{u, v}] + if gotOK != wantOK { + t.Errorf("unexpected betweenness result for test %d, edge (%c,%c)", i, u+'A', v+'A') + } + if !floats.EqualWithinAbsOrRel(gotQ, wantQ, test.wantTol, test.wantTol) { + t.Errorf("unexpected betweenness result for test %d:\ngot: %v\nwant:%v", + i, orderedPairFloats(got, prec), orderedPairFloats(test.wantEdges, prec)) + break outer + } + } + } + } +} + +func orderedPairFloats(w map[[2]int]float64, prec int) []pairKeyFloatVal { + o := make(orderedPairFloatsMap, 0, len(w)) + for k, v := range w { + o = append(o, pairKeyFloatVal{prec: prec, key: k, val: v}) + } + sort.Sort(o) + return o +} + +type pairKeyFloatVal struct { + prec int + key [2]int + val float64 +} + +func (kv pairKeyFloatVal) String() string { + return fmt.Sprintf("(%c,%c):%.*f", kv.key[0]+'A', kv.key[1]+'A', kv.prec, kv.val) +} + +type orderedPairFloatsMap []pairKeyFloatVal + +func (o orderedPairFloatsMap) Len() int { return len(o) } +func (o orderedPairFloatsMap) Less(i, j int) bool { + return o[i].key[0] < o[j].key[0] || (o[i].key[0] == o[j].key[0] && o[i].key[1] < o[j].key[1]) +} +func (o orderedPairFloatsMap) Swap(i, j int) { o[i], o[j] = o[j], o[i] } diff --git a/graph/network/distance.go b/graph/network/distance.go new file mode 100644 index 00000000..63551a35 --- /dev/null +++ b/graph/network/distance.go @@ -0,0 +1,124 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package network + +import ( + "math" + + "github.com/gonum/graph" + "github.com/gonum/graph/path" +) + +// Closeness returns the closeness centrality for nodes in the graph g used to +// construct the given shortest paths. +// +// C(v) = 1 / \sum_u d(u,v) +// +// For directed graphs the incoming paths are used. Infinite distances are +// not considered. +func Closeness(g graph.Graph, p path.AllShortest) map[int]float64 { + nodes := g.Nodes() + c := make(map[int]float64, len(nodes)) + for _, u := range nodes { + var sum float64 + for _, v := range nodes { + // The ordering here is not relevant for + // undirected graphs, but we make sure we + // are counting incoming paths. + d := p.Weight(v, u) + if math.IsInf(d, 0) { + continue + } + sum += d + } + c[u.ID()] = 1 / sum + } + return c +} + +// Farness returns the farness for nodes in the graph g used to construct +// the given shortest paths. +// +// F(v) = \sum_u d(u,v) +// +// For directed graphs the incoming paths are used. Infinite distances are +// not considered. +func Farness(g graph.Graph, p path.AllShortest) map[int]float64 { + nodes := g.Nodes() + f := make(map[int]float64, len(nodes)) + for _, u := range nodes { + var sum float64 + for _, v := range nodes { + // The ordering here is not relevant for + // undirected graphs, but we make sure we + // are counting incoming paths. 
+ d := p.Weight(v, u) + if math.IsInf(d, 0) { + continue + } + sum += d + } + f[u.ID()] = sum + } + return f +} + +// Harmonic returns the harmonic centrality for nodes in the graph g used to +// construct the given shortest paths. +// +// H(v)= \sum_{u ≠ v} 1 / d(u,v) +// +// For directed graphs the incoming paths are used. Infinite distances are +// not considered. +func Harmonic(g graph.Graph, p path.AllShortest) map[int]float64 { + nodes := g.Nodes() + h := make(map[int]float64, len(nodes)) + for i, u := range nodes { + var sum float64 + for j, v := range nodes { + // The ordering here is not relevant for + // undirected graphs, but we make sure we + // are counting incoming paths. + d := p.Weight(v, u) + if math.IsInf(d, 0) { + continue + } + if i != j { + sum += 1 / d + } + } + h[u.ID()] = sum + } + return h +} + +// Residual returns the Dangalchev's residual closeness for nodes in the graph +// g used to construct the given shortest paths. +// +// C(v)= \sum_{u ≠ v} 1 / 2^d(u,v) +// +// For directed graphs the incoming paths are used. Infinite distances are +// not considered. +func Residual(g graph.Graph, p path.AllShortest) map[int]float64 { + nodes := g.Nodes() + r := make(map[int]float64, len(nodes)) + for i, u := range nodes { + var sum float64 + for j, v := range nodes { + // The ordering here is not relevant for + // undirected graphs, but we make sure we + // are counting incoming paths. + d := p.Weight(v, u) + if math.IsInf(d, 0) { + continue + } + if i != j { + sum += math.Exp2(-d) + } + } + r[u.ID()] = sum + } + return r +} diff --git a/graph/network/distance_test.go b/graph/network/distance_test.go new file mode 100644 index 00000000..f06afbbc --- /dev/null +++ b/graph/network/distance_test.go @@ -0,0 +1,394 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
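A minimal sketch of how these distance-based measures might be called, assuming the same simple and path packages used by the tests below; all four measures share one FloydWarshall computation:

package main

import (
	"fmt"
	"math"

	"github.com/gonum/graph/network"
	"github.com/gonum/graph/path"
	"github.com/gonum/graph/simple"
)

func main() {
	// Path graph 0 - 1 - 2 with unit edge weights.
	g := simple.NewUndirectedGraph(0, math.Inf(1))
	g.SetEdge(simple.Edge{F: simple.Node(0), T: simple.Node(1), W: 1})
	g.SetEdge(simple.Edge{F: simple.Node(1), T: simple.Node(2), W: 1})

	p, ok := path.FloydWarshall(g)
	if !ok {
		panic("unexpected negative cycle")
	}

	fmt.Println(network.Closeness(g, p)) // node 1: 1/(1+1) = 0.5
	fmt.Println(network.Farness(g, p))   // node 1: 1+1 = 2
	fmt.Println(network.Harmonic(g, p))  // node 1: 1/1 + 1/1 = 2
	fmt.Println(network.Residual(g, p))  // node 1: 2^-1 + 2^-1 = 1
}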
+ +package network + +import ( + "math" + "testing" + + "github.com/gonum/floats" + "github.com/gonum/graph/path" + "github.com/gonum/graph/simple" +) + +var undirectedCentralityTests = []struct { + g []set + + farness map[int]float64 + harmonic map[int]float64 + residual map[int]float64 +}{ + { + g: []set{ + A: linksTo(B), + B: linksTo(C), + C: nil, + }, + + farness: map[int]float64{ + A: 1 + 2, + B: 1 + 1, + C: 2 + 1, + }, + harmonic: map[int]float64{ + A: 1 + 1.0/2.0, + B: 1 + 1, + C: 1.0/2.0 + 1, + }, + residual: map[int]float64{ + A: 1/math.Exp2(1) + 1/math.Exp2(2), + B: 1/math.Exp2(1) + 1/math.Exp2(1), + C: 1/math.Exp2(2) + 1/math.Exp2(1), + }, + }, + { + g: []set{ + A: linksTo(B), + B: linksTo(C), + C: linksTo(D), + D: linksTo(E), + E: nil, + }, + + farness: map[int]float64{ + A: 1 + 2 + 3 + 4, + B: 1 + 1 + 2 + 3, + C: 2 + 1 + 1 + 2, + D: 3 + 2 + 1 + 1, + E: 4 + 3 + 2 + 1, + }, + harmonic: map[int]float64{ + A: 1 + 1.0/2.0 + 1.0/3.0 + 1.0/4.0, + B: 1 + 1 + 1.0/2.0 + 1.0/3.0, + C: 1.0/2.0 + 1 + 1 + 1.0/2.0, + D: 1.0/3.0 + 1.0/2.0 + 1 + 1, + E: 1.0/4.0 + 1.0/3.0 + 1.0/2.0 + 1, + }, + residual: map[int]float64{ + A: 1/math.Exp2(1) + 1/math.Exp2(2) + 1/math.Exp2(3) + 1/math.Exp2(4), + B: 1/math.Exp2(1) + 1/math.Exp2(1) + 1/math.Exp2(2) + 1/math.Exp2(3), + C: 1/math.Exp2(2) + 1/math.Exp2(1) + 1/math.Exp2(1) + 1/math.Exp2(2), + D: 1/math.Exp2(3) + 1/math.Exp2(2) + 1/math.Exp2(1) + 1/math.Exp2(1), + E: 1/math.Exp2(4) + 1/math.Exp2(3) + 1/math.Exp2(2) + 1/math.Exp2(1), + }, + }, + { + g: []set{ + A: linksTo(C), + B: linksTo(C), + C: nil, + D: linksTo(C), + E: linksTo(C), + }, + + farness: map[int]float64{ + A: 2 + 2 + 1 + 2, + B: 2 + 1 + 2 + 2, + C: 1 + 1 + 1 + 1, + D: 2 + 1 + 2 + 2, + E: 2 + 2 + 1 + 2, + }, + harmonic: map[int]float64{ + A: 1.0/2.0 + 1.0/2.0 + 1 + 1.0/2.0, + B: 1.0/2.0 + 1 + 1.0/2.0 + 1.0/2.0, + C: 1 + 1 + 1 + 1, + D: 1.0/2.0 + 1 + 1.0/2.0 + 1.0/2.0, + E: 1.0/2.0 + 1.0/2.0 + 1 + 1.0/2.0, + }, + residual: map[int]float64{ + A: 1/math.Exp2(2) + 1/math.Exp2(2) + 1/math.Exp2(1) + 1/math.Exp2(2), + B: 1/math.Exp2(2) + 1/math.Exp2(1) + 1/math.Exp2(2) + 1/math.Exp2(2), + C: 1/math.Exp2(1) + 1/math.Exp2(1) + 1/math.Exp2(1) + 1/math.Exp2(1), + D: 1/math.Exp2(2) + 1/math.Exp2(1) + 1/math.Exp2(2) + 1/math.Exp2(2), + E: 1/math.Exp2(2) + 1/math.Exp2(2) + 1/math.Exp2(1) + 1/math.Exp2(2), + }, + }, + { + g: []set{ + A: linksTo(B, C, D, E), + B: linksTo(C, D, E), + C: linksTo(D, E), + D: linksTo(E), + E: nil, + }, + + farness: map[int]float64{ + A: 1 + 1 + 1 + 1, + B: 1 + 1 + 1 + 1, + C: 1 + 1 + 1 + 1, + D: 1 + 1 + 1 + 1, + E: 1 + 1 + 1 + 1, + }, + harmonic: map[int]float64{ + A: 1 + 1 + 1 + 1, + B: 1 + 1 + 1 + 1, + C: 1 + 1 + 1 + 1, + D: 1 + 1 + 1 + 1, + E: 1 + 1 + 1 + 1, + }, + residual: map[int]float64{ + A: 1/math.Exp2(1) + 1/math.Exp2(1) + 1/math.Exp2(1) + 1/math.Exp2(1), + B: 1/math.Exp2(1) + 1/math.Exp2(1) + 1/math.Exp2(1) + 1/math.Exp2(1), + C: 1/math.Exp2(1) + 1/math.Exp2(1) + 1/math.Exp2(1) + 1/math.Exp2(1), + D: 1/math.Exp2(1) + 1/math.Exp2(1) + 1/math.Exp2(1) + 1/math.Exp2(1), + E: 1/math.Exp2(1) + 1/math.Exp2(1) + 1/math.Exp2(1) + 1/math.Exp2(1), + }, + }, +} + +func TestDistanceCentralityUndirected(t *testing.T) { + const tol = 1e-12 + prec := 1 - int(math.Log10(tol)) + + for i, test := range undirectedCentralityTests { + g := simple.NewUndirectedGraph(0, math.Inf(1)) + for u, e := range test.g { + // Add nodes that are not defined by an edge. 
+ if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1}) + } + } + p, ok := path.FloydWarshall(g) + if !ok { + t.Errorf("unexpected negative cycle in test %d", i) + continue + } + + var got map[int]float64 + + got = Closeness(g, p) + for n := range test.g { + if !floats.EqualWithinAbsOrRel(got[n], 1/test.farness[n], tol, tol) { + want := make(map[int]float64) + for n, v := range test.farness { + want[n] = 1 / v + } + t.Errorf("unexpected closeness centrality for test %d:\ngot: %v\nwant:%v", + i, orderedFloats(got, prec), orderedFloats(want, prec)) + break + } + } + + got = Farness(g, p) + for n := range test.g { + if !floats.EqualWithinAbsOrRel(got[n], test.farness[n], tol, tol) { + t.Errorf("unexpected farness for test %d:\ngot: %v\nwant:%v", + i, orderedFloats(got, prec), orderedFloats(test.farness, prec)) + break + } + } + + got = Harmonic(g, p) + for n := range test.g { + if !floats.EqualWithinAbsOrRel(got[n], test.harmonic[n], tol, tol) { + t.Errorf("unexpected harmonic centrality for test %d:\ngot: %v\nwant:%v", + i, orderedFloats(got, prec), orderedFloats(test.harmonic, prec)) + break + } + } + + got = Residual(g, p) + for n := range test.g { + if !floats.EqualWithinAbsOrRel(got[n], test.residual[n], tol, tol) { + t.Errorf("unexpected residual closeness for test %d:\ngot: %v\nwant:%v", + i, orderedFloats(got, prec), orderedFloats(test.residual, prec)) + break + } + } + } +} + +var directedCentralityTests = []struct { + g []set + + farness map[int]float64 + harmonic map[int]float64 + residual map[int]float64 +}{ + { + g: []set{ + A: linksTo(B), + B: linksTo(C), + C: nil, + }, + + farness: map[int]float64{ + A: 0, + B: 1, + C: 2 + 1, + }, + harmonic: map[int]float64{ + A: 0, + B: 1, + C: 1.0/2.0 + 1, + }, + residual: map[int]float64{ + A: 0, + B: 1 / math.Exp2(1), + C: 1/math.Exp2(2) + 1/math.Exp2(1), + }, + }, + { + g: []set{ + A: linksTo(B), + B: linksTo(C), + C: linksTo(D), + D: linksTo(E), + E: nil, + }, + + farness: map[int]float64{ + A: 0, + B: 1, + C: 2 + 1, + D: 3 + 2 + 1, + E: 4 + 3 + 2 + 1, + }, + harmonic: map[int]float64{ + A: 0, + B: 1, + C: 1.0/2.0 + 1, + D: 1.0/3.0 + 1.0/2.0 + 1, + E: 1.0/4.0 + 1.0/3.0 + 1.0/2.0 + 1, + }, + residual: map[int]float64{ + A: 0, + B: 1 / math.Exp2(1), + C: 1/math.Exp2(2) + 1/math.Exp2(1), + D: 1/math.Exp2(3) + 1/math.Exp2(2) + 1/math.Exp2(1), + E: 1/math.Exp2(4) + 1/math.Exp2(3) + 1/math.Exp2(2) + 1/math.Exp2(1), + }, + }, + { + g: []set{ + A: linksTo(C), + B: linksTo(C), + C: nil, + D: linksTo(C), + E: linksTo(C), + }, + + farness: map[int]float64{ + A: 0, + B: 0, + C: 1 + 1 + 1 + 1, + D: 0, + E: 0, + }, + harmonic: map[int]float64{ + A: 0, + B: 0, + C: 1 + 1 + 1 + 1, + D: 0, + E: 0, + }, + residual: map[int]float64{ + A: 0, + B: 0, + C: 1/math.Exp2(1) + 1/math.Exp2(1) + 1/math.Exp2(1) + 1/math.Exp2(1), + D: 0, + E: 0, + }, + }, + { + g: []set{ + A: linksTo(B, C, D, E), + B: linksTo(C, D, E), + C: linksTo(D, E), + D: linksTo(E), + E: nil, + }, + + farness: map[int]float64{ + A: 0, + B: 1, + C: 1 + 1, + D: 1 + 1 + 1, + E: 1 + 1 + 1 + 1, + }, + harmonic: map[int]float64{ + A: 0, + B: 1, + C: 1 + 1, + D: 1 + 1 + 1, + E: 1 + 1 + 1 + 1, + }, + residual: map[int]float64{ + A: 0, + B: 1 / math.Exp2(1), + C: 1/math.Exp2(1) + 1/math.Exp2(1), + D: 1/math.Exp2(1) + 1/math.Exp2(1) + 1/math.Exp2(1), + E: 1/math.Exp2(1) + 1/math.Exp2(1) + 1/math.Exp2(1) + 1/math.Exp2(1), + }, + }, +} + +func TestDistanceCentralityDirected(t *testing.T) { + const tol = 1e-12 + 
prec := 1 - int(math.Log10(tol)) + + for i, test := range directedCentralityTests { + g := simple.NewDirectedGraph(0, math.Inf(1)) + for u, e := range test.g { + // Add nodes that are not defined by an edge. + if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1}) + } + } + p, ok := path.FloydWarshall(g) + if !ok { + t.Errorf("unexpected negative cycle in test %d", i) + continue + } + + var got map[int]float64 + + got = Closeness(g, p) + for n := range test.g { + if !floats.EqualWithinAbsOrRel(got[n], 1/test.farness[n], tol, tol) { + want := make(map[int]float64) + for n, v := range test.farness { + want[n] = 1 / v + } + t.Errorf("unexpected closeness centrality for test %d:\ngot: %v\nwant:%v", + i, orderedFloats(got, prec), orderedFloats(want, prec)) + break + } + } + + got = Farness(g, p) + for n := range test.g { + if !floats.EqualWithinAbsOrRel(got[n], test.farness[n], tol, tol) { + t.Errorf("unexpected farness for test %d:\ngot: %v\nwant:%v", + i, orderedFloats(got, prec), orderedFloats(test.farness, prec)) + break + } + } + + got = Harmonic(g, p) + for n := range test.g { + if !floats.EqualWithinAbsOrRel(got[n], test.harmonic[n], tol, tol) { + t.Errorf("unexpected harmonic centrality for test %d:\ngot: %v\nwant:%v", + i, orderedFloats(got, prec), orderedFloats(test.harmonic, prec)) + break + } + } + + got = Residual(g, p) + for n := range test.g { + if !floats.EqualWithinAbsOrRel(got[n], test.residual[n], tol, tol) { + t.Errorf("unexpected residual closeness for test %d:\ngot: %v\nwant:%v", + i, orderedFloats(got, prec), orderedFloats(test.residual, prec)) + break + } + } + } +} diff --git a/graph/network/hits.go b/graph/network/hits.go new file mode 100644 index 00000000..b66b1564 --- /dev/null +++ b/graph/network/hits.go @@ -0,0 +1,101 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package network + +import ( + "math" + + "github.com/gonum/floats" + "github.com/gonum/graph" +) + +// HubAuthority is a Hyperlink-Induced Topic Search hub-authority score pair. +type HubAuthority struct { + Hub float64 + Authority float64 +} + +// HITS returns the Hyperlink-Induced Topic Search hub-authority scores for +// nodes of the directed graph g. HITS terminates when the 2-norm of the +// vector difference between iterations is below tol. The returned map is +// keyed on the graph node IDs. +func HITS(g graph.Directed, tol float64) map[int]HubAuthority { + nodes := g.Nodes() + + // Make a topological copy of g with dense node IDs. 
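+	// The dense renumbering lets hub and authority scores be held
+	// in flat slices indexed by position rather than in maps keyed
+	// by node ID.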
+ indexOf := make(map[int]int, len(nodes)) + for i, n := range nodes { + indexOf[n.ID()] = i + } + nodesLinkingTo := make([][]int, len(nodes)) + nodesLinkedFrom := make([][]int, len(nodes)) + for i, n := range nodes { + for _, u := range g.To(n) { + nodesLinkingTo[i] = append(nodesLinkingTo[i], indexOf[u.ID()]) + } + for _, v := range g.From(n) { + nodesLinkedFrom[i] = append(nodesLinkedFrom[i], indexOf[v.ID()]) + } + } + indexOf = nil + + w := make([]float64, 4*len(nodes)) + auth := w[:len(nodes)] + hub := w[len(nodes) : 2*len(nodes)] + for i := range nodes { + auth[i] = 1 + hub[i] = 1 + } + deltaAuth := w[2*len(nodes) : 3*len(nodes)] + deltaHub := w[3*len(nodes):] + + var norm float64 + for { + norm = 0 + for v := range nodes { + var a float64 + for _, u := range nodesLinkingTo[v] { + a += hub[u] + } + deltaAuth[v] = auth[v] + auth[v] = a + norm += a * a + } + norm = math.Sqrt(norm) + + for i := range auth { + auth[i] /= norm + deltaAuth[i] -= auth[i] + } + + norm = 0 + for u := range nodes { + var h float64 + for _, v := range nodesLinkedFrom[u] { + h += auth[v] + } + deltaHub[u] = hub[u] + hub[u] = h + norm += h * h + } + norm = math.Sqrt(norm) + + for i := range hub { + hub[i] /= norm + deltaHub[i] -= hub[i] + } + + if floats.Norm(deltaAuth, 2) < tol && floats.Norm(deltaHub, 2) < tol { + break + } + } + + hubAuth := make(map[int]HubAuthority, len(nodes)) + for i, n := range nodes { + hubAuth[n.ID()] = HubAuthority{Hub: hub[i], Authority: auth[i]} + } + + return hubAuth +} diff --git a/graph/network/hits_test.go b/graph/network/hits_test.go new file mode 100644 index 00000000..3c20ff3d --- /dev/null +++ b/graph/network/hits_test.go @@ -0,0 +1,98 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package network + +import ( + "fmt" + "math" + "sort" + "testing" + + "github.com/gonum/floats" + "github.com/gonum/graph/simple" +) + +var hitsTests = []struct { + g []set + tol float64 + + wantTol float64 + want map[int]HubAuthority +}{ + { + // Example graph from http://www.cis.hut.fi/Opinnot/T-61.6020/2008/pagerank_hits.pdf page 8. + g: []set{ + A: linksTo(B, C, D), + B: linksTo(C, D), + C: linksTo(B), + D: nil, + }, + tol: 1e-4, + + wantTol: 1e-4, + want: map[int]HubAuthority{ + A: {Hub: 0.7887, Authority: 0}, + B: {Hub: 0.5774, Authority: 0.4597}, + C: {Hub: 0.2113, Authority: 0.6280}, + D: {Hub: 0, Authority: 0.6280}, + }, + }, +} + +func TestHITS(t *testing.T) { + for i, test := range hitsTests { + g := simple.NewDirectedGraph(0, math.Inf(1)) + for u, e := range test.g { + // Add nodes that are not defined by an edge. 
+ if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v)}) + } + } + got := HITS(g, test.tol) + prec := 1 - int(math.Log10(test.wantTol)) + for n := range test.g { + if !floats.EqualWithinAbsOrRel(got[n].Hub, test.want[n].Hub, test.wantTol, test.wantTol) { + t.Errorf("unexpected HITS result for test %d:\ngot: %v\nwant:%v", + i, orderedHubAuth(got, prec), orderedHubAuth(test.want, prec)) + break + } + if !floats.EqualWithinAbsOrRel(got[n].Authority, test.want[n].Authority, test.wantTol, test.wantTol) { + t.Errorf("unexpected HITS result for test %d:\ngot: %v\nwant:%v", + i, orderedHubAuth(got, prec), orderedHubAuth(test.want, prec)) + break + } + } + } +} + +func orderedHubAuth(w map[int]HubAuthority, prec int) []keyHubAuthVal { + o := make(orderedHubAuthMap, 0, len(w)) + for k, v := range w { + o = append(o, keyHubAuthVal{prec: prec, key: k, val: v}) + } + sort.Sort(o) + return o +} + +type keyHubAuthVal struct { + prec int + key int + val HubAuthority +} + +func (kv keyHubAuthVal) String() string { + return fmt.Sprintf("%d:{H:%.*f, A:%.*f}", + kv.key, kv.prec, kv.val.Hub, kv.prec, kv.val.Authority, + ) +} + +type orderedHubAuthMap []keyHubAuthVal + +func (o orderedHubAuthMap) Len() int { return len(o) } +func (o orderedHubAuthMap) Less(i, j int) bool { return o[i].key < o[j].key } +func (o orderedHubAuthMap) Swap(i, j int) { o[i], o[j] = o[j], o[i] } diff --git a/graph/network/network.go b/graph/network/network.go new file mode 100644 index 00000000..f0ead5bc --- /dev/null +++ b/graph/network/network.go @@ -0,0 +1,14 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// TODO(kortschak): Implement: +// * edge-weighted PageRank and HITS +// * PersonalizedPageRank: +// http://infolab.stanford.edu/~backrub/google.html 2.1.2 Intuitive Justification +// http://ilpubs.stanford.edu:8090/596/1/2003-35.pdf +// http://www.vldb.org/pvldb/vol7/p1023-maehara.pdf +// * other centrality measures + +// Package network provides network analysis functions. +package network diff --git a/graph/network/network_test.go b/graph/network/network_test.go new file mode 100644 index 00000000..cd05bf21 --- /dev/null +++ b/graph/network/network_test.go @@ -0,0 +1,33 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package network + +const ( + A = iota + B + C + D + E + F + G + H + I + J + K +) + +// set is an integer set. +type set map[int]struct{} + +func linksTo(i ...int) set { + if len(i) == 0 { + return nil + } + s := make(set) + for _, v := range i { + s[v] = struct{}{} + } + return s +} diff --git a/graph/network/page.go b/graph/network/page.go new file mode 100644 index 00000000..ffa8c27f --- /dev/null +++ b/graph/network/page.go @@ -0,0 +1,228 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package network + +import ( + "math" + "math/rand" + + "github.com/gonum/floats" + "github.com/gonum/graph" + "github.com/gonum/matrix/mat64" +) + +// PageRank returns the PageRank weights for nodes of the directed graph g +// using the given damping factor and terminating when the 2-norm of the +// vector difference between iterations is below tol. 
The returned map is +// keyed on the graph node IDs. +func PageRank(g graph.Directed, damp, tol float64) map[int]float64 { + // PageRank is implemented according to "How Google Finds Your Needle + // in the Web's Haystack". + // + // G.I^k = alpha.S.I^k + (1-alpha).1/n.1.I^k + // + // http://www.ams.org/samplings/feature-column/fcarc-pagerank + + nodes := g.Nodes() + indexOf := make(map[int]int, len(nodes)) + for i, n := range nodes { + indexOf[n.ID()] = i + } + + m := mat64.NewDense(len(nodes), len(nodes), nil) + dangling := damp / float64(len(nodes)) + for j, u := range nodes { + to := g.From(u) + f := damp / float64(len(to)) + for _, v := range to { + m.Set(indexOf[v.ID()], j, f) + } + if len(to) == 0 { + for i := range nodes { + m.Set(i, j, dangling) + } + } + } + mat := m.RawMatrix().Data + dt := (1 - damp) / float64(len(nodes)) + for i := range mat { + mat[i] += dt + } + + last := make([]float64, len(nodes)) + for i := range last { + last[i] = 1 + } + lastV := mat64.NewVector(len(nodes), last) + + vec := make([]float64, len(nodes)) + var sum float64 + for i := range vec { + r := rand.NormFloat64() + sum += r + vec[i] = r + } + f := 1 / sum + for i := range vec { + vec[i] *= f + } + v := mat64.NewVector(len(nodes), vec) + + for { + lastV, v = v, lastV + v.MulVec(m, lastV) + if normDiff(vec, last) < tol { + break + } + } + + ranks := make(map[int]float64, len(nodes)) + for i, r := range v.RawVector().Data { + ranks[nodes[i].ID()] = r + } + + return ranks +} + +// PageRankSparse returns the PageRank weights for nodes of the sparse directed +// graph g using the given damping factor and terminating when the 2-norm of the +// vector difference between iterations is below tol. The returned map is +// keyed on the graph node IDs. +func PageRankSparse(g graph.Directed, damp, tol float64) map[int]float64 { + // PageRankSparse is implemented according to "How Google Finds Your Needle + // in the Web's Haystack". + // + // G.I^k = alpha.H.I^k + alpha.A.I^k + (1-alpha).1/n.1.I^k + // + // http://www.ams.org/samplings/feature-column/fcarc-pagerank + + nodes := g.Nodes() + indexOf := make(map[int]int, len(nodes)) + for i, n := range nodes { + indexOf[n.ID()] = i + } + + m := make(rowCompressedMatrix, len(nodes)) + var dangling compressedRow + df := damp / float64(len(nodes)) + for j, u := range nodes { + to := g.From(u) + f := damp / float64(len(to)) + for _, v := range to { + m.addTo(indexOf[v.ID()], j, f) + } + if len(to) == 0 { + dangling.addTo(j, df) + } + } + + last := make([]float64, len(nodes)) + for i := range last { + last[i] = 1 + } + lastV := mat64.NewVector(len(nodes), last) + + vec := make([]float64, len(nodes)) + var sum float64 + for i := range vec { + r := rand.NormFloat64() + sum += r + vec[i] = r + } + f := 1 / sum + for i := range vec { + vec[i] *= f + } + v := mat64.NewVector(len(nodes), vec) + + dt := (1 - damp) / float64(len(nodes)) + for { + lastV, v = v, lastV + + m.mulVecUnitary(v, lastV) // First term of the G matrix equation; + with := dangling.dotUnitary(lastV) // Second term; + away := onesDotUnitary(dt, lastV) // Last term. + + floats.AddConst(with+away, v.RawVector().Data) + if normDiff(vec, last) < tol { + break + } + } + + ranks := make(map[int]float64, len(nodes)) + for i, r := range v.RawVector().Data { + ranks[nodes[i].ID()] = r + } + + return ranks +} + +// rowCompressedMatrix implements row-compressed +// matrix/vector multiplication. +type rowCompressedMatrix []compressedRow + +// addTo adds the value v to the matrix element at (i,j). 
Repeated +// calls to addTo with the same column index will result in +// non-unique element representation. +func (m rowCompressedMatrix) addTo(i, j int, v float64) { m[i].addTo(j, v) } + +// mulVecUnitary multiplies the receiver by the src vector, storing +// the result in dst. It assumes src and dst are the same length as m +// and that both have unitary vector increments. +func (m rowCompressedMatrix) mulVecUnitary(dst, src *mat64.Vector) { + dMat := dst.RawVector().Data + for i, r := range m { + dMat[i] = r.dotUnitary(src) + } +} + +// compressedRow implements a simplified scatter-based Ddot. +type compressedRow []sparseElement + +// addTo adds the value v to the vector element at j. Repeated +// calls to addTo with the same vector index will result in +// non-unique element representation. +func (r *compressedRow) addTo(j int, v float64) { + *r = append(*r, sparseElement{index: j, value: v}) +} + +// dotUnitary performs a simplified scatter-based Ddot operations on +// v and the receiver. v must have have a unitary vector increment. +func (r compressedRow) dotUnitary(v *mat64.Vector) float64 { + var sum float64 + vec := v.RawVector().Data + for _, e := range r { + sum += vec[e.index] * e.value + } + return sum +} + +// sparseElement is a sparse vector or matrix element. +type sparseElement struct { + index int + value float64 +} + +// onesDotUnitary performs the equivalent of a Ddot of v with +// a ones vector of equal length. v must have have a unitary +// vector increment. +func onesDotUnitary(alpha float64, v *mat64.Vector) float64 { + var sum float64 + for _, f := range v.RawVector().Data { + sum += alpha * f + } + return sum +} + +// normDiff returns the 2-norm of the difference between x and y. +// This is a cut down version of gonum/floats.Distance. +func normDiff(x, y []float64) float64 { + var sum float64 + for i, v := range x { + d := v - y[i] + sum += d * d + } + return math.Sqrt(sum) +} diff --git a/graph/network/page_test.go b/graph/network/page_test.go new file mode 100644 index 00000000..14ad7d15 --- /dev/null +++ b/graph/network/page_test.go @@ -0,0 +1,151 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package network + +import ( + "fmt" + "math" + "sort" + "testing" + + "github.com/gonum/floats" + "github.com/gonum/graph/simple" +) + +var pageRankTests = []struct { + g []set + damp float64 + tol float64 + + wantTol float64 + want map[int]float64 +}{ + { + // Example graph from http://en.wikipedia.org/wiki/File:PageRanks-Example.svg 16:17, 8 July 2009 + g: []set{ + A: nil, + B: linksTo(C), + C: linksTo(B), + D: linksTo(A, B), + E: linksTo(D, B, F), + F: linksTo(B, E), + G: linksTo(B, E), + H: linksTo(B, E), + I: linksTo(B, E), + J: linksTo(E), + K: linksTo(E), + }, + damp: 0.85, + tol: 1e-8, + + wantTol: 1e-8, + want: map[int]float64{ + A: 0.03278149, + B: 0.38440095, + C: 0.34291029, + D: 0.03908709, + E: 0.08088569, + F: 0.03908709, + G: 0.01616948, + H: 0.01616948, + I: 0.01616948, + J: 0.01616948, + K: 0.01616948, + }, + }, + { + // Example graph from http://en.wikipedia.org/w/index.php?title=PageRank&oldid=659286279#Power_Method + // Expected result calculated with the given MATLAB code. 
+ g: []set{ + A: linksTo(B, C), + B: linksTo(D), + C: linksTo(D, E), + D: linksTo(E), + E: linksTo(A), + }, + damp: 0.80, + tol: 1e-3, + + wantTol: 1e-3, + want: map[int]float64{ + A: 0.250, + B: 0.140, + C: 0.140, + D: 0.208, + E: 0.262, + }, + }, +} + +func TestPageRank(t *testing.T) { + for i, test := range pageRankTests { + g := simple.NewDirectedGraph(0, math.Inf(1)) + for u, e := range test.g { + // Add nodes that are not defined by an edge. + if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v)}) + } + } + got := PageRank(g, test.damp, test.tol) + prec := 1 - int(math.Log10(test.wantTol)) + for n := range test.g { + if !floats.EqualWithinAbsOrRel(got[n], test.want[n], test.wantTol, test.wantTol) { + t.Errorf("unexpected PageRank result for test %d:\ngot: %v\nwant:%v", + i, orderedFloats(got, prec), orderedFloats(test.want, prec)) + break + } + } + } +} + +func TestPageRankSparse(t *testing.T) { + for i, test := range pageRankTests { + g := simple.NewDirectedGraph(0, math.Inf(1)) + for u, e := range test.g { + // Add nodes that are not defined by an edge. + if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v)}) + } + } + got := PageRankSparse(g, test.damp, test.tol) + prec := 1 - int(math.Log10(test.wantTol)) + for n := range test.g { + if !floats.EqualWithinAbsOrRel(got[n], test.want[n], test.wantTol, test.wantTol) { + t.Errorf("unexpected PageRank result for test %d:\ngot: %v\nwant:%v", + i, orderedFloats(got, prec), orderedFloats(test.want, prec)) + break + } + } + } +} + +func orderedFloats(w map[int]float64, prec int) []keyFloatVal { + o := make(orderedFloatsMap, 0, len(w)) + for k, v := range w { + o = append(o, keyFloatVal{prec: prec, key: k, val: v}) + } + sort.Sort(o) + return o +} + +type keyFloatVal struct { + prec int + key int + val float64 +} + +func (kv keyFloatVal) String() string { return fmt.Sprintf("%c:%.*f", kv.key+'A', kv.prec, kv.val) } + +type orderedFloatsMap []keyFloatVal + +func (o orderedFloatsMap) Len() int { return len(o) } +func (o orderedFloatsMap) Less(i, j int) bool { return o[i].key < o[j].key } +func (o orderedFloatsMap) Swap(i, j int) { o[i], o[j] = o[j], o[i] } diff --git a/graph/path/a_star.go b/graph/path/a_star.go new file mode 100644 index 00000000..5563c1f4 --- /dev/null +++ b/graph/path/a_star.go @@ -0,0 +1,151 @@ +// Copyright ©2014 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package path + +import ( + "container/heap" + + "github.com/gonum/graph" + "github.com/gonum/graph/internal/set" +) + +// AStar finds the A*-shortest path from s to t in g using the heuristic h. The path and +// its cost are returned in a Shortest along with paths and costs to all nodes explored +// during the search. The number of expanded nodes is also returned. This value may help +// with heuristic tuning. +// +// The path will be the shortest path if the heuristic is admissible. A heuristic is +// admissible if for any node, n, in the graph, the heuristic estimate of the cost of +// the path from n to t is less than or equal to the true cost of that path. +// +// If h is nil, AStar will use the g.HeuristicCost method if g implements HeuristicCoster, +// falling back to NullHeuristic otherwise. If the graph does not implement graph.Weighter, +// UniformCost is used. 
AStar will panic if g has an A*-reachable negative edge weight. +func AStar(s, t graph.Node, g graph.Graph, h Heuristic) (path Shortest, expanded int) { + if !g.Has(s) || !g.Has(t) { + return Shortest{from: s}, 0 + } + var weight Weighting + if wg, ok := g.(graph.Weighter); ok { + weight = wg.Weight + } else { + weight = UniformCost(g) + } + if h == nil { + if g, ok := g.(HeuristicCoster); ok { + h = g.HeuristicCost + } else { + h = NullHeuristic + } + } + + path = newShortestFrom(s, g.Nodes()) + tid := t.ID() + + visited := make(set.Ints) + open := &aStarQueue{indexOf: make(map[int]int)} + heap.Push(open, aStarNode{node: s, gscore: 0, fscore: h(s, t)}) + + for open.Len() != 0 { + u := heap.Pop(open).(aStarNode) + uid := u.node.ID() + i := path.indexOf[uid] + expanded++ + + if uid == tid { + break + } + + visited.Add(uid) + for _, v := range g.From(u.node) { + vid := v.ID() + if visited.Has(vid) { + continue + } + j := path.indexOf[vid] + + w, ok := weight(u.node, v) + if !ok { + panic("A*: unexpected invalid weight") + } + if w < 0 { + panic("A*: negative edge weight") + } + g := u.gscore + w + if n, ok := open.node(vid); !ok { + path.set(j, g, i) + heap.Push(open, aStarNode{node: v, gscore: g, fscore: g + h(v, t)}) + } else if g < n.gscore { + path.set(j, g, i) + open.update(vid, g, g+h(v, t)) + } + } + } + + return path, expanded +} + +// NullHeuristic is an admissible, consistent heuristic that will not speed up computation. +func NullHeuristic(_, _ graph.Node) float64 { + return 0 +} + +// aStarNode adds A* accounting to a graph.Node. +type aStarNode struct { + node graph.Node + gscore float64 + fscore float64 +} + +// aStarQueue is an A* priority queue. +type aStarQueue struct { + indexOf map[int]int + nodes []aStarNode +} + +func (q *aStarQueue) Less(i, j int) bool { + return q.nodes[i].fscore < q.nodes[j].fscore +} + +func (q *aStarQueue) Swap(i, j int) { + q.indexOf[q.nodes[i].node.ID()] = j + q.indexOf[q.nodes[j].node.ID()] = i + q.nodes[i], q.nodes[j] = q.nodes[j], q.nodes[i] +} + +func (q *aStarQueue) Len() int { + return len(q.nodes) +} + +func (q *aStarQueue) Push(x interface{}) { + n := x.(aStarNode) + q.indexOf[n.node.ID()] = len(q.nodes) + q.nodes = append(q.nodes, n) +} + +func (q *aStarQueue) Pop() interface{} { + n := q.nodes[len(q.nodes)-1] + q.nodes = q.nodes[:len(q.nodes)-1] + delete(q.indexOf, n.node.ID()) + return n +} + +func (q *aStarQueue) update(id int, g, f float64) { + i, ok := q.indexOf[id] + if !ok { + return + } + q.nodes[i].gscore = g + q.nodes[i].fscore = f + heap.Fix(q, i) +} + +func (q *aStarQueue) node(id int) (aStarNode, bool) { + loc, ok := q.indexOf[id] + if ok { + return q.nodes[loc], true + } + return aStarNode{}, false +} diff --git a/graph/path/a_star_test.go b/graph/path/a_star_test.go new file mode 100644 index 00000000..21ad9e3a --- /dev/null +++ b/graph/path/a_star_test.go @@ -0,0 +1,310 @@ +// Copyright ©2014 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
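A minimal usage sketch for AStar, assuming the simple package used by the tests below; the triangle graph and the nil heuristic (which falls back to NullHeuristic, reducing A* to uniform-cost search) are illustrative only:

package main

import (
	"fmt"
	"math"

	"github.com/gonum/graph/path"
	"github.com/gonum/graph/simple"
)

func main() {
	// Triangle in which the direct 0-2 edge is more expensive
	// than the detour through node 1.
	g := simple.NewUndirectedGraph(0, math.Inf(1))
	g.SetEdge(simple.Edge{F: simple.Node(0), T: simple.Node(1), W: 1})
	g.SetEdge(simple.Edge{F: simple.Node(1), T: simple.Node(2), W: 1})
	g.SetEdge(simple.Edge{F: simple.Node(0), T: simple.Node(2), W: 3})

	pt, expanded := path.AStar(simple.Node(0), simple.Node(2), g, nil)
	route, cost := pt.To(simple.Node(2))
	fmt.Println(route, cost, expanded) // [0 1 2] 2, plus the expansion count.
}

With an admissible heuristic the same route is returned, typically after fewer node expansions.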
+ +package path + +import ( + "math" + "reflect" + "testing" + + "github.com/gonum/graph" + "github.com/gonum/graph/path/internal" + "github.com/gonum/graph/path/internal/testgraphs" + "github.com/gonum/graph/simple" + "github.com/gonum/graph/topo" +) + +var aStarTests = []struct { + name string + g graph.Graph + + s, t int + heuristic Heuristic + wantPath []int +}{ + { + name: "simple path", + g: func() graph.Graph { + return internal.NewGridFrom( + "*..*", + "**.*", + "**.*", + "**.*", + ) + }(), + + s: 1, t: 14, + wantPath: []int{1, 2, 6, 10, 14}, + }, + { + name: "small open graph", + g: internal.NewGrid(3, 3, true), + + s: 0, t: 8, + }, + { + name: "large open graph", + g: internal.NewGrid(1000, 1000, true), + + s: 0, t: 999*1000 + 999, + }, + { + name: "no path", + g: func() graph.Graph { + tg := internal.NewGrid(5, 5, true) + + // Create a complete "wall" across the middle row. + tg.Set(2, 0, false) + tg.Set(2, 1, false) + tg.Set(2, 2, false) + tg.Set(2, 3, false) + tg.Set(2, 4, false) + + return tg + }(), + + s: 2, t: 22, + }, + { + name: "partially obstructed", + g: func() graph.Graph { + tg := internal.NewGrid(10, 10, true) + + // Create a partial "wall" accross the middle + // row with a gap at the left-hand end. + tg.Set(4, 1, false) + tg.Set(4, 2, false) + tg.Set(4, 3, false) + tg.Set(4, 4, false) + tg.Set(4, 5, false) + tg.Set(4, 6, false) + tg.Set(4, 7, false) + tg.Set(4, 8, false) + tg.Set(4, 9, false) + + return tg + }(), + + s: 5, t: 9*10 + 9, + }, + { + name: "partially obstructed with heuristic", + g: func() graph.Graph { + tg := internal.NewGrid(10, 10, true) + + // Create a partial "wall" accross the middle + // row with a gap at the left-hand end. + tg.Set(4, 1, false) + tg.Set(4, 2, false) + tg.Set(4, 3, false) + tg.Set(4, 4, false) + tg.Set(4, 5, false) + tg.Set(4, 6, false) + tg.Set(4, 7, false) + tg.Set(4, 8, false) + tg.Set(4, 9, false) + + return tg + }(), + + s: 5, t: 9*10 + 9, + // Manhattan Heuristic + heuristic: func(u, v graph.Node) float64 { + uid := u.ID() + cu := (uid % 10) + ru := (uid - cu) / 10 + + vid := v.ID() + cv := (vid % 10) + rv := (vid - cv) / 10 + + return math.Abs(float64(ru-rv)) + math.Abs(float64(cu-cv)) + }, + }, +} + +func TestAStar(t *testing.T) { + for _, test := range aStarTests { + pt, _ := AStar(simple.Node(test.s), simple.Node(test.t), test.g, test.heuristic) + + p, cost := pt.To(simple.Node(test.t)) + + if !topo.IsPathIn(test.g, p) { + t.Error("got path that is not path in input graph for %q", test.name) + } + + bfp, ok := BellmanFordFrom(simple.Node(test.s), test.g) + if !ok { + t.Fatalf("unexpected negative cycle in %q", test.name) + } + if want := bfp.WeightTo(simple.Node(test.t)); cost != want { + t.Errorf("unexpected cost for %q: got:%v want:%v", test.name, cost, want) + } + + var got = make([]int, 0, len(p)) + for _, n := range p { + got = append(got, n.ID()) + } + if test.wantPath != nil && !reflect.DeepEqual(got, test.wantPath) { + t.Errorf("unexpected result for %q:\ngot: %v\nwant:%v", test.name, got, test.wantPath) + } + } +} + +func TestExhaustiveAStar(t *testing.T) { + g := simple.NewUndirectedGraph(0, math.Inf(1)) + nodes := []locatedNode{ + {id: 1, x: 0, y: 6}, + {id: 2, x: 1, y: 0}, + {id: 3, x: 8, y: 7}, + {id: 4, x: 16, y: 0}, + {id: 5, x: 17, y: 6}, + {id: 6, x: 9, y: 8}, + } + for _, n := range nodes { + g.AddNode(n) + } + + edges := []weightedEdge{ + {from: g.Node(1), to: g.Node(2), cost: 7}, + {from: g.Node(1), to: g.Node(3), cost: 9}, + {from: g.Node(1), to: g.Node(6), cost: 14}, + {from: g.Node(2), to: 
g.Node(3), cost: 10}, + {from: g.Node(2), to: g.Node(4), cost: 15}, + {from: g.Node(3), to: g.Node(4), cost: 11}, + {from: g.Node(3), to: g.Node(6), cost: 2}, + {from: g.Node(4), to: g.Node(5), cost: 7}, + {from: g.Node(5), to: g.Node(6), cost: 9}, + } + for _, e := range edges { + g.SetEdge(e) + } + + heuristic := func(u, v graph.Node) float64 { + lu := u.(locatedNode) + lv := v.(locatedNode) + return math.Hypot(lu.x-lv.x, lu.y-lv.y) + } + + if ok, edge, goal := isMonotonic(g, heuristic); !ok { + t.Fatalf("non-monotonic heuristic at edge:%v for goal:%v", edge, goal) + } + + ps := DijkstraAllPaths(g) + for _, start := range g.Nodes() { + for _, goal := range g.Nodes() { + pt, _ := AStar(start, goal, g, heuristic) + gotPath, gotWeight := pt.To(goal) + wantPath, wantWeight, _ := ps.Between(start, goal) + if gotWeight != wantWeight { + t.Errorf("unexpected path weight from %v to %v result: got:%s want:%s", + start, goal, gotWeight, wantWeight) + } + if !reflect.DeepEqual(gotPath, wantPath) { + t.Errorf("unexpected path from %v to %v result:\ngot: %v\nwant:%v", + start, goal, gotPath, wantPath) + } + } + } +} + +type locatedNode struct { + id int + x, y float64 +} + +func (n locatedNode) ID() int { return n.id } + +type weightedEdge struct { + from, to graph.Node + cost float64 +} + +func (e weightedEdge) From() graph.Node { return e.from } +func (e weightedEdge) To() graph.Node { return e.to } +func (e weightedEdge) Weight() float64 { return e.cost } + +func isMonotonic(g UndirectedWeightLister, h Heuristic) (ok bool, at graph.Edge, goal graph.Node) { + for _, goal := range g.Nodes() { + for _, edge := range g.Edges() { + from := edge.From() + to := edge.To() + w, ok := g.Weight(from, to) + if !ok { + panic("A*: unexpected invalid weight") + } + if h(from, goal) > w+h(to, goal) { + return false, edge, goal + } + } + } + return true, nil, nil +} + +func TestAStarNullHeuristic(t *testing.T) { + for _, test := range testgraphs.ShortestPathTests { + g := test.Graph() + for _, e := range test.Edges { + g.SetEdge(e) + } + + var ( + pt Shortest + + panicked bool + ) + func() { + defer func() { + panicked = recover() != nil + }() + pt, _ = AStar(test.Query.From(), test.Query.To(), g.(graph.Graph), nil) + }() + if panicked || test.HasNegativeWeight { + if !test.HasNegativeWeight { + t.Errorf("%q: unexpected panic", test.Name) + } + if !panicked { + t.Errorf("%q: expected panic for negative edge weight", test.Name) + } + continue + } + + if pt.From().ID() != test.Query.From().ID() { + t.Fatalf("%q: unexpected from node ID: got:%d want:%d", pt.From().ID(), test.Query.From().ID()) + } + + p, weight := pt.To(test.Query.To()) + if weight != test.Weight { + t.Errorf("%q: unexpected weight from Between: got:%f want:%f", + test.Name, weight, test.Weight) + } + if weight := pt.WeightTo(test.Query.To()); weight != test.Weight { + t.Errorf("%q: unexpected weight from Weight: got:%f want:%f", + test.Name, weight, test.Weight) + } + + var got []int + for _, n := range p { + got = append(got, n.ID()) + } + ok := len(got) == 0 && len(test.WantPaths) == 0 + for _, sp := range test.WantPaths { + if reflect.DeepEqual(got, sp) { + ok = true + break + } + } + if !ok { + t.Errorf("%q: unexpected shortest path:\ngot: %v\nwant from:%v", + test.Name, p, test.WantPaths) + } + + np, weight := pt.To(test.NoPathFor.To()) + if pt.From().ID() == test.NoPathFor.From().ID() && (np != nil || !math.IsInf(weight, 1)) { + t.Errorf("%q: unexpected path:\ngot: path=%v weight=%f\nwant:path= weight=+Inf", + test.Name, np, weight) + } + } +} 
diff --git a/graph/path/bellman_ford_moore.go b/graph/path/bellman_ford_moore.go new file mode 100644 index 00000000..86bcf728 --- /dev/null +++ b/graph/path/bellman_ford_moore.go @@ -0,0 +1,67 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package path + +import "github.com/gonum/graph" + +// BellmanFordFrom returns a shortest-path tree for a shortest path from u to all nodes in +// the graph g, or false indicating that a negative cycle exists in the graph. If the graph +// does not implement graph.Weighter, UniformCost is used. +// +// The time complexity of BellmanFordFrom is O(|V|.|E|). +func BellmanFordFrom(u graph.Node, g graph.Graph) (path Shortest, ok bool) { + if !g.Has(u) { + return Shortest{from: u}, true + } + var weight Weighting + if wg, ok := g.(graph.Weighter); ok { + weight = wg.Weight + } else { + weight = UniformCost(g) + } + + nodes := g.Nodes() + + path = newShortestFrom(u, nodes) + path.dist[path.indexOf[u.ID()]] = 0 + + // TODO(kortschak): Consider adding further optimisations + // from http://arxiv.org/abs/1111.5414. + for i := 1; i < len(nodes); i++ { + changed := false + for j, u := range nodes { + for _, v := range g.From(u) { + k := path.indexOf[v.ID()] + w, ok := weight(u, v) + if !ok { + panic("bellman-ford: unexpected invalid weight") + } + joint := path.dist[j] + w + if joint < path.dist[k] { + path.set(k, joint, j) + changed = true + } + } + } + if !changed { + break + } + } + + for j, u := range nodes { + for _, v := range g.From(u) { + k := path.indexOf[v.ID()] + w, ok := weight(u, v) + if !ok { + panic("bellman-ford: unexpected invalid weight") + } + if path.dist[j]+w < path.dist[k] { + return path, false + } + } + } + + return path, true +} diff --git a/graph/path/bellman_ford_moore_test.go b/graph/path/bellman_ford_moore_test.go new file mode 100644 index 00000000..49be58d6 --- /dev/null +++ b/graph/path/bellman_ford_moore_test.go @@ -0,0 +1,70 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
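A minimal usage sketch for BellmanFordFrom, assuming the simple package used by the tests below; unlike A* and Dijkstra it tolerates negative edge weights, failing only when a negative cycle is present:

package main

import (
	"fmt"
	"math"

	"github.com/gonum/graph/path"
	"github.com/gonum/graph/simple"
)

func main() {
	g := simple.NewDirectedGraph(0, math.Inf(1))
	// Negative edge weights are permitted; only negative cycles
	// cause BellmanFordFrom to report failure.
	g.SetEdge(simple.Edge{F: simple.Node(0), T: simple.Node(1), W: 2})
	g.SetEdge(simple.Edge{F: simple.Node(1), T: simple.Node(2), W: -1})

	pt, ok := path.BellmanFordFrom(simple.Node(0), g)
	if !ok {
		panic("negative cycle")
	}
	route, cost := pt.To(simple.Node(2))
	fmt.Println(route, cost) // [0 1 2] 1
}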
+ +package path + +import ( + "math" + "reflect" + "testing" + + "github.com/gonum/graph" + "github.com/gonum/graph/path/internal/testgraphs" +) + +func TestBellmanFordFrom(t *testing.T) { + for _, test := range testgraphs.ShortestPathTests { + g := test.Graph() + for _, e := range test.Edges { + g.SetEdge(e) + } + + pt, ok := BellmanFordFrom(test.Query.From(), g.(graph.Graph)) + if test.HasNegativeCycle { + if ok { + t.Errorf("%q: expected negative cycle", test.Name) + } + continue + } + if !ok { + t.Fatalf("%q: unexpected negative cycle", test.Name) + } + + if pt.From().ID() != test.Query.From().ID() { + t.Fatalf("%q: unexpected from node ID: got:%d want:%d", pt.From().ID(), test.Query.From().ID()) + } + + p, weight := pt.To(test.Query.To()) + if weight != test.Weight { + t.Errorf("%q: unexpected weight from Between: got:%f want:%f", + test.Name, weight, test.Weight) + } + if weight := pt.WeightTo(test.Query.To()); weight != test.Weight { + t.Errorf("%q: unexpected weight from Weight: got:%f want:%f", + test.Name, weight, test.Weight) + } + + var got []int + for _, n := range p { + got = append(got, n.ID()) + } + ok = len(got) == 0 && len(test.WantPaths) == 0 + for _, sp := range test.WantPaths { + if reflect.DeepEqual(got, sp) { + ok = true + break + } + } + if !ok { + t.Errorf("%q: unexpected shortest path:\ngot: %v\nwant from:%v", + test.Name, p, test.WantPaths) + } + + np, weight := pt.To(test.NoPathFor.To()) + if pt.From().ID() == test.NoPathFor.From().ID() && (np != nil || !math.IsInf(weight, 1)) { + t.Errorf("%q: unexpected path:\ngot: path=%v weight=%f\nwant:path= weight=+Inf", + test.Name, np, weight) + } + } +} diff --git a/graph/path/bench_test.go b/graph/path/bench_test.go new file mode 100644 index 00000000..103a4713 --- /dev/null +++ b/graph/path/bench_test.go @@ -0,0 +1,142 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
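The benchmarks in the file below can be exercised with the standard testing flags, for example go test -run NONE -bench AStar in the path package; the Gnp and NavigableSmallWorld fixtures are built once as package-level variables so repeated benchmark runs share them.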
+ +package path + +import ( + "math" + "testing" + + "github.com/gonum/graph" + "github.com/gonum/graph/graphs/gen" + "github.com/gonum/graph/simple" +) + +var ( + gnpUndirected_10_tenth = gnpUndirected(10, 0.1) + gnpUndirected_100_tenth = gnpUndirected(100, 0.1) + gnpUndirected_1000_tenth = gnpUndirected(1000, 0.1) + gnpUndirected_10_half = gnpUndirected(10, 0.5) + gnpUndirected_100_half = gnpUndirected(100, 0.5) + gnpUndirected_1000_half = gnpUndirected(1000, 0.5) +) + +func gnpUndirected(n int, p float64) graph.Undirected { + g := simple.NewUndirectedGraph(0, math.Inf(1)) + gen.Gnp(g, n, p, nil) + return g +} + +func benchmarkAStarNilHeuristic(b *testing.B, g graph.Undirected) { + var expanded int + for i := 0; i < b.N; i++ { + _, expanded = AStar(simple.Node(0), simple.Node(1), g, nil) + } + if expanded == 0 { + b.Fatal("unexpected number of expanded nodes") + } +} + +func BenchmarkAStarGnp_10_tenth(b *testing.B) { + benchmarkAStarNilHeuristic(b, gnpUndirected_10_tenth) +} +func BenchmarkAStarGnp_100_tenth(b *testing.B) { + benchmarkAStarNilHeuristic(b, gnpUndirected_100_tenth) +} +func BenchmarkAStarGnp_1000_tenth(b *testing.B) { + benchmarkAStarNilHeuristic(b, gnpUndirected_1000_tenth) +} +func BenchmarkAStarGnp_10_half(b *testing.B) { + benchmarkAStarNilHeuristic(b, gnpUndirected_10_half) +} +func BenchmarkAStarGnp_100_half(b *testing.B) { + benchmarkAStarNilHeuristic(b, gnpUndirected_100_half) +} +func BenchmarkAStarGnp_1000_half(b *testing.B) { + benchmarkAStarNilHeuristic(b, gnpUndirected_1000_half) +} + +var ( + nswUndirected_10_2_2_2 = navigableSmallWorldUndirected(10, 2, 2, 2) + nswUndirected_10_2_5_2 = navigableSmallWorldUndirected(10, 2, 5, 2) + nswUndirected_100_5_10_2 = navigableSmallWorldUndirected(100, 5, 10, 2) + nswUndirected_100_5_20_2 = navigableSmallWorldUndirected(100, 5, 20, 2) +) + +func navigableSmallWorldUndirected(n, p, q int, r float64) graph.Undirected { + g := simple.NewUndirectedGraph(0, math.Inf(1)) + gen.NavigableSmallWorld(g, []int{n, n}, p, q, r, nil) + return g +} + +func coordinatesForID(n graph.Node, c, r int) [2]int { + id := n.ID() + if id >= c*r { + panic("out of range") + } + return [2]int{id / r, id % r} +} + +// manhattanBetween returns the Manhattan distance between a and b. 
+func manhattanBetween(a, b [2]int) float64 { + var d int + for i, v := range a { + d += abs(v - b[i]) + } + return float64(d) +} + +func abs(a int) int { + if a < 0 { + return -a + } + return a +} + +func benchmarkAStarHeuristic(b *testing.B, g graph.Undirected, h Heuristic) { + var expanded int + for i := 0; i < b.N; i++ { + _, expanded = AStar(simple.Node(0), simple.Node(1), g, h) + } + if expanded == 0 { + b.Fatal("unexpected number of expanded nodes") + } +} + +func BenchmarkAStarUndirectedmallWorld_10_2_2_2(b *testing.B) { + benchmarkAStarHeuristic(b, nswUndirected_10_2_2_2, nil) +} +func BenchmarkAStarUndirectedmallWorld_10_2_2_2_Heur(b *testing.B) { + h := func(x, y graph.Node) float64 { + return manhattanBetween(coordinatesForID(x, 10, 10), coordinatesForID(y, 10, 10)) + } + benchmarkAStarHeuristic(b, nswUndirected_10_2_2_2, h) +} +func BenchmarkAStarUndirectedmallWorld_10_2_5_2(b *testing.B) { + benchmarkAStarHeuristic(b, nswUndirected_10_2_5_2, nil) +} +func BenchmarkAStarUndirectedmallWorld_10_2_5_2_Heur(b *testing.B) { + h := func(x, y graph.Node) float64 { + return manhattanBetween(coordinatesForID(x, 10, 10), coordinatesForID(y, 10, 10)) + } + benchmarkAStarHeuristic(b, nswUndirected_10_2_5_2, h) +} +func BenchmarkAStarUndirectedmallWorld_100_5_10_2(b *testing.B) { + benchmarkAStarHeuristic(b, nswUndirected_100_5_10_2, nil) +} +func BenchmarkAStarUndirectedmallWorld_100_5_10_2_Heur(b *testing.B) { + h := func(x, y graph.Node) float64 { + return manhattanBetween(coordinatesForID(x, 100, 100), coordinatesForID(y, 100, 100)) + } + benchmarkAStarHeuristic(b, nswUndirected_100_5_10_2, h) +} +func BenchmarkAStarUndirectedmallWorld_100_5_20_2(b *testing.B) { + benchmarkAStarHeuristic(b, nswUndirected_100_5_20_2, nil) +} +func BenchmarkAStarUndirectedmallWorld_100_5_20_2_Heur(b *testing.B) { + h := func(x, y graph.Node) float64 { + return manhattanBetween(coordinatesForID(x, 100, 100), coordinatesForID(y, 100, 100)) + } + benchmarkAStarHeuristic(b, nswUndirected_100_5_20_2, h) +} diff --git a/graph/path/control_flow.go b/graph/path/control_flow.go new file mode 100644 index 00000000..81ee33d7 --- /dev/null +++ b/graph/path/control_flow.go @@ -0,0 +1,118 @@ +// Copyright ©2014 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package path + +import ( + "github.com/gonum/graph" + "github.com/gonum/graph/internal/set" +) + +// Dominators returns all dominators for all nodes in g. It does not +// prune for strict post-dominators, immediate dominators etc. +// +// A dominates B if and only if the only path through B travels through A. 
+func Dominators(start graph.Node, g graph.Graph) map[int]set.Nodes { + allNodes := make(set.Nodes) + nlist := g.Nodes() + dominators := make(map[int]set.Nodes, len(nlist)) + for _, node := range nlist { + allNodes.Add(node) + } + + var to func(graph.Node) []graph.Node + switch g := g.(type) { + case graph.Directed: + to = g.To + default: + to = g.From + } + + for _, node := range nlist { + dominators[node.ID()] = make(set.Nodes) + if node.ID() == start.ID() { + dominators[node.ID()].Add(start) + } else { + dominators[node.ID()].Copy(allNodes) + } + } + + for somethingChanged := true; somethingChanged; { + somethingChanged = false + for _, node := range nlist { + if node.ID() == start.ID() { + continue + } + preds := to(node) + if len(preds) == 0 { + continue + } + tmp := make(set.Nodes).Copy(dominators[preds[0].ID()]) + for _, pred := range preds[1:] { + tmp.Intersect(tmp, dominators[pred.ID()]) + } + + dom := make(set.Nodes) + dom.Add(node) + + dom.Union(dom, tmp) + if !set.Equal(dom, dominators[node.ID()]) { + dominators[node.ID()] = dom + somethingChanged = true + } + } + } + + return dominators +} + +// PostDominators returns all post-dominators for all nodes in g. It does not +// prune for strict post-dominators, immediate post-dominators etc. +// +// A post-dominates B if and only if all paths from B travel through A. +func PostDominators(end graph.Node, g graph.Graph) map[int]set.Nodes { + allNodes := make(set.Nodes) + nlist := g.Nodes() + dominators := make(map[int]set.Nodes, len(nlist)) + for _, node := range nlist { + allNodes.Add(node) + } + + for _, node := range nlist { + dominators[node.ID()] = make(set.Nodes) + if node.ID() == end.ID() { + dominators[node.ID()].Add(end) + } else { + dominators[node.ID()].Copy(allNodes) + } + } + + for somethingChanged := true; somethingChanged; { + somethingChanged = false + for _, node := range nlist { + if node.ID() == end.ID() { + continue + } + succs := g.From(node) + if len(succs) == 0 { + continue + } + tmp := make(set.Nodes).Copy(dominators[succs[0].ID()]) + for _, succ := range succs[1:] { + tmp.Intersect(tmp, dominators[succ.ID()]) + } + + dom := make(set.Nodes) + dom.Add(node) + + dom.Union(dom, tmp) + if !set.Equal(dom, dominators[node.ID()]) { + dominators[node.ID()] = dom + somethingChanged = true + } + } + } + + return dominators +} diff --git a/graph/path/dijkstra.go b/graph/path/dijkstra.go new file mode 100644 index 00000000..6eb97a9d --- /dev/null +++ b/graph/path/dijkstra.go @@ -0,0 +1,147 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package path + +import ( + "container/heap" + + "github.com/gonum/graph" +) + +// DijkstraFrom returns a shortest-path tree for a shortest path from u to all nodes in +// the graph g. If the graph does not implement graph.Weighter, UniformCost is used. +// DijkstraFrom will panic if g has a u-reachable negative edge weight. +// +// The time complexity of DijkstrFrom is O(|E|.log|V|). +func DijkstraFrom(u graph.Node, g graph.Graph) Shortest { + if !g.Has(u) { + return Shortest{from: u} + } + var weight Weighting + if wg, ok := g.(graph.Weighter); ok { + weight = wg.Weight + } else { + weight = UniformCost(g) + } + + nodes := g.Nodes() + path := newShortestFrom(u, nodes) + + // Dijkstra's algorithm here is implemented essentially as + // described in Function B.2 in figure 6 of UTCS Technical + // Report TR-07-54. 
+ // + // This implementation deviates from the report as follows: + // - the value of path.dist for the start vertex u is initialized to 0; + // - outdated elements from the priority queue (i.e. with respect to the dist value) + // are skipped. + // + // http://www.cs.utexas.edu/ftp/techreports/tr07-54.pdf + Q := priorityQueue{{node: u, dist: 0}} + for Q.Len() != 0 { + mid := heap.Pop(&Q).(distanceNode) + k := path.indexOf[mid.node.ID()] + if mid.dist > path.dist[k] { + continue + } + for _, v := range g.From(mid.node) { + j := path.indexOf[v.ID()] + w, ok := weight(mid.node, v) + if !ok { + panic("dijkstra: unexpected invalid weight") + } + if w < 0 { + panic("dijkstra: negative edge weight") + } + joint := path.dist[k] + w + if joint < path.dist[j] { + heap.Push(&Q, distanceNode{node: v, dist: joint}) + path.set(j, joint, k) + } + } + } + + return path +} + +// DijkstraAllPaths returns a shortest-path tree for shortest paths in the graph g. +// If the graph does not implement graph.Weighter, UniformCost is used. +// DijkstraAllPaths will panic if g has a negative edge weight. +// +// The time complexity of DijkstrAllPaths is O(|V|.|E|+|V|^2.log|V|). +func DijkstraAllPaths(g graph.Graph) (paths AllShortest) { + paths = newAllShortest(g.Nodes(), false) + dijkstraAllPaths(g, paths) + return paths +} + +// dijkstraAllPaths is the all-paths implementation of Dijkstra. It is shared +// between DijkstraAllPaths and JohnsonAllPaths to avoid repeated allocation +// of the nodes slice and the indexOf map. It returns nothing, but stores the +// result of the work in the paths parameter which is a reference type. +func dijkstraAllPaths(g graph.Graph, paths AllShortest) { + var weight Weighting + if wg, ok := g.(graph.Weighter); ok { + weight = wg.Weight + } else { + weight = UniformCost(g) + } + + var Q priorityQueue + for i, u := range paths.nodes { + // Dijkstra's algorithm here is implemented essentially as + // described in Function B.2 in figure 6 of UTCS Technical + // Report TR-07-54 with the addition of handling multiple + // co-equal paths. + // + // http://www.cs.utexas.edu/ftp/techreports/tr07-54.pdf + + // Q must be empty at this point. + heap.Push(&Q, distanceNode{node: u, dist: 0}) + for Q.Len() != 0 { + mid := heap.Pop(&Q).(distanceNode) + k := paths.indexOf[mid.node.ID()] + if mid.dist < paths.dist.At(i, k) { + paths.dist.Set(i, k, mid.dist) + } + for _, v := range g.From(mid.node) { + j := paths.indexOf[v.ID()] + w, ok := weight(mid.node, v) + if !ok { + panic("dijkstra: unexpected invalid weight") + } + if w < 0 { + panic("dijkstra: negative edge weight") + } + joint := paths.dist.At(i, k) + w + if joint < paths.dist.At(i, j) { + heap.Push(&Q, distanceNode{node: v, dist: joint}) + paths.set(i, j, joint, k) + } else if joint == paths.dist.At(i, j) { + paths.add(i, j, k) + } + } + } + } +} + +type distanceNode struct { + node graph.Node + dist float64 +} + +// priorityQueue implements a no-dec priority queue. 
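A usage sketch for DijkstraFrom and DijkstraAllPaths defined above (not part of this patch), again assuming the simple graph types used by the tests that follow:

package main

import (
	"fmt"
	"math"

	"github.com/gonum/graph/path"
	"github.com/gonum/graph/simple"
)

func main() {
	g := simple.NewUndirectedGraph(0, math.Inf(1))
	g.SetEdge(simple.Edge{F: simple.Node(0), T: simple.Node(1), W: 1})
	g.SetEdge(simple.Edge{F: simple.Node(1), T: simple.Node(2), W: 1})
	g.SetEdge(simple.Edge{F: simple.Node(0), T: simple.Node(2), W: 3})

	// Single-source shortest paths from node 0.
	pt := path.DijkstraFrom(simple.Node(0), g)
	nodes, weight := pt.To(simple.Node(2))
	fmt.Println(nodes, weight) // The path 0->1->2 with weight 2.

	// All-pairs shortest paths.
	all := path.DijkstraAllPaths(g)
	p, w, unique := all.Between(simple.Node(2), simple.Node(0))
	fmt.Println(p, w, unique) // The path 2->1->0 with weight 2, unique=true.
}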
+type priorityQueue []distanceNode + +func (q priorityQueue) Len() int { return len(q) } +func (q priorityQueue) Less(i, j int) bool { return q[i].dist < q[j].dist } +func (q priorityQueue) Swap(i, j int) { q[i], q[j] = q[j], q[i] } +func (q *priorityQueue) Push(n interface{}) { *q = append(*q, n.(distanceNode)) } +func (q *priorityQueue) Pop() interface{} { + t := *q + var n interface{} + n, *q = t[len(t)-1], t[:len(t)-1] + return n +} diff --git a/graph/path/dijkstra_test.go b/graph/path/dijkstra_test.go new file mode 100644 index 00000000..931a1411 --- /dev/null +++ b/graph/path/dijkstra_test.go @@ -0,0 +1,178 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package path + +import ( + "math" + "reflect" + "sort" + "testing" + + "github.com/gonum/graph" + "github.com/gonum/graph/internal/ordered" + "github.com/gonum/graph/path/internal/testgraphs" +) + +func TestDijkstraFrom(t *testing.T) { + for _, test := range testgraphs.ShortestPathTests { + g := test.Graph() + for _, e := range test.Edges { + g.SetEdge(e) + } + + var ( + pt Shortest + + panicked bool + ) + func() { + defer func() { + panicked = recover() != nil + }() + pt = DijkstraFrom(test.Query.From(), g.(graph.Graph)) + }() + if panicked || test.HasNegativeWeight { + if !test.HasNegativeWeight { + t.Errorf("%q: unexpected panic", test.Name) + } + if !panicked { + t.Errorf("%q: expected panic for negative edge weight", test.Name) + } + continue + } + + if pt.From().ID() != test.Query.From().ID() { + t.Fatalf("%q: unexpected from node ID: got:%d want:%d", pt.From().ID(), test.Query.From().ID()) + } + + p, weight := pt.To(test.Query.To()) + if weight != test.Weight { + t.Errorf("%q: unexpected weight from Between: got:%f want:%f", + test.Name, weight, test.Weight) + } + if weight := pt.WeightTo(test.Query.To()); weight != test.Weight { + t.Errorf("%q: unexpected weight from Weight: got:%f want:%f", + test.Name, weight, test.Weight) + } + + var got []int + for _, n := range p { + got = append(got, n.ID()) + } + ok := len(got) == 0 && len(test.WantPaths) == 0 + for _, sp := range test.WantPaths { + if reflect.DeepEqual(got, sp) { + ok = true + break + } + } + if !ok { + t.Errorf("%q: unexpected shortest path:\ngot: %v\nwant from:%v", + test.Name, p, test.WantPaths) + } + + np, weight := pt.To(test.NoPathFor.To()) + if pt.From().ID() == test.NoPathFor.From().ID() && (np != nil || !math.IsInf(weight, 1)) { + t.Errorf("%q: unexpected path:\ngot: path=%v weight=%f\nwant:path= weight=+Inf", + test.Name, np, weight) + } + } +} + +func TestDijkstraAllPaths(t *testing.T) { + for _, test := range testgraphs.ShortestPathTests { + g := test.Graph() + for _, e := range test.Edges { + g.SetEdge(e) + } + + var ( + pt AllShortest + + panicked bool + ) + func() { + defer func() { + panicked = recover() != nil + }() + pt = DijkstraAllPaths(g.(graph.Graph)) + }() + if panicked || test.HasNegativeWeight { + if !test.HasNegativeWeight { + t.Errorf("%q: unexpected panic", test.Name) + } + if !panicked { + t.Errorf("%q: expected panic for negative edge weight", test.Name) + } + continue + } + + // Check all random paths returned are OK. 
+ for i := 0; i < 10; i++ { + p, weight, unique := pt.Between(test.Query.From(), test.Query.To()) + if weight != test.Weight { + t.Errorf("%q: unexpected weight from Between: got:%f want:%f", + test.Name, weight, test.Weight) + } + if weight := pt.Weight(test.Query.From(), test.Query.To()); weight != test.Weight { + t.Errorf("%q: unexpected weight from Weight: got:%f want:%f", + test.Name, weight, test.Weight) + } + if unique != test.HasUniquePath { + t.Errorf("%q: unexpected number of paths: got: unique=%t want: unique=%t", + test.Name, unique, test.HasUniquePath) + } + + var got []int + for _, n := range p { + got = append(got, n.ID()) + } + ok := len(got) == 0 && len(test.WantPaths) == 0 + for _, sp := range test.WantPaths { + if reflect.DeepEqual(got, sp) { + ok = true + break + } + } + if !ok { + t.Errorf("%q: unexpected shortest path:\ngot: %v\nwant from:%v", + test.Name, p, test.WantPaths) + } + } + + np, weight, unique := pt.Between(test.NoPathFor.From(), test.NoPathFor.To()) + if np != nil || !math.IsInf(weight, 1) || unique != false { + t.Errorf("%q: unexpected path:\ngot: path=%v weight=%f unique=%t\nwant:path= weight=+Inf unique=false", + test.Name, np, weight, unique) + } + + paths, weight := pt.AllBetween(test.Query.From(), test.Query.To()) + if weight != test.Weight { + t.Errorf("%q: unexpected weight from Between: got:%f want:%f", + test.Name, weight, test.Weight) + } + + var got [][]int + if len(paths) != 0 { + got = make([][]int, len(paths)) + } + for i, p := range paths { + for _, v := range p { + got[i] = append(got[i], v.ID()) + } + } + sort.Sort(ordered.BySliceValues(got)) + if !reflect.DeepEqual(got, test.WantPaths) { + t.Errorf("testing %q: unexpected shortest paths:\ngot: %v\nwant:%v", + test.Name, got, test.WantPaths) + } + + nps, weight := pt.AllBetween(test.NoPathFor.From(), test.NoPathFor.To()) + if nps != nil || !math.IsInf(weight, 1) { + t.Errorf("%q: unexpected path:\ngot: paths=%v weight=%f\nwant:path= weight=+Inf", + test.Name, nps, weight) + } + } +} diff --git a/graph/path/disjoint.go b/graph/path/disjoint.go new file mode 100644 index 00000000..1840d380 --- /dev/null +++ b/graph/path/disjoint.go @@ -0,0 +1,87 @@ +// Copyright ©2014 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package path + +// A disjoint set is a collection of non-overlapping sets. That is, for any two sets in the +// disjoint set, their intersection is the empty set. +// +// A disjoint set has three principle operations: Make Set, Find, and Union. +// +// Make set creates a new set for an element (presuming it does not already exist in any set in +// the disjoint set), Find finds the set containing that element (if any), and Union merges two +// sets in the disjoint set. In general, algorithms operating on disjoint sets are "union-find" +// algorithms, where two sets are found with Find, and then joined with Union. +// +// A concrete example of a union-find algorithm can be found as discrete.Kruskal -- which unions +// two sets when an edge is created between two vertices, and refuses to make an edge between two +// vertices if they're part of the same set. 
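A short sketch of the make-set, find and union operations described above, written against the unexported type defined below; disjointSketch is a hypothetical helper for illustration only (not part of this patch) and mirrors disjoint_test.go:

// disjointSketch is a hypothetical in-package helper showing the three
// union-find operations; it is not part of this patch.
func disjointSketch() (joined, separate bool) {
	ds := newDisjointSet()
	ds.makeSet(1)
	ds.makeSet(2)
	ds.makeSet(3)

	// Union the sets containing 1 and 2; 3 stays in its own set.
	ds.union(ds.find(1), ds.find(2))

	// Returns (true, true): 1 and 2 now share a root, 3 does not.
	return ds.find(1) == ds.find(2), ds.find(1) != ds.find(3)
}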
+type disjointSet struct { + master map[int]*disjointSetNode +} + +type disjointSetNode struct { + parent *disjointSetNode + rank int +} + +func newDisjointSet() *disjointSet { + return &disjointSet{master: make(map[int]*disjointSetNode)} +} + +// If the element isn't already somewhere in there, adds it to the master set and its own tiny set. +func (ds *disjointSet) makeSet(e int) { + if _, ok := ds.master[e]; ok { + return + } + dsNode := &disjointSetNode{rank: 0} + dsNode.parent = dsNode + ds.master[e] = dsNode +} + +// Returns the set the element belongs to, or nil if none. +func (ds *disjointSet) find(e int) *disjointSetNode { + dsNode, ok := ds.master[e] + if !ok { + return nil + } + + return find(dsNode) +} + +func find(dsNode *disjointSetNode) *disjointSetNode { + if dsNode.parent != dsNode { + dsNode.parent = find(dsNode.parent) + } + + return dsNode.parent +} + +// Unions two subsets within the disjointSet. +// +// If x or y are not in this disjoint set, the behavior is undefined. If either pointer is nil, +// this function will panic. +func (ds *disjointSet) union(x, y *disjointSetNode) { + if x == nil || y == nil { + panic("Disjoint Set union on nil sets") + } + xRoot := find(x) + yRoot := find(y) + if xRoot == nil || yRoot == nil { + return + } + + if xRoot == yRoot { + return + } + + if xRoot.rank < yRoot.rank { + xRoot.parent = yRoot + } else if yRoot.rank < xRoot.rank { + yRoot.parent = xRoot + } else { + yRoot.parent = xRoot + xRoot.rank++ + } +} diff --git a/graph/path/disjoint_test.go b/graph/path/disjoint_test.go new file mode 100644 index 00000000..2639592a --- /dev/null +++ b/graph/path/disjoint_test.go @@ -0,0 +1,63 @@ +// Copyright ©2014 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package path + +import ( + "testing" +) + +func TestDisjointSetMakeSet(t *testing.T) { + ds := newDisjointSet() + if ds.master == nil { + t.Fatal("Internal disjoint set map erroneously nil") + } else if len(ds.master) != 0 { + t.Error("Disjoint set master map of wrong size") + } + + ds.makeSet(3) + if len(ds.master) != 1 { + t.Error("Disjoint set master map of wrong size") + } + + if node, ok := ds.master[3]; !ok { + t.Error("Make set did not successfully add element") + } else { + if node == nil { + t.Fatal("Disjoint set node from makeSet is nil") + } + + if node.rank != 0 { + t.Error("Node rank set incorrectly") + } + + if node.parent != node { + t.Error("Node parent set incorrectly") + } + } +} + +func TestDisjointSetFind(t *testing.T) { + ds := newDisjointSet() + + ds.makeSet(3) + ds.makeSet(5) + + if ds.find(3) == ds.find(5) { + t.Error("Disjoint sets incorrectly found to be the same") + } +} + +func TestUnion(t *testing.T) { + ds := newDisjointSet() + + ds.makeSet(3) + ds.makeSet(5) + + ds.union(ds.find(3), ds.find(5)) + + if ds.find(3) != ds.find(5) { + t.Error("Sets found to be disjoint after union") + } +} diff --git a/graph/path/doc.go b/graph/path/doc.go new file mode 100644 index 00000000..2a86fed0 --- /dev/null +++ b/graph/path/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2016 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package path provides graph path finding functions. 
+package path diff --git a/graph/path/dynamic/doc.go b/graph/path/dynamic/doc.go new file mode 100644 index 00000000..597bd956 --- /dev/null +++ b/graph/path/dynamic/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2016 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package dynamic provides incremental heuristic graph path finding functions. +package dynamic diff --git a/graph/path/dynamic/dstarlite.go b/graph/path/dynamic/dstarlite.go new file mode 100644 index 00000000..c7d7a9cc --- /dev/null +++ b/graph/path/dynamic/dstarlite.go @@ -0,0 +1,491 @@ +// Copyright ©2014 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package dynamic + +import ( + "container/heap" + "fmt" + "math" + + "github.com/gonum/graph" + "github.com/gonum/graph/path" + "github.com/gonum/graph/simple" +) + +// DStarLite implements the D* Lite dynamic re-planning path search algorithm. +// +// doi:10.1109/tro.2004.838026 and ISBN:0-262-51129-0 pp476-483 +// +type DStarLite struct { + s, t *dStarLiteNode + last *dStarLiteNode + + model WorldModel + queue dStarLiteQueue + keyModifier float64 + + weight path.Weighting + heuristic path.Heuristic +} + +// WorldModel is a mutable weighted directed graph that returns nodes identified +// by id number. +type WorldModel interface { + graph.DirectedBuilder + graph.Weighter + Node(id int) graph.Node +} + +// NewDStarLite returns a new DStarLite planner for the path from s to t in g using the +// heuristic h. The world model, m, is used to store shortest path information during path +// planning. The world model must be an empty graph when NewDStarLite is called. +// +// If h is nil, the DStarLite will use the g.HeuristicCost method if g implements +// path.HeuristicCoster, falling back to path.NullHeuristic otherwise. If the graph does not +// implement graph.Weighter, path.UniformCost is used. NewDStarLite will panic if g has +// a negative edge weight. +func NewDStarLite(s, t graph.Node, g graph.Graph, h path.Heuristic, m WorldModel) *DStarLite { + /* + procedure Initialize() + {02”} U = ∅; + {03”} k_m = 0; + {04”} for all s ∈ S rhs(s) = g(s) = ∞; + {05”} rhs(s_goal) = 0; + {06”} U.Insert(s_goal, [h(s_start, s_goal); 0]); + */ + + d := &DStarLite{ + s: newDStarLiteNode(s), + t: newDStarLiteNode(t), // badKey is overwritten below. 
+ + model: m, + + heuristic: h, + } + d.t.rhs = 0 + + /* + procedure Main() + {29”} s_last = s_start; + {30”} Initialize(); + */ + d.last = d.s + + if wg, ok := g.(graph.Weighter); ok { + d.weight = wg.Weight + } else { + d.weight = path.UniformCost(g) + } + if d.heuristic == nil { + if g, ok := g.(path.HeuristicCoster); ok { + d.heuristic = g.HeuristicCost + } else { + d.heuristic = path.NullHeuristic + } + } + + d.queue.insert(d.t, key{d.heuristic(s, t), 0}) + + for _, n := range g.Nodes() { + switch n.ID() { + case d.s.ID(): + d.model.AddNode(d.s) + case d.t.ID(): + d.model.AddNode(d.t) + default: + d.model.AddNode(newDStarLiteNode(n)) + } + } + for _, u := range d.model.Nodes() { + for _, v := range g.From(u) { + w := edgeWeight(d.weight, u, v) + if w < 0 { + panic("D* Lite: negative edge weight") + } + d.model.SetEdge(simple.Edge{F: u, T: d.model.Node(v.ID()), W: w}) + } + } + + /* + procedure Main() + {31”} ComputeShortestPath(); + */ + d.findShortestPath() + + return d +} + +// edgeWeight is a helper function that returns the weight of the edge between +// two connected nodes, u and v, using the provided weight function. It panics +// if there is no edge between u and v. +func edgeWeight(weight path.Weighting, u, v graph.Node) float64 { + w, ok := weight(u, v) + if !ok { + panic("D* Lite: unexpected invalid weight") + } + return w +} + +// keyFor is the CalculateKey procedure in the D* Lite papers. +func (d *DStarLite) keyFor(s *dStarLiteNode) key { + /* + procedure CalculateKey(s) + {01”} return [min(g(s), rhs(s)) + h(s_start, s) + k_m; min(g(s), rhs(s))]; + */ + k := key{1: math.Min(s.g, s.rhs)} + k[0] = k[1] + d.heuristic(d.s.Node, s.Node) + d.keyModifier + return k +} + +// update is the UpdateVertex procedure in the D* Lite papers. +func (d *DStarLite) update(u *dStarLiteNode) { + /* + procedure UpdateVertex(u) + {07”} if (g(u) != rhs(u) AND u ∈ U) U.Update(u,CalculateKey(u)); + {08”} else if (g(u) != rhs(u) AND u /∈ U) U.Insert(u,CalculateKey(u)); + {09”} else if (g(u) = rhs(u) AND u ∈ U) U.Remove(u); + */ + inQueue := u.inQueue() + switch { + case inQueue && u.g != u.rhs: + d.queue.update(u, d.keyFor(u)) + case !inQueue && u.g != u.rhs: + d.queue.insert(u, d.keyFor(u)) + case inQueue && u.g == u.rhs: + d.queue.remove(u) + } +} + +// findShortestPath is the ComputeShortestPath procedure in the D* Lite papers. +func (d *DStarLite) findShortestPath() { + /* + procedure ComputeShortestPath() + {10”} while (U.TopKey() < CalculateKey(s_start) OR rhs(s_start) > g(s_start)) + {11”} u = U.Top(); + {12”} k_old = U.TopKey(); + {13”} k_new = CalculateKey(u); + {14”} if(k_old < k_new) + {15”} U.Update(u, k_new); + {16”} else if (g(u) > rhs(u)) + {17”} g(u) = rhs(u); + {18”} U.Remove(u); + {19”} for all s ∈ Pred(u) + {20”} if (s != s_goal) rhs(s) = min(rhs(s), c(s, u) + g(u)); + {21”} UpdateVertex(s); + {22”} else + {23”} g_old = g(u); + {24”} g(u) = ∞; + {25”} for all s ∈ Pred(u) ∪ {u} + {26”} if (rhs(s) = c(s, u) + g_old) + {27”} if (s != s_goal) rhs(s) = min s'∈Succ(s)(c(s, s') + g(s')); + {28”} UpdateVertex(s); + */ + for d.queue.Len() != 0 { // We use d.queue.Len since d.queue does not return an infinite key when empty. 
+ u := d.queue.top() + if !u.key.less(d.keyFor(d.s)) && d.s.rhs <= d.s.g { + break + } + switch kNew := d.keyFor(u); { + case u.key.less(kNew): + d.queue.update(u, kNew) + case u.g > u.rhs: + u.g = u.rhs + d.queue.remove(u) + for _, _s := range d.model.To(u) { + s := _s.(*dStarLiteNode) + if s.ID() != d.t.ID() { + s.rhs = math.Min(s.rhs, edgeWeight(d.model.Weight, s, u)+u.g) + } + d.update(s) + } + default: + gOld := u.g + u.g = math.Inf(1) + for _, _s := range append(d.model.To(u), u) { + s := _s.(*dStarLiteNode) + if s.rhs == edgeWeight(d.model.Weight, s, u)+gOld { + if s.ID() != d.t.ID() { + s.rhs = math.Inf(1) + for _, t := range d.model.From(s) { + s.rhs = math.Min(s.rhs, edgeWeight(d.model.Weight, s, t)+t.(*dStarLiteNode).g) + } + } + } + d.update(s) + } + } + } +} + +// Step performs one movement step along the best path towards the goal. +// It returns false if no further progression toward the goal can be +// achieved, either because the goal has been reached or because there +// is no path. +func (d *DStarLite) Step() bool { + /* + procedure Main() + {32”} while (s_start != s_goal) + {33”} // if (rhs(s_start) = ∞) then there is no known path + {34”} s_start = argmin s'∈Succ(s_start)(c(s_start, s') + g(s')); + */ + if d.s.ID() == d.t.ID() { + return false + } + if math.IsInf(d.s.rhs, 1) { + return false + } + + // We use rhs comparison to break ties + // between coequally weighted nodes. + rhs := math.Inf(1) + min := math.Inf(1) + + var next *dStarLiteNode + for _, _s := range d.model.From(d.s) { + s := _s.(*dStarLiteNode) + w := edgeWeight(d.model.Weight, d.s, s) + s.g + if w < min || (w == min && s.rhs < rhs) { + next = s + min = w + rhs = s.rhs + } + } + d.s = next + + /* + procedure Main() + {35”} Move to s_start; + */ + return true +} + +// MoveTo moves to n in the world graph. +func (d *DStarLite) MoveTo(n graph.Node) { + d.last = d.s + d.s = d.model.Node(n.ID()).(*dStarLiteNode) + d.keyModifier += d.heuristic(d.last, d.s) +} + +// UpdateWorld updates or adds edges in the world graph. UpdateWorld will +// panic if changes include a negative edge weight. 
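A usage sketch of the planner loop (not part of this patch). Here the graph is fully known up front, so the loop never needs UpdateWorld; with partial knowledge, the edge changes observed after each move would be passed to UpdateWorld as in the tests later in this diff:

package main

import (
	"fmt"
	"math"

	"github.com/gonum/graph/path"
	"github.com/gonum/graph/path/dynamic"
	"github.com/gonum/graph/simple"
)

func main() {
	// The graph to traverse.
	g := simple.NewDirectedGraph(0, math.Inf(1))
	g.SetEdge(simple.Edge{F: simple.Node(0), T: simple.Node(1), W: 1})
	g.SetEdge(simple.Edge{F: simple.Node(1), T: simple.Node(2), W: 1})
	g.SetEdge(simple.Edge{F: simple.Node(0), T: simple.Node(2), W: 5})

	// The world model must be an empty graph when NewDStarLite is called.
	world := simple.NewDirectedGraph(0, math.Inf(1))
	d := dynamic.NewDStarLite(simple.Node(0), simple.Node(2), g, path.NullHeuristic, world)

	for d.Step() {
		fmt.Println("moved to", d.Here().ID()) // Visits node 1, then the goal node 2.
	}
	p, weight := d.Path()
	fmt.Println(p, weight) // The remaining path from the current location: just the goal, weight 0.
}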
+func (d *DStarLite) UpdateWorld(changes []graph.Edge) { + /* + procedure Main() + {36”} Scan graph for changed edge costs; + {37”} if any edge costs changed + {38”} k_m = k_m + h(s_last, s_start); + {39”} s_last = s_start; + {40”} for all directed edges (u, v) with changed edge costs + {41”} c_old = c(u, v); + {42”} Update the edge cost c(u, v); + {43”} if (c_old > c(u, v)) + {44”} if (u != s_goal) rhs(u) = min(rhs(u), c(u, v) + g(v)); + {45”} else if (rhs(u) = c_old + g(v)) + {46”} if (u != s_goal) rhs(u) = min s'∈Succ(u)(c(u, s') + g(s')); + {47”} UpdateVertex(u); + {48”} ComputeShortestPath() + */ + if len(changes) == 0 { + return + } + d.keyModifier += d.heuristic(d.last, d.s) + d.last = d.s + for _, e := range changes { + from := e.From() + to := e.To() + c, _ := d.weight(from, to) + if c < 0 { + panic("D* Lite: negative edge weight") + } + cOld, _ := d.model.Weight(from, to) + u := d.worldNodeFor(from) + v := d.worldNodeFor(to) + d.model.SetEdge(simple.Edge{F: u, T: v, W: c}) + if cOld > c { + if u.ID() != d.t.ID() { + u.rhs = math.Min(u.rhs, c+v.g) + } + } else if u.rhs == cOld+v.g { + if u.ID() != d.t.ID() { + u.rhs = math.Inf(1) + for _, t := range d.model.From(u) { + u.rhs = math.Min(u.rhs, edgeWeight(d.model.Weight, u, t)+t.(*dStarLiteNode).g) + } + } + } + d.update(u) + } + d.findShortestPath() +} + +func (d *DStarLite) worldNodeFor(n graph.Node) *dStarLiteNode { + switch w := d.model.Node(n.ID()).(type) { + case *dStarLiteNode: + return w + case graph.Node: + panic(fmt.Sprintf("D* Lite: illegal world model node type: %T", w)) + default: + return newDStarLiteNode(n) + } +} + +// Here returns the current location. +func (d *DStarLite) Here() graph.Node { + return d.s.Node +} + +// Path returns the path from the current location to the goal and the +// weight of the path. +func (d *DStarLite) Path() (p []graph.Node, weight float64) { + u := d.s + p = []graph.Node{u.Node} + for u.ID() != d.t.ID() { + if math.IsInf(u.rhs, 1) { + return nil, math.Inf(1) + } + + // We use stored rhs comparison to break + // ties between calculated rhs-coequal nodes. + rhsMin := math.Inf(1) + min := math.Inf(1) + var ( + next *dStarLiteNode + cost float64 + ) + for _, _v := range d.model.From(u) { + v := _v.(*dStarLiteNode) + w := edgeWeight(d.model.Weight, u, v) + if rhs := w + v.g; rhs < min || (rhs == min && v.rhs < rhsMin) { + next = v + min = rhs + rhsMin = v.rhs + cost = w + } + } + if next == nil { + return nil, math.NaN() + } + u = next + weight += cost + p = append(p, u.Node) + } + return p, weight +} + +/* +The pseudocode uses the following functions to manage the priority +queue: + + * U.Top() returns a vertex with the smallest priority of all + vertices in priority queue U. + * U.TopKey() returns the smallest priority of all vertices in + priority queue U. (If is empty, then U.TopKey() returns [∞;∞].) + * U.Pop() deletes the vertex with the smallest priority in + priority queue U and returns the vertex. + * U.Insert(s, k) inserts vertex s into priority queue with + priority k. + * U.Update(s, k) changes the priority of vertex s in priority + queue U to k. (It does nothing if the current priority of vertex + s already equals k.) + * Finally, U.Remove(s) removes vertex s from priority queue U. +*/ + +// key is a D* Lite priority queue key. +type key [2]float64 + +var badKey = key{math.NaN(), math.NaN()} + +// less returns whether k is less than other. 
From ISBN:0-262-51129-0 pp476-483: +// +// k ≤ k' iff k₁ < k'₁ OR (k₁ == k'₁ AND k₂ ≤ k'₂) +// +func (k key) less(other key) bool { + if k != k || other != other { + panic("D* Lite: poisoned key") + } + return k[0] < other[0] || (k[0] == other[0] && k[1] < other[1]) +} + +// dStarLiteNode adds D* Lite accounting to a graph.Node. +type dStarLiteNode struct { + graph.Node + key key + idx int + rhs float64 + g float64 +} + +// newDStarLiteNode returns a dStarLite node that is in a legal state +// for existence outside the DStarLite priority queue. +func newDStarLiteNode(n graph.Node) *dStarLiteNode { + return &dStarLiteNode{ + Node: n, + rhs: math.Inf(1), + g: math.Inf(1), + key: badKey, + idx: -1, + } +} + +// inQueue returns whether the node is in the queue. +func (q *dStarLiteNode) inQueue() bool { + return q.idx >= 0 +} + +// dStarLiteQueue is a D* Lite priority queue. +type dStarLiteQueue []*dStarLiteNode + +func (q dStarLiteQueue) Less(i, j int) bool { + return q[i].key.less(q[j].key) +} + +func (q dStarLiteQueue) Swap(i, j int) { + q[i], q[j] = q[j], q[i] + q[i].idx = i + q[j].idx = j +} + +func (q dStarLiteQueue) Len() int { + return len(q) +} + +func (q *dStarLiteQueue) Push(x interface{}) { + n := x.(*dStarLiteNode) + n.idx = len(*q) + *q = append(*q, n) +} + +func (q *dStarLiteQueue) Pop() interface{} { + n := (*q)[len(*q)-1] + n.idx = -1 + *q = (*q)[:len(*q)-1] + return n +} + +// top returns the top node in the queue. Note that instead of +// returning a key [∞;∞] when q is empty, the caller checks for +// an empty queue by calling q.Len. +func (q dStarLiteQueue) top() *dStarLiteNode { + return q[0] +} + +// insert puts the node u into the queue with the key k. +func (q *dStarLiteQueue) insert(u *dStarLiteNode, k key) { + u.key = k + heap.Push(q, u) +} + +// update updates the node in the queue identified by id with the key k. +func (q *dStarLiteQueue) update(n *dStarLiteNode, k key) { + n.key = k + heap.Fix(q, n.idx) +} + +// remove removes the node identified by id from the queue. +func (q *dStarLiteQueue) remove(n *dStarLiteNode) { + heap.Remove(q, n.idx) + n.key = badKey + n.idx = -1 +} diff --git a/graph/path/dynamic/dstarlite_test.go b/graph/path/dynamic/dstarlite_test.go new file mode 100644 index 00000000..e6d433ef --- /dev/null +++ b/graph/path/dynamic/dstarlite_test.go @@ -0,0 +1,685 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package dynamic + +import ( + "bytes" + "flag" + "fmt" + "math" + "reflect" + "strings" + "testing" + + "github.com/gonum/graph" + "github.com/gonum/graph/path" + "github.com/gonum/graph/path/internal" + "github.com/gonum/graph/path/internal/testgraphs" + "github.com/gonum/graph/simple" +) + +var ( + debug = flag.Bool("debug", false, "write path progress for failing dynamic case tests") + vdebug = flag.Bool("vdebug", false, "write path progress for all dynamic case tests (requires test.v)") + maxWide = flag.Int("maxwidth", 5, "maximum width grid to dump for debugging") +) + +func TestDStarLiteNullHeuristic(t *testing.T) { + for _, test := range testgraphs.ShortestPathTests { + // Skip zero-weight cycles. 
+ if strings.HasPrefix(test.Name, "zero-weight") { + continue + } + + g := test.Graph() + for _, e := range test.Edges { + g.SetEdge(e) + } + + var ( + d *DStarLite + + panicked bool + ) + func() { + defer func() { + panicked = recover() != nil + }() + d = NewDStarLite(test.Query.From(), test.Query.To(), g.(graph.Graph), path.NullHeuristic, simple.NewDirectedGraph(0, math.Inf(1))) + }() + if panicked || test.HasNegativeWeight { + if !test.HasNegativeWeight { + t.Errorf("%q: unexpected panic", test.Name) + } + if !panicked { + t.Errorf("%q: expected panic for negative edge weight", test.Name) + } + continue + } + + p, weight := d.Path() + + if !math.IsInf(weight, 1) && p[0].ID() != test.Query.From().ID() { + t.Fatalf("%q: unexpected from node ID: got:%d want:%d", p[0].ID(), test.Query.From().ID()) + } + if weight != test.Weight { + t.Errorf("%q: unexpected weight from Between: got:%f want:%f", + test.Name, weight, test.Weight) + } + + var got []int + for _, n := range p { + got = append(got, n.ID()) + } + ok := len(got) == 0 && len(test.WantPaths) == 0 + for _, sp := range test.WantPaths { + if reflect.DeepEqual(got, sp) { + ok = true + break + } + } + if !ok { + t.Errorf("%q: unexpected shortest path:\ngot: %v\nwant from:%v", + test.Name, p, test.WantPaths) + } + } +} + +var dynamicDStarLiteTests = []struct { + g *internal.Grid + radius float64 + all bool + diag, unit bool + remember []bool + modify func(*internal.LimitedVisionGrid) + + heuristic func(dx, dy float64) float64 + + s, t graph.Node + + want []graph.Node + weight float64 + wantedPaths map[int][]graph.Node +}{ + { + // This is the example shown in figures 6 and 7 of doi:10.1109/tro.2004.838026. + g: internal.NewGridFrom( + "...", + ".*.", + ".*.", + ".*.", + "...", + ), + radius: 1.5, + all: true, + diag: true, + unit: true, + remember: []bool{false, true}, + + heuristic: func(dx, dy float64) float64 { + return math.Max(math.Abs(dx), math.Abs(dy)) + }, + + s: simple.Node(3), + t: simple.Node(14), + + want: []graph.Node{ + simple.Node(3), + simple.Node(6), + simple.Node(9), + simple.Node(13), + simple.Node(14), + }, + weight: 4, + }, + { + // This is a small example that has the property that the first corner + // may be taken incorrectly at 90° or correctly at 45° because the + // calculated rhs values of 12 and 17 are tied when moving from node + // 16, and the grid is small enough to examine by a dump. + g: internal.NewGridFrom( + ".....", + "...*.", + "**.*.", + "...*.", + ), + radius: 1.5, + all: true, + diag: true, + remember: []bool{false, true}, + + heuristic: func(dx, dy float64) float64 { + return math.Max(math.Abs(dx), math.Abs(dy)) + }, + + s: simple.Node(15), + t: simple.Node(14), + + want: []graph.Node{ + simple.Node(15), + simple.Node(16), + simple.Node(12), + simple.Node(7), + simple.Node(3), + simple.Node(9), + simple.Node(14), + }, + weight: 7.242640687119285, + wantedPaths: map[int][]graph.Node{ + 12: []graph.Node{simple.Node(12), simple.Node(7), simple.Node(3), simple.Node(9), simple.Node(14)}, + }, + }, + { + // This is the example shown in figure 2 of doi:10.1109/tro.2004.838026 + // with the exception that diagonal edge weights are calculated with the hypot + // function instead of a step count and only allowing information to be known + // from exploration. 
+ g: internal.NewGridFrom( + "..................", + "..................", + "..................", + "..................", + "..................", + "..................", + "....*.*...........", + "*****.***.........", + "......*...........", + "......***.........", + "......*...........", + "......*...........", + "......*...........", + "*****.*...........", + "......*...........", + ), + radius: 1.5, + all: true, + diag: true, + remember: []bool{false, true}, + + heuristic: func(dx, dy float64) float64 { + return math.Max(math.Abs(dx), math.Abs(dy)) + }, + + s: simple.Node(253), + t: simple.Node(122), + + want: []graph.Node{ + simple.Node(253), + simple.Node(254), + simple.Node(255), + simple.Node(256), + simple.Node(239), + simple.Node(221), + simple.Node(203), + simple.Node(185), + simple.Node(167), + simple.Node(149), + simple.Node(131), + simple.Node(113), + simple.Node(96), + + // The following section depends + // on map iteration order. + nil, + nil, + nil, + nil, + nil, + nil, + nil, + + simple.Node(122), + }, + weight: 21.242640687119287, + }, + { + // This is the example shown in figure 2 of doi:10.1109/tro.2004.838026 + // with the exception that diagonal edge weights are calculated with the hypot + // function instead of a step count, not closing the exit and only allowing + // information to be known from exploration. + g: internal.NewGridFrom( + "..................", + "..................", + "..................", + "..................", + "..................", + "..................", + "....*.*...........", + "*****.***.........", + "..................", // Keep open. + "......***.........", + "......*...........", + "......*...........", + "......*...........", + "*****.*...........", + "......*...........", + ), + radius: 1.5, + all: true, + diag: true, + remember: []bool{false, true}, + + heuristic: func(dx, dy float64) float64 { + return math.Max(math.Abs(dx), math.Abs(dy)) + }, + + s: simple.Node(253), + t: simple.Node(122), + + want: []graph.Node{ + simple.Node(253), + simple.Node(254), + simple.Node(255), + simple.Node(256), + simple.Node(239), + simple.Node(221), + simple.Node(203), + simple.Node(185), + simple.Node(167), + simple.Node(150), + simple.Node(151), + simple.Node(152), + + // The following section depends + // on map iteration order. + nil, + nil, + nil, + nil, + nil, + + simple.Node(122), + }, + weight: 18.656854249492383, + }, + { + // This is the example shown in figure 2 of doi:10.1109/tro.2004.838026 + // with the exception that diagonal edge weights are calculated with the hypot + // function instead of a step count, the exit is closed at a distance and + // information is allowed to be known from exploration. 
+ g: internal.NewGridFrom( + "..................", + "..................", + "..................", + "..................", + "..................", + "..................", + "....*.*...........", + "*****.***.........", + "........*.........", + "......***.........", + "......*...........", + "......*...........", + "......*...........", + "*****.*...........", + "......*...........", + ), + radius: 1.5, + all: true, + diag: true, + remember: []bool{false, true}, + + heuristic: func(dx, dy float64) float64 { + return math.Max(math.Abs(dx), math.Abs(dy)) + }, + + s: simple.Node(253), + t: simple.Node(122), + + want: []graph.Node{ + simple.Node(253), + simple.Node(254), + simple.Node(255), + simple.Node(256), + simple.Node(239), + simple.Node(221), + simple.Node(203), + simple.Node(185), + simple.Node(167), + simple.Node(150), + simple.Node(151), + simple.Node(150), + simple.Node(131), + simple.Node(113), + simple.Node(96), + + // The following section depends + // on map iteration order. + nil, + nil, + nil, + nil, + nil, + nil, + nil, + + simple.Node(122), + }, + weight: 24.07106781186548, + }, + { + // This is the example shown in figure 2 of doi:10.1109/tro.2004.838026 + // with the exception that diagonal edge weights are calculated with the hypot + // function instead of a step count. + g: internal.NewGridFrom( + "..................", + "..................", + "..................", + "..................", + "..................", + "..................", + "....*.*...........", + "*****.***.........", + "......*...........", // Forget this wall. + "......***.........", + "......*...........", + "......*...........", + "......*...........", + "*****.*...........", + "......*...........", + ), + radius: 1.5, + all: true, + diag: true, + remember: []bool{true}, + + modify: func(l *internal.LimitedVisionGrid) { + all := l.Grid.AllVisible + l.Grid.AllVisible = false + for _, n := range l.Nodes() { + l.Known[n.ID()] = !l.Grid.Has(n) + } + l.Grid.AllVisible = all + + const ( + wallRow = 8 + wallCol = 6 + ) + l.Known[l.NodeAt(wallRow, wallCol).ID()] = false + + // Check we have a correctly modified representation. + for _, u := range l.Nodes() { + for _, v := range l.Nodes() { + if l.HasEdgeBetween(u, v) != l.Grid.HasEdgeBetween(u, v) { + ur, uc := l.RowCol(u.ID()) + vr, vc := l.RowCol(v.ID()) + if (ur == wallRow && uc == wallCol) || (vr == wallRow && vc == wallCol) { + if !l.HasEdgeBetween(u, v) { + panic(fmt.Sprintf("expected to believe edge between %v (%d,%d) and %v (%d,%d) is passable", + u, v, ur, uc, vr, vc)) + } + continue + } + panic(fmt.Sprintf("disagreement about edge between %v (%d,%d) and %v (%d,%d): got:%t want:%t", + u, v, ur, uc, vr, vc, l.HasEdgeBetween(u, v), l.Grid.HasEdgeBetween(u, v))) + } + } + } + }, + + heuristic: func(dx, dy float64) float64 { + return math.Max(math.Abs(dx), math.Abs(dy)) + }, + + s: simple.Node(253), + t: simple.Node(122), + + want: []graph.Node{ + simple.Node(253), + simple.Node(254), + simple.Node(255), + simple.Node(256), + simple.Node(239), + simple.Node(221), + simple.Node(203), + simple.Node(185), + simple.Node(167), + simple.Node(149), + simple.Node(131), + simple.Node(113), + simple.Node(96), + + // The following section depends + // on map iteration order. 
+ nil, + nil, + nil, + nil, + nil, + nil, + nil, + + simple.Node(122), + }, + weight: 21.242640687119287, + }, + { + g: internal.NewGridFrom( + "*..*", + "**.*", + "**.*", + "**.*", + ), + radius: 1, + all: true, + diag: false, + remember: []bool{false, true}, + + heuristic: func(dx, dy float64) float64 { + return math.Hypot(dx, dy) + }, + + s: simple.Node(1), + t: simple.Node(14), + + want: []graph.Node{ + simple.Node(1), + simple.Node(2), + simple.Node(6), + simple.Node(10), + simple.Node(14), + }, + weight: 4, + }, + { + g: internal.NewGridFrom( + "*..*", + "**.*", + "**.*", + "**.*", + ), + radius: 1.5, + all: true, + diag: true, + remember: []bool{false, true}, + + heuristic: func(dx, dy float64) float64 { + return math.Hypot(dx, dy) + }, + + s: simple.Node(1), + t: simple.Node(14), + + want: []graph.Node{ + simple.Node(1), + simple.Node(6), + simple.Node(10), + simple.Node(14), + }, + weight: math.Sqrt2 + 2, + }, + { + g: internal.NewGridFrom( + "...", + ".*.", + ".*.", + ".*.", + ".*.", + ), + radius: 1, + all: true, + diag: false, + remember: []bool{false, true}, + + heuristic: func(dx, dy float64) float64 { + return math.Hypot(dx, dy) + }, + + s: simple.Node(6), + t: simple.Node(14), + + want: []graph.Node{ + simple.Node(6), + simple.Node(9), + simple.Node(12), + simple.Node(9), + simple.Node(6), + simple.Node(3), + simple.Node(0), + simple.Node(1), + simple.Node(2), + simple.Node(5), + simple.Node(8), + simple.Node(11), + simple.Node(14), + }, + weight: 12, + }, +} + +func TestDStarLiteDynamic(t *testing.T) { + for i, test := range dynamicDStarLiteTests { + for _, remember := range test.remember { + l := &internal.LimitedVisionGrid{ + Grid: test.g, + VisionRadius: test.radius, + Location: test.s, + } + if remember { + l.Known = make(map[int]bool) + } + + l.Grid.AllVisible = test.all + + l.Grid.AllowDiagonal = test.diag + l.Grid.UnitEdgeWeight = test.unit + + if test.modify != nil { + test.modify(l) + } + + got := []graph.Node{test.s} + l.MoveTo(test.s) + + heuristic := func(a, b graph.Node) float64 { + ax, ay := l.XY(a) + bx, by := l.XY(b) + return test.heuristic(ax-bx, ay-by) + } + + world := simple.NewDirectedGraph(0, math.Inf(1)) + d := NewDStarLite(test.s, test.t, l, heuristic, world) + var ( + dp *dumper + buf bytes.Buffer + ) + _, c := l.Grid.Dims() + if c <= *maxWide && (*debug || *vdebug) { + dp = &dumper{ + w: &buf, + + dStarLite: d, + grid: l, + } + } + + dp.dump(true) + dp.printEdges("Initial world knowledge: %s\n\n", simpleEdgesOf(l, world.Edges())) + for d.Step() { + changes, _ := l.MoveTo(d.Here()) + got = append(got, l.Location) + d.UpdateWorld(changes) + dp.dump(true) + if wantedPath, ok := test.wantedPaths[l.Location.ID()]; ok { + gotPath, _ := d.Path() + if !samePath(gotPath, wantedPath) { + t.Errorf("unexpected intermediate path estimation for test %d %s memory:\ngot: %v\nwant:%v", + i, memory(remember), gotPath, wantedPath) + } + } + dp.printEdges("Edges changing after last step:\n%s\n\n", simpleEdgesOf(l, changes)) + } + + if weight := weightOf(got, l.Grid); !samePath(got, test.want) || weight != test.weight { + t.Errorf("unexpected path for test %d %s memory got weight:%v want weight:%v:\ngot: %v\nwant:%v", + i, memory(remember), weight, test.weight, got, test.want) + b, err := l.Render(got) + t.Errorf("path taken (err:%v):\n%s", err, b) + if c <= *maxWide && (*debug || *vdebug) { + t.Error(buf.String()) + } + } else if c <= *maxWide && *vdebug { + t.Logf("Test %d:\n%s", i, buf.String()) + } + } + } +} + +type memory bool + +func (m memory) String() string { 
+ if m { + return "with" + } + return "without" +} + +// samePath compares two paths for equality ignoring nodes that are nil. +func samePath(a, b []graph.Node) bool { + if len(a) != len(b) { + return false + } + for i, e := range a { + if e == nil || b[i] == nil { + continue + } + if e.ID() != b[i].ID() { + return false + } + } + return true +} + +type weightedGraph interface { + graph.Graph + graph.Weighter +} + +// weightOf return the weight of the path in g. +func weightOf(path []graph.Node, g weightedGraph) float64 { + var w float64 + if len(path) > 1 { + for p, n := range path[1:] { + ew, ok := g.Weight(path[p], n) + if !ok { + return math.Inf(1) + } + w += ew + } + } + return w +} + +// simpleEdgesOf returns the weighted edges in g corresponding to the given edges. +func simpleEdgesOf(g weightedGraph, edges []graph.Edge) []simple.Edge { + w := make([]simple.Edge, len(edges)) + for i, e := range edges { + w[i].F = e.From() + w[i].T = e.To() + ew, _ := g.Weight(e.From(), e.To()) + w[i].W = ew + } + return w +} diff --git a/graph/path/dynamic/dumper_test.go b/graph/path/dynamic/dumper_test.go new file mode 100644 index 00000000..7c4c0f87 --- /dev/null +++ b/graph/path/dynamic/dumper_test.go @@ -0,0 +1,153 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package dynamic + +import ( + "bytes" + "fmt" + "io" + "sort" + "text/tabwriter" + + "github.com/gonum/graph/path/internal" + "github.com/gonum/graph/simple" +) + +// dumper implements a grid D* Lite statistics dump. +type dumper struct { + step int + + dStarLite *DStarLite + grid *internal.LimitedVisionGrid + + w io.Writer +} + +// dump writes a single step of a D* Lite path search to the dumper's io.Writer. +func (d *dumper) dump(withpath bool) { + if d == nil { + return + } + var pathStep map[int]int + if withpath { + pathStep = make(map[int]int) + path, _ := d.dStarLite.Path() + for i, n := range path { + pathStep[n.ID()] = i + } + } + fmt.Fprintf(d.w, "Step:%d kₘ=%v\n", d.step, d.dStarLite.keyModifier) + d.step++ + w := tabwriter.NewWriter(d.w, 0, 0, 0, ' ', tabwriter.Debug) + rows, cols := d.grid.Grid.Dims() + for r := 0; r < rows; r++ { + if r == 0 { + for c := 0; c < cols; c++ { + if c != 0 { + fmt.Fprint(w, "\t") + } + fmt.Fprint(w, "-------------------") + } + fmt.Fprintln(w) + } + for ln := 0; ln < 6; ln++ { + for c := 0; c < cols; c++ { + if c != 0 { + fmt.Fprint(w, "\t") + } + n := d.dStarLite.model.Node(d.grid.NodeAt(r, c).ID()).(*dStarLiteNode) + switch ln { + case 0: + if n.ID() == d.grid.Location.ID() { + if d.grid.Grid.HasOpen(n) { + fmt.Fprintf(w, "id:%2d >@<", n.ID()) + } else { + // Mark location as illegal. + fmt.Fprintf(w, "id:%2d >!<", n.ID()) + } + } else if n.ID() == d.dStarLite.t.ID() { + fmt.Fprintf(w, "id:%2d G", n.ID()) + // Mark goal cell as illegal. + if !d.grid.Grid.HasOpen(n) { + fmt.Fprint(w, "!") + } + } else if pathStep[n.ID()] > 0 { + fmt.Fprintf(w, "id:%2d %2d", n.ID(), pathStep[n.ID()]) + // Mark path cells with an obstruction. + if !d.grid.Grid.HasOpen(n) { + fmt.Fprint(w, "!") + } + } else { + fmt.Fprintf(w, "id:%2d", n.ID()) + // Mark cells with an obstruction. 
+ if !d.grid.Grid.HasOpen(n) { + fmt.Fprint(w, " *") + } + } + case 1: + fmt.Fprintf(w, "h: %.4v", d.dStarLite.heuristic(n, d.dStarLite.Here())) + case 2: + fmt.Fprintf(w, "g: %.4v", n.g) + case 3: + fmt.Fprintf(w, "rhs:%.4v", n.rhs) + case 4: + if n.g != n.rhs { + fmt.Fprintf(w, "key:%.3f", n.key) + } + if n.key == n.key { + // Mark keys for nodes in the priority queue. + // We use NaN inequality for this check since all + // keys not in the queue must have their key set + // to badKey. + // + // This should always mark cells where key is + // printed. + fmt.Fprint(w, "*") + } + if n.g > n.rhs { + fmt.Fprint(w, "^") + } + if n.g < n.rhs { + fmt.Fprint(w, "v") + } + default: + fmt.Fprint(w, "-------------------") + } + } + fmt.Fprintln(w) + } + } + w.Flush() + fmt.Fprintln(d.w) +} + +// printEdges pretty prints the given edges to the dumper's io.Writer using the provided +// format string. The edges are first formated to a string, so the format string must use +// the %s verb to indicate where the edges are to be printed. +func (d *dumper) printEdges(format string, edges []simple.Edge) { + if d == nil { + return + } + var buf bytes.Buffer + sort.Sort(lexically(edges)) + for i, e := range edges { + if i != 0 { + fmt.Fprint(&buf, ", ") + } + fmt.Fprintf(&buf, "%d->%d:%.4v", e.From().ID(), e.To().ID(), e.Weight()) + } + if len(edges) == 0 { + fmt.Fprint(&buf, "none") + } + fmt.Fprintf(d.w, format, buf.Bytes()) +} + +type lexically []simple.Edge + +func (l lexically) Len() int { return len(l) } +func (l lexically) Less(i, j int) bool { + return l[i].From().ID() < l[j].From().ID() || (l[i].From().ID() == l[j].From().ID() && l[i].To().ID() < l[j].To().ID()) +} +func (l lexically) Swap(i, j int) { l[i], l[j] = l[j], l[i] } diff --git a/graph/path/floydwarshall.go b/graph/path/floydwarshall.go new file mode 100644 index 00000000..bf5f237a --- /dev/null +++ b/graph/path/floydwarshall.go @@ -0,0 +1,59 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package path + +import "github.com/gonum/graph" + +// FloydWarshall returns a shortest-path tree for the graph g or false indicating +// that a negative cycle exists in the graph. If the graph does not implement +// graph.Weighter, UniformCost is used. +// +// The time complexity of FloydWarshall is O(|V|^3). +func FloydWarshall(g graph.Graph) (paths AllShortest, ok bool) { + var weight Weighting + if wg, ok := g.(graph.Weighter); ok { + weight = wg.Weight + } else { + weight = UniformCost(g) + } + + nodes := g.Nodes() + paths = newAllShortest(nodes, true) + for i, u := range nodes { + paths.dist.Set(i, i, 0) + for _, v := range g.From(u) { + j := paths.indexOf[v.ID()] + w, ok := weight(u, v) + if !ok { + panic("floyd-warshall: unexpected invalid weight") + } + paths.set(i, j, w, j) + } + } + + for k := range nodes { + for i := range nodes { + for j := range nodes { + ij := paths.dist.At(i, j) + joint := paths.dist.At(i, k) + paths.dist.At(k, j) + if ij > joint { + paths.set(i, j, joint, paths.at(i, k)...) + } else if ij-joint == 0 { + paths.add(i, j, paths.at(i, k)...) 
+ } + } + } + } + + ok = true + for i := range nodes { + if paths.dist.At(i, i) < 0 { + ok = false + break + } + } + + return paths, ok +} diff --git a/graph/path/floydwarshall_test.go b/graph/path/floydwarshall_test.go new file mode 100644 index 00000000..5cc1851a --- /dev/null +++ b/graph/path/floydwarshall_test.go @@ -0,0 +1,102 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package path + +import ( + "math" + "reflect" + "sort" + "testing" + + "github.com/gonum/graph" + "github.com/gonum/graph/internal/ordered" + "github.com/gonum/graph/path/internal/testgraphs" +) + +func TestFloydWarshall(t *testing.T) { + for _, test := range testgraphs.ShortestPathTests { + g := test.Graph() + for _, e := range test.Edges { + g.SetEdge(e) + } + + pt, ok := FloydWarshall(g.(graph.Graph)) + if test.HasNegativeCycle { + if ok { + t.Errorf("%q: expected negative cycle", test.Name) + } + continue + } + if !ok { + t.Fatalf("%q: unexpected negative cycle", test.Name) + } + + // Check all random paths returned are OK. + for i := 0; i < 10; i++ { + p, weight, unique := pt.Between(test.Query.From(), test.Query.To()) + if weight != test.Weight { + t.Errorf("%q: unexpected weight from Between: got:%f want:%f", + test.Name, weight, test.Weight) + } + if weight := pt.Weight(test.Query.From(), test.Query.To()); weight != test.Weight { + t.Errorf("%q: unexpected weight from Weight: got:%f want:%f", + test.Name, weight, test.Weight) + } + if unique != test.HasUniquePath { + t.Errorf("%q: unexpected number of paths: got: unique=%t want: unique=%t", + test.Name, unique, test.HasUniquePath) + } + + var got []int + for _, n := range p { + got = append(got, n.ID()) + } + ok := len(got) == 0 && len(test.WantPaths) == 0 + for _, sp := range test.WantPaths { + if reflect.DeepEqual(got, sp) { + ok = true + break + } + } + if !ok { + t.Errorf("%q: unexpected shortest path:\ngot: %v\nwant from:%v", + test.Name, p, test.WantPaths) + } + } + + np, weight, unique := pt.Between(test.NoPathFor.From(), test.NoPathFor.To()) + if np != nil || !math.IsInf(weight, 1) || unique != false { + t.Errorf("%q: unexpected path:\ngot: path=%v weight=%f unique=%t\nwant:path= weight=+Inf unique=false", + test.Name, np, weight, unique) + } + + paths, weight := pt.AllBetween(test.Query.From(), test.Query.To()) + if weight != test.Weight { + t.Errorf("%q: unexpected weight from Between: got:%f want:%f", + test.Name, weight, test.Weight) + } + + var got [][]int + if len(paths) != 0 { + got = make([][]int, len(paths)) + } + for i, p := range paths { + for _, v := range p { + got[i] = append(got[i], v.ID()) + } + } + sort.Sort(ordered.BySliceValues(got)) + if !reflect.DeepEqual(got, test.WantPaths) { + t.Errorf("testing %q: unexpected shortest paths:\ngot: %v\nwant:%v", + test.Name, got, test.WantPaths) + } + + nps, weight := pt.AllBetween(test.NoPathFor.From(), test.NoPathFor.To()) + if nps != nil || !math.IsInf(weight, 1) { + t.Errorf("%q: unexpected path:\ngot: paths=%v weight=%f\nwant:path= weight=+Inf", + test.Name, nps, weight) + } + } +} diff --git a/graph/path/internal/grid.go b/graph/path/internal/grid.go new file mode 100644 index 00000000..8122ebf3 --- /dev/null +++ b/graph/path/internal/grid.go @@ -0,0 +1,286 @@ +// Copyright ©2014 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
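A usage sketch for FloydWarshall above (not part of this patch), using the same query style as the tests:

package main

import (
	"fmt"
	"math"

	"github.com/gonum/graph/path"
	"github.com/gonum/graph/simple"
)

func main() {
	g := simple.NewUndirectedGraph(0, math.Inf(1))
	g.SetEdge(simple.Edge{F: simple.Node(0), T: simple.Node(1), W: 1})
	g.SetEdge(simple.Edge{F: simple.Node(1), T: simple.Node(2), W: 2})
	g.SetEdge(simple.Edge{F: simple.Node(0), T: simple.Node(2), W: 5})

	paths, ok := path.FloydWarshall(g)
	if !ok {
		fmt.Println("negative cycle")
		return
	}
	p, weight, unique := paths.Between(simple.Node(0), simple.Node(2))
	fmt.Println(p, weight, unique) // The path 0->1->2 with weight 3, unique=true.
}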
+
+package internal
+
+import (
+ "errors"
+ "fmt"
+ "math"
+
+ "github.com/gonum/graph"
+ "github.com/gonum/graph/simple"
+)
+
+const (
+ Closed = '*' // Closed is the closed grid node representation.
+ Open = '.' // Open is the open grid node representation.
+ Unknown = '?' // Unknown is the unknown grid node representation.
+)
+
+// Grid is a 2D grid planar undirected graph.
+type Grid struct {
+ // AllowDiagonal specifies whether
+ // diagonally adjacent nodes can
+ // be connected by an edge.
+ AllowDiagonal bool
+ // UnitEdgeWeight specifies whether
+ // finite edge weights are returned as
+ // the unit length. Otherwise edge
+ // weights are the Euclidean distance
+ // between connected nodes.
+ UnitEdgeWeight bool
+
+ // AllVisible specifies whether
+ // non-open nodes are visible
+ // in calls to Nodes and Has.
+ AllVisible bool
+
+ open []bool
+ r, c int
+}
+
+// NewGrid returns an r by c grid with all positions
+// set to the specified open state.
+func NewGrid(r, c int, open bool) *Grid {
+ states := make([]bool, r*c)
+ if open {
+ for i := range states {
+ states[i] = true
+ }
+ }
+ return &Grid{
+ open: states,
+ r: r,
+ c: c,
+ }
+}
+
+// NewGridFrom returns a grid specified by the given row strings. All rows must
+// be the same length and must only contain the Open or Closed characters;
+// NewGridFrom will panic otherwise.
+func NewGridFrom(rows ...string) *Grid {
+ if len(rows) == 0 {
+ return nil
+ }
+ for i, r := range rows[:len(rows)-1] {
+ if len(r) != len(rows[i+1]) {
+ panic("grid: unequal row lengths")
+ }
+ }
+ states := make([]bool, 0, len(rows)*len(rows[0]))
+ for _, r := range rows {
+ for _, b := range r {
+ switch b {
+ case Closed:
+ states = append(states, false)
+ case Open:
+ states = append(states, true)
+ default:
+ panic(fmt.Sprintf("grid: invalid state: %q", r))
+ }
+ }
+ }
+ return &Grid{
+ open: states,
+ r: len(rows),
+ c: len(rows[0]),
+ }
+}
+
+// Nodes returns all the open nodes in the grid if AllVisible is
+// false; otherwise all nodes are returned.
+func (g *Grid) Nodes() []graph.Node {
+ var nodes []graph.Node
+ for id, ok := range g.open {
+ if ok || g.AllVisible {
+ nodes = append(nodes, simple.Node(id))
+ }
+ }
+ return nodes
+}
+
+// Has returns whether n is a node in the grid. The state of
+// the AllVisible field determines whether a non-open node is
+// present.
+func (g *Grid) Has(n graph.Node) bool {
+ return g.has(n.ID())
+}
+
+func (g *Grid) has(id int) bool {
+ return id >= 0 && id < len(g.open) && (g.AllVisible || g.open[id])
+}
+
+// HasOpen returns whether n is an open node in the grid.
+func (g *Grid) HasOpen(n graph.Node) bool {
+ id := n.ID()
+ return id >= 0 && id < len(g.open) && g.open[id]
+}
+
+// Set sets the node at position (r, c) to the specified open state.
+func (g *Grid) Set(r, c int, open bool) {
+ if r < 0 || r >= g.r {
+ panic("grid: illegal row index")
+ }
+ if c < 0 || c >= g.c {
+ panic("grid: illegal column index")
+ }
+ g.open[r*g.c+c] = open
+}
+
+// Dims returns the dimensions of the grid.
+func (g *Grid) Dims() (r, c int) {
+ return g.r, g.c
+}
+
+// RowCol returns the row and column of the id. RowCol will panic if the
+// node id is outside the range of the grid.
+func (g *Grid) RowCol(id int) (r, c int) {
+ if id < 0 || id >= len(g.open) {
+ panic("grid: illegal node id")
+ }
+ return id / g.c, id % g.c
+}
+
+// XY returns the cartesian coordinates of n. If n is not a node
+// in the grid, (NaN, NaN) is returned.
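+// The x coordinate is the column index and the y coordinate is the row
+// index; for example, in a 4-column grid node 5 lies at row 1, column 1,
+// so XY returns (1, 1).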
+func (g *Grid) XY(n graph.Node) (x, y float64) {
+ if !g.Has(n) {
+ return math.NaN(), math.NaN()
+ }
+ r, c := g.RowCol(n.ID())
+ return float64(c), float64(r)
+}
+
+// NodeAt returns the node at (r, c). The returned node may be open or closed.
+func (g *Grid) NodeAt(r, c int) graph.Node {
+ if r < 0 || r >= g.r || c < 0 || c >= g.c {
+ return nil
+ }
+ return simple.Node(r*g.c + c)
+}
+
+// From returns all the nodes reachable from u. Reachability requires that both
+// ends of an edge be open.
+func (g *Grid) From(u graph.Node) []graph.Node {
+ if !g.HasOpen(u) {
+ return nil
+ }
+ nr, nc := g.RowCol(u.ID())
+ var to []graph.Node
+ for r := nr - 1; r <= nr+1; r++ {
+ for c := nc - 1; c <= nc+1; c++ {
+ if v := g.NodeAt(r, c); v != nil && g.HasEdgeBetween(u, v) {
+ to = append(to, v)
+ }
+ }
+ }
+ return to
+}
+
+// HasEdgeBetween returns whether there is an edge between u and v.
+func (g *Grid) HasEdgeBetween(u, v graph.Node) bool {
+ if !g.HasOpen(u) || !g.HasOpen(v) || u.ID() == v.ID() {
+ return false
+ }
+ ur, uc := g.RowCol(u.ID())
+ vr, vc := g.RowCol(v.ID())
+ if abs(ur-vr) > 1 || abs(uc-vc) > 1 {
+ return false
+ }
+ return g.AllowDiagonal || ur == vr || uc == vc
+}
+
+func abs(i int) int {
+ if i < 0 {
+ return -i
+ }
+ return i
+}
+
+// Edge returns the edge between u and v.
+func (g *Grid) Edge(u, v graph.Node) graph.Edge {
+ return g.EdgeBetween(u, v)
+}
+
+// EdgeBetween returns the edge between u and v.
+func (g *Grid) EdgeBetween(u, v graph.Node) graph.Edge {
+ if g.HasEdgeBetween(u, v) {
+ if !g.AllowDiagonal || g.UnitEdgeWeight {
+ return simple.Edge{F: u, T: v, W: 1}
+ }
+ ux, uy := g.XY(u)
+ vx, vy := g.XY(v)
+ return simple.Edge{F: u, T: v, W: math.Hypot(ux-vx, uy-vy)}
+ }
+ return nil
+}
+
+// Weight returns the weight of the given edge.
+func (g *Grid) Weight(x, y graph.Node) (w float64, ok bool) {
+ if x.ID() == y.ID() {
+ return 0, true
+ }
+ if !g.HasEdgeBetween(x, y) {
+ return math.Inf(1), false
+ }
+ if e := g.EdgeBetween(x, y); e != nil {
+ if !g.AllowDiagonal || g.UnitEdgeWeight {
+ return 1, true
+ }
+ ux, uy := g.XY(e.From())
+ vx, vy := g.XY(e.To())
+ return math.Hypot(ux-vx, uy-vy), true
+ }
+ return math.Inf(1), true
+}
+
+// String returns a string representation of the grid.
+func (g *Grid) String() string {
+ b, _ := g.Render(nil)
+ return string(b)
+}
+
+// Render returns a text representation of the graph
+// with the given path included. If the path is not a path
+// in the grid, Render returns a non-nil error and the
+// rendering up to that point.
+func (g *Grid) Render(path []graph.Node) ([]byte, error) {
+ b := make([]byte, g.r*(g.c+1)-1)
+ for r := 0; r < g.r; r++ {
+ for c := 0; c < g.c; c++ {
+ if g.open[r*g.c+c] {
+ b[r*(g.c+1)+c] = Open
+ } else {
+ b[r*(g.c+1)+c] = Closed
+ }
+ }
+ if r < g.r-1 {
+ b[r*(g.c+1)+g.c] = '\n'
+ }
+ }
+
+ // We don't use topo.IsPathIn at the outset because we
+ // want to draw as much as possible before failing.
+ for i, n := range path {
+ if !g.Has(n) || (i != 0 && !g.HasEdgeBetween(path[i-1], n)) {
+ id := n.ID()
+ if id >= 0 && id < len(g.open) {
+ r, c := g.RowCol(n.ID())
+ b[r*(g.c+1)+c] = '!'
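+ // The offending node is marked with '!' so the returned
+ // rendering shows where the path leaves the grid or breaks
+ // adjacency.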
+ } + return b, errors.New("grid: not a path in graph") + } + r, c := g.RowCol(n.ID()) + switch i { + case len(path) - 1: + b[r*(g.c+1)+c] = 'G' + case 0: + b[r*(g.c+1)+c] = 'S' + default: + b[r*(g.c+1)+c] = 'o' + } + } + return b, nil +} diff --git a/graph/path/internal/grid_test.go b/graph/path/internal/grid_test.go new file mode 100644 index 00000000..d046a533 --- /dev/null +++ b/graph/path/internal/grid_test.go @@ -0,0 +1,258 @@ +// Copyright ©2014 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package internal + +import ( + "bytes" + "errors" + "reflect" + "strings" + "testing" + + "github.com/gonum/graph" + "github.com/gonum/graph/simple" +) + +var _ graph.Graph = (*Grid)(nil) + +func join(g ...string) string { return strings.Join(g, "\n") } + +type node int + +func (n node) ID() int { return int(n) } + +func TestGrid(t *testing.T) { + g := NewGrid(4, 4, false) + + got := g.String() + want := join( + "****", + "****", + "****", + "****", + ) + if got != want { + t.Fatalf("unexpected grid rendering:\ngot: %q\nwant:%q", got, want) + } + + var ops = []struct { + r, c int + state bool + want string + }{ + { + r: 0, c: 1, + state: true, + want: join( + "*.**", + "****", + "****", + "****", + ), + }, + { + r: 0, c: 1, + state: false, + want: join( + "****", + "****", + "****", + "****", + ), + }, + { + r: 0, c: 1, + state: true, + want: join( + "*.**", + "****", + "****", + "****", + ), + }, + { + r: 0, c: 2, + state: true, + want: join( + "*..*", + "****", + "****", + "****", + ), + }, + { + r: 1, c: 2, + state: true, + want: join( + "*..*", + "**.*", + "****", + "****", + ), + }, + { + r: 2, c: 2, + state: true, + want: join( + "*..*", + "**.*", + "**.*", + "****", + ), + }, + { + r: 3, c: 2, + state: true, + want: join( + "*..*", + "**.*", + "**.*", + "**.*", + ), + }, + } + for _, test := range ops { + g.Set(test.r, test.c, test.state) + got := g.String() + if got != test.want { + t.Fatalf("unexpected grid rendering after set (%d, %d) open state to %t:\ngot: %q\nwant:%q", + test.r, test.c, test.state, got, test.want) + } + } + + // Match the last state from the loop against the + // explicit description of the grid. 
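+ // After the final Set call the open cells are (0,1), (0,2), (1,2),
+ // (2,2) and (3,2), which is the layout passed to NewGridFrom below.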
+ got = NewGridFrom( + "*..*", + "**.*", + "**.*", + "**.*", + ).String() + want = g.String() + if got != want { + t.Fatalf("unexpected grid rendering from NewGridFrom:\ngot: %q\nwant:%q", got, want) + } + + var paths = []struct { + path []graph.Node + diagonal bool + want string + }{ + { + path: nil, + diagonal: false, + want: join( + "*..*", + "**.*", + "**.*", + "**.*", + ), + }, + { + path: []graph.Node{node(1), node(2), node(6), node(10), node(14)}, + diagonal: false, + want: join( + "*So*", + "**o*", + "**o*", + "**G*", + ), + }, + { + path: []graph.Node{node(1), node(6), node(10), node(14)}, + diagonal: false, + want: join( + "*S.*", + "**!*", + "**.*", + "**.*", + ), + }, + { + path: []graph.Node{node(1), node(6), node(10), node(14)}, + diagonal: true, + want: join( + "*S.*", + "**o*", + "**o*", + "**G*", + ), + }, + { + path: []graph.Node{node(1), node(5), node(9)}, + diagonal: false, + want: join( + "*S.*", + "*!.*", + "**.*", + "**.*", + ), + }, + } + for _, test := range paths { + g.AllowDiagonal = test.diagonal + got, err := g.Render(test.path) + errored := err != nil + if bytes.Contains(got, []byte{'!'}) != errored { + t.Fatalf("unexpected error return: got:%v want:%v", err, errors.New("grid: not a path in graph")) + } + if string(got) != test.want { + t.Fatalf("unexpected grid path rendering for %v:\ngot: %q\nwant:%q", test.path, got, want) + } + } + + var coords = []struct { + r, c int + id int + }{ + {r: 0, c: 0, id: 0}, + {r: 0, c: 3, id: 3}, + {r: 3, c: 0, id: 12}, + {r: 3, c: 3, id: 15}, + } + for _, test := range coords { + if id := g.NodeAt(test.r, test.c).ID(); id != test.id { + t.Fatalf("unexpected ID for node at (%d, %d):\ngot: %d\nwant:%d", test.r, test.c, id, test.id) + } + if r, c := g.RowCol(test.id); r != test.r || c != test.c { + t.Fatalf("unexpected row/col for node %d:\ngot: (%d, %d)\nwant:(%d, %d)", test.id, r, c, test.r, test.c) + } + } + + var reach = []struct { + from graph.Node + diagonal bool + to []graph.Node + }{ + { + from: node(0), + diagonal: false, + to: nil, + }, + { + from: node(2), + diagonal: false, + to: []graph.Node{simple.Node(1), simple.Node(6)}, + }, + { + from: node(1), + diagonal: false, + to: []graph.Node{simple.Node(2)}, + }, + { + from: node(1), + diagonal: true, + to: []graph.Node{simple.Node(2), simple.Node(6)}, + }, + } + for _, test := range reach { + g.AllowDiagonal = test.diagonal + got := g.From(test.from) + if !reflect.DeepEqual(got, test.to) { + t.Fatalf("unexpected nodes from %d with allow diagonal=%t:\ngot: %v\nwant:%v", + test.from, test.diagonal, got, test.to) + } + } +} diff --git a/graph/path/internal/limited.go b/graph/path/internal/limited.go new file mode 100644 index 00000000..7b900fde --- /dev/null +++ b/graph/path/internal/limited.go @@ -0,0 +1,306 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package internal + +import ( + "errors" + "math" + + "github.com/gonum/graph" + "github.com/gonum/graph/simple" +) + +// LimitedVisionGrid is a 2D grid planar undirected graph where the capacity +// to determine the presence of edges is dependent on the current and past +// positions on the grid. In the absence of information, the grid is +// optimistic. +type LimitedVisionGrid struct { + Grid *Grid + + // Location is the current + // location on the grid. + Location graph.Node + + // VisionRadius specifies how far + // away edges can be detected. 
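+ // The radius is a Euclidean distance in grid units measured from
+ // the current Location; for example, a radius of 1 reveals only
+ // orthogonal neighbours while a radius of 1.5 also reveals
+ // diagonal neighbours (distance √2).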
+ VisionRadius float64 + + // Known holds a store of known + // nodes, if not nil. + Known map[int]bool +} + +// MoveTo moves to the node n on the grid and returns a slice of newly seen and +// already known edges. MoveTo panics if n is nil. +func (l *LimitedVisionGrid) MoveTo(n graph.Node) (new, old []graph.Edge) { + l.Location = n + row, column := l.RowCol(n.ID()) + x := float64(column) + y := float64(row) + seen := make(map[[2]int]bool) + bound := int(l.VisionRadius + 0.5) + for r := row - bound; r <= row+bound; r++ { + for c := column - bound; c <= column+bound; c++ { + u := l.NodeAt(r, c) + if u == nil { + continue + } + ux, uy := l.XY(u) + if math.Hypot(x-ux, y-uy) > l.VisionRadius { + continue + } + for _, v := range l.allPossibleFrom(u) { + if seen[[2]int{u.ID(), v.ID()}] { + continue + } + seen[[2]int{u.ID(), v.ID()}] = true + + vx, vy := l.XY(v) + if !l.Known[v.ID()] && math.Hypot(x-vx, y-vy) > l.VisionRadius { + continue + } + + e := simple.Edge{F: u, T: v} + if !l.Known[u.ID()] || !l.Known[v.ID()] { + new = append(new, e) + } else { + old = append(old, e) + } + } + } + } + + if l.Known != nil { + for r := row - bound; r <= row+bound; r++ { + for c := column - bound; c <= column+bound; c++ { + u := l.NodeAt(r, c) + if u == nil { + continue + } + ux, uy := l.XY(u) + if math.Hypot(x-ux, y-uy) > l.VisionRadius { + continue + } + for _, v := range l.allPossibleFrom(u) { + vx, vy := l.XY(v) + if math.Hypot(x-vx, y-vy) > l.VisionRadius { + continue + } + l.Known[v.ID()] = true + } + l.Known[u.ID()] = true + } + } + + } + + return new, old +} + +// allPossibleFrom returns all the nodes possibly reachable from u. +func (l *LimitedVisionGrid) allPossibleFrom(u graph.Node) []graph.Node { + if !l.Has(u) { + return nil + } + nr, nc := l.RowCol(u.ID()) + var to []graph.Node + for r := nr - 1; r <= nr+1; r++ { + for c := nc - 1; c <= nc+1; c++ { + v := l.NodeAt(r, c) + if v == nil || u.ID() == v.ID() { + continue + } + ur, uc := l.RowCol(u.ID()) + vr, vc := l.RowCol(v.ID()) + if abs(ur-vr) > 1 || abs(uc-vc) > 1 { + continue + } + if !l.Grid.AllowDiagonal && ur != vr && uc != vc { + continue + } + to = append(to, v) + } + } + return to +} + +// RowCol returns the row and column of the id. RowCol will panic if the +// node id is outside the range of the grid. +func (l *LimitedVisionGrid) RowCol(id int) (r, c int) { + return l.Grid.RowCol(id) +} + +// XY returns the cartesian coordinates of n. If n is not a node +// in the grid, (NaN, NaN) is returned. +func (l *LimitedVisionGrid) XY(n graph.Node) (x, y float64) { + if !l.Has(n) { + return math.NaN(), math.NaN() + } + r, c := l.RowCol(n.ID()) + return float64(c), float64(r) +} + +// Nodes returns all the nodes in the grid. +func (l *LimitedVisionGrid) Nodes() []graph.Node { + nodes := make([]graph.Node, 0, len(l.Grid.open)) + for id := range l.Grid.open { + nodes = append(nodes, simple.Node(id)) + } + return nodes +} + +// NodeAt returns the node at (r, c). The returned node may be open or closed. +func (l *LimitedVisionGrid) NodeAt(r, c int) graph.Node { + return l.Grid.NodeAt(r, c) +} + +// Has returns whether n is a node in the grid. +func (l *LimitedVisionGrid) Has(n graph.Node) bool { + return l.has(n.ID()) +} + +func (l *LimitedVisionGrid) has(id int) bool { + return id >= 0 && id < len(l.Grid.open) +} + +// From returns nodes that are optimistically reachable from u. 
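+// Nodes that are neither known nor within the vision radius of the
+// current Location are optimistically assumed to be open, so edges to
+// them are reported.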
+func (l *LimitedVisionGrid) From(u graph.Node) []graph.Node {
+ if !l.Has(u) {
+ return nil
+ }
+
+ nr, nc := l.RowCol(u.ID())
+ var to []graph.Node
+ for r := nr - 1; r <= nr+1; r++ {
+ for c := nc - 1; c <= nc+1; c++ {
+ if v := l.NodeAt(r, c); v != nil && l.HasEdgeBetween(u, v) {
+ to = append(to, v)
+ }
+ }
+ }
+ return to
+}
+
+// HasEdgeBetween optimistically returns whether an edge exists between u and v.
+func (l *LimitedVisionGrid) HasEdgeBetween(u, v graph.Node) bool {
+ if u.ID() == v.ID() {
+ return false
+ }
+ ur, uc := l.RowCol(u.ID())
+ vr, vc := l.RowCol(v.ID())
+ if abs(ur-vr) > 1 || abs(uc-vc) > 1 {
+ return false
+ }
+ if !l.Grid.AllowDiagonal && ur != vr && uc != vc {
+ return false
+ }
+
+ x, y := l.XY(l.Location)
+ ux, uy := l.XY(u)
+ vx, vy := l.XY(v)
+ uKnown := l.Known[u.ID()] || math.Hypot(x-ux, y-uy) <= l.VisionRadius
+ vKnown := l.Known[v.ID()] || math.Hypot(x-vx, y-vy) <= l.VisionRadius
+
+ switch {
+ case uKnown && vKnown:
+ return l.Grid.HasEdgeBetween(u, v)
+ case uKnown:
+ return l.Grid.HasOpen(u)
+ case vKnown:
+ return l.Grid.HasOpen(v)
+ default:
+ return true
+ }
+}
+
+// Edge optimistically returns the edge from u to v.
+func (l *LimitedVisionGrid) Edge(u, v graph.Node) graph.Edge {
+ return l.EdgeBetween(u, v)
+}
+
+// EdgeBetween optimistically returns the edge between u and v.
+func (l *LimitedVisionGrid) EdgeBetween(u, v graph.Node) graph.Edge {
+ if l.HasEdgeBetween(u, v) {
+ if !l.Grid.AllowDiagonal || l.Grid.UnitEdgeWeight {
+ return simple.Edge{F: u, T: v, W: 1}
+ }
+ ux, uy := l.XY(u)
+ vx, vy := l.XY(v)
+ return simple.Edge{F: u, T: v, W: math.Hypot(ux-vx, uy-vy)}
+ }
+ return nil
+}
+
+// Weight returns the weight of the given edge.
+func (l *LimitedVisionGrid) Weight(x, y graph.Node) (w float64, ok bool) {
+ if x.ID() == y.ID() {
+ return 0, true
+ }
+ if !l.HasEdgeBetween(x, y) {
+ return math.Inf(1), false
+ }
+ if e := l.EdgeBetween(x, y); e != nil {
+ if !l.Grid.AllowDiagonal || l.Grid.UnitEdgeWeight {
+ return 1, true
+ }
+ ux, uy := l.XY(e.From())
+ vx, vy := l.XY(e.To())
+ return math.Hypot(ux-vx, uy-vy), true
+
+ }
+ return math.Inf(1), true
+}
+
+// String returns a string representation of the grid.
+func (l *LimitedVisionGrid) String() string {
+ b, _ := l.Render(nil)
+ return string(b)
+}
+
+// Render returns a text representation of the graph
+// with the given path included. If the path is not a path
+// in the grid, Render returns a non-nil error and the
+// rendering up to that point.
+func (l *LimitedVisionGrid) Render(path []graph.Node) ([]byte, error) {
+ rows, cols := l.Grid.Dims()
+ b := make([]byte, rows*(cols+1)-1)
+ for r := 0; r < rows; r++ {
+ for c := 0; c < cols; c++ {
+ if !l.Known[r*cols+c] {
+ b[r*(cols+1)+c] = Unknown
+ } else if l.Grid.open[r*cols+c] {
+ b[r*(cols+1)+c] = Open
+ } else {
+ b[r*(cols+1)+c] = Closed
+ }
+ }
+ if r < rows-1 {
+ b[r*(cols+1)+cols] = '\n'
+ }
+ }
+
+ // We don't use topo.IsPathIn at the outset because we
+ // want to draw as much as possible before failing.
+ for i, n := range path {
+ if !l.Has(n) || (i != 0 && !l.HasEdgeBetween(path[i-1], n)) {
+ id := n.ID()
+ if id >= 0 && id < len(l.Grid.open) {
+ r, c := l.RowCol(n.ID())
+ b[r*(cols+1)+c] = '!'
+ } + return b, errors.New("grid: not a path in graph") + } + r, c := l.RowCol(n.ID()) + switch i { + case len(path) - 1: + b[r*(cols+1)+c] = 'G' + case 0: + b[r*(cols+1)+c] = 'S' + default: + b[r*(cols+1)+c] = 'o' + } + } + return b, nil +} diff --git a/graph/path/internal/limited_test.go b/graph/path/internal/limited_test.go new file mode 100644 index 00000000..ef20de41 --- /dev/null +++ b/graph/path/internal/limited_test.go @@ -0,0 +1,1242 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package internal + +import ( + "math" + "reflect" + "testing" + + "github.com/gonum/graph" + "github.com/gonum/graph/simple" +) + +type changes struct { + n graph.Node + + new, old []simple.Edge +} + +var limitedVisionTests = []struct { + g *Grid + radius float64 + diag bool + remember bool + + path []graph.Node + + want []changes +}{ + { + g: NewGridFrom( + "*..*", + "**.*", + "**.*", + "**.*", + ), + radius: 1, + diag: false, + remember: false, + path: []graph.Node{node(1), node(2), node(6), node(10), node(14)}, + + want: []changes{ + { + n: node(1), + new: []simple.Edge{ + {F: simple.Node(0), T: simple.Node(1), W: math.Inf(1)}, + {F: simple.Node(1), T: simple.Node(0), W: math.Inf(1)}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(1), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(1), W: 1}, + {F: simple.Node(5), T: simple.Node(1), W: math.Inf(1)}, + }, + old: nil, + }, + { + n: node(2), + new: []simple.Edge{ + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(2), T: simple.Node(1), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(6), W: 1}, + {F: simple.Node(3), T: simple.Node(2), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(2), W: 1}, + }, + old: nil, + }, + { + n: node(6), + new: []simple.Edge{ + {F: simple.Node(2), T: simple.Node(6), W: 1}, + {F: simple.Node(5), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(2), W: 1}, + {F: simple.Node(6), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(10), W: 1}, + {F: simple.Node(7), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(6), W: 1}, + }, + old: nil, + }, + { + n: node(10), + new: []simple.Edge{ + {F: simple.Node(6), T: simple.Node(10), W: 1}, + {F: simple.Node(9), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(6), W: 1}, + {F: simple.Node(10), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(14), W: 1}, + {F: simple.Node(11), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(10), W: 1}, + }, + old: nil, + }, + { + n: node(14), + new: []simple.Edge{ + {F: simple.Node(10), T: simple.Node(14), W: 1}, + {F: simple.Node(13), T: simple.Node(14), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(10), W: 1}, + {F: simple.Node(14), T: simple.Node(13), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(15), W: math.Inf(1)}, + {F: simple.Node(15), T: simple.Node(14), W: math.Inf(1)}, + }, + old: nil, + }, + }, + }, + { + g: NewGridFrom( + "*..*", + "**.*", + "**.*", + "**.*", + ), + radius: 1.5, + diag: false, + remember: false, + path: []graph.Node{node(1), node(2), node(6), node(10), node(14)}, + + want: []changes{ + { + n: node(1), + new: 
[]simple.Edge{ + {F: simple.Node(0), T: simple.Node(1), W: math.Inf(1)}, + {F: simple.Node(0), T: simple.Node(4), W: math.Inf(1)}, + {F: simple.Node(1), T: simple.Node(0), W: math.Inf(1)}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(1), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(1), W: 1}, + {F: simple.Node(2), T: simple.Node(6), W: 1}, + {F: simple.Node(4), T: simple.Node(0), W: math.Inf(1)}, + {F: simple.Node(4), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(1), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(4), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(2), W: 1}, + {F: simple.Node(6), T: simple.Node(5), W: math.Inf(1)}, + }, + old: nil, + }, + { + n: node(2), + new: []simple.Edge{ + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(1), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(1), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(6), W: 1}, + {F: simple.Node(3), T: simple.Node(2), W: math.Inf(1)}, + {F: simple.Node(3), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(1), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(2), W: 1}, + {F: simple.Node(6), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(6), W: math.Inf(1)}, + }, + old: nil, + }, + { + n: node(6), + new: []simple.Edge{ + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(1), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(1), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(6), W: 1}, + {F: simple.Node(3), T: simple.Node(2), W: math.Inf(1)}, + {F: simple.Node(3), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(1), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(2), W: 1}, + {F: simple.Node(6), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(10), W: 1}, + {F: simple.Node(7), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(6), W: 1}, + {F: simple.Node(10), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(10), W: math.Inf(1)}, + }, + old: nil, + }, + { + n: node(10), + new: []simple.Edge{ + {F: simple.Node(5), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(10), W: 1}, + {F: simple.Node(7), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(5), W: math.Inf(1)}, + {F: 
simple.Node(9), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(13), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(6), W: 1}, + {F: simple.Node(10), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(14), W: 1}, + {F: simple.Node(11), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(15), W: math.Inf(1)}, + {F: simple.Node(13), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(13), T: simple.Node(14), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(10), W: 1}, + {F: simple.Node(14), T: simple.Node(13), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(15), W: math.Inf(1)}, + {F: simple.Node(15), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(15), T: simple.Node(14), W: math.Inf(1)}, + }, + old: nil, + }, + { + n: node(14), + new: []simple.Edge{ + {F: simple.Node(9), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(13), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(14), W: 1}, + {F: simple.Node(11), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(15), W: math.Inf(1)}, + {F: simple.Node(13), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(13), T: simple.Node(14), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(10), W: 1}, + {F: simple.Node(14), T: simple.Node(13), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(15), W: math.Inf(1)}, + {F: simple.Node(15), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(15), T: simple.Node(14), W: math.Inf(1)}, + }, + old: nil, + }, + }, + }, + { + g: NewGridFrom( + "*..*", + "**.*", + "**.*", + "**.*", + ), + radius: 1, + diag: false, + remember: true, + path: []graph.Node{node(1), node(2), node(6), node(10), node(14)}, + + want: []changes{ + { + n: node(1), + new: []simple.Edge{ + {F: simple.Node(0), T: simple.Node(1), W: math.Inf(1)}, + {F: simple.Node(1), T: simple.Node(0), W: math.Inf(1)}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(1), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(1), W: 1}, + {F: simple.Node(5), T: simple.Node(1), W: math.Inf(1)}, + }, + old: nil, + }, + { + n: node(2), + new: []simple.Edge{ + {F: simple.Node(2), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(6), W: 1}, + {F: simple.Node(3), T: simple.Node(2), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(2), W: 1}, + {F: simple.Node(6), T: simple.Node(5), W: math.Inf(1)}, + }, + old: []simple.Edge{ + {F: simple.Node(1), T: simple.Node(0), W: math.Inf(1)}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(1), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(1), W: 1}, + }, + }, + { + n: node(6), + new: []simple.Edge{ + {F: simple.Node(6), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(10), W: 1}, + {F: simple.Node(7), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(6), W: 1}, + }, + old: []simple.Edge{ + {F: simple.Node(2), T: simple.Node(1), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(6), W: 1}, + {F: simple.Node(5), T: simple.Node(1), W: math.Inf(1)}, + {F: simple.Node(5), 
T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(2), W: 1}, + {F: simple.Node(6), T: simple.Node(5), W: math.Inf(1)}, + }, + }, + { + n: node(10), + new: []simple.Edge{ + {F: simple.Node(9), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(14), W: 1}, + {F: simple.Node(11), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(10), W: 1}, + }, + old: []simple.Edge{ + {F: simple.Node(6), T: simple.Node(2), W: 1}, + {F: simple.Node(6), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(10), W: 1}, + {F: simple.Node(10), T: simple.Node(6), W: 1}, + }, + }, + { + n: node(14), + new: []simple.Edge{ + {F: simple.Node(13), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(13), T: simple.Node(14), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(13), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(15), W: math.Inf(1)}, + {F: simple.Node(15), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(15), T: simple.Node(14), W: math.Inf(1)}, + }, + old: []simple.Edge{ + {F: simple.Node(10), T: simple.Node(6), W: 1}, + {F: simple.Node(10), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(14), W: 1}, + {F: simple.Node(14), T: simple.Node(10), W: 1}, + }, + }, + }, + }, + { + g: NewGridFrom( + "*..*", + "**.*", + "**.*", + "**.*", + ), + radius: 1.5, + diag: false, + remember: true, + path: []graph.Node{node(1), node(2), node(6), node(10), node(14)}, + + want: []changes{ + { + n: node(1), + new: []simple.Edge{ + {F: simple.Node(0), T: simple.Node(1), W: math.Inf(1)}, + {F: simple.Node(0), T: simple.Node(4), W: math.Inf(1)}, + {F: simple.Node(1), T: simple.Node(0), W: math.Inf(1)}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(1), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(1), W: 1}, + {F: simple.Node(2), T: simple.Node(6), W: 1}, + {F: simple.Node(4), T: simple.Node(0), W: math.Inf(1)}, + {F: simple.Node(4), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(1), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(4), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(2), W: 1}, + {F: simple.Node(6), T: simple.Node(5), W: math.Inf(1)}, + }, + old: nil, + }, + { + n: node(2), + new: []simple.Edge{ + {F: simple.Node(2), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(3), T: simple.Node(2), W: math.Inf(1)}, + {F: simple.Node(3), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(6), W: math.Inf(1)}, + }, + old: []simple.Edge{ + {F: simple.Node(1), T: simple.Node(0), W: math.Inf(1)}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(1), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(1), W: 1}, + {F: simple.Node(2), T: simple.Node(6), W: 1}, + {F: simple.Node(5), T: simple.Node(1), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(4), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(6), W: math.Inf(1)}, + 
{F: simple.Node(6), T: simple.Node(2), W: 1}, + {F: simple.Node(6), T: simple.Node(5), W: math.Inf(1)}, + }, + }, + { + n: node(6), + new: []simple.Edge{ + {F: simple.Node(5), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(10), W: 1}, + {F: simple.Node(7), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(6), W: 1}, + {F: simple.Node(10), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(10), W: math.Inf(1)}, + }, + old: []simple.Edge{ + {F: simple.Node(1), T: simple.Node(0), W: math.Inf(1)}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(1), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(1), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(6), W: 1}, + {F: simple.Node(3), T: simple.Node(2), W: math.Inf(1)}, + {F: simple.Node(3), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(1), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(4), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(2), W: 1}, + {F: simple.Node(6), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(6), W: math.Inf(1)}, + }, + }, + { + n: node(10), + new: []simple.Edge{ + {F: simple.Node(9), T: simple.Node(13), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(14), W: 1}, + {F: simple.Node(11), T: simple.Node(15), W: math.Inf(1)}, + {F: simple.Node(13), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(13), T: simple.Node(14), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(10), W: 1}, + {F: simple.Node(14), T: simple.Node(13), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(15), W: math.Inf(1)}, + {F: simple.Node(15), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(15), T: simple.Node(14), W: math.Inf(1)}, + }, + old: []simple.Edge{ + {F: simple.Node(5), T: simple.Node(1), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(4), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(2), W: 1}, + {F: simple.Node(6), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(10), W: 1}, + {F: simple.Node(7), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(6), W: 1}, + {F: simple.Node(10), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(10), W: math.Inf(1)}, + }, + }, + { + n: node(14), + new: nil, + old: []simple.Edge{ + {F: simple.Node(9), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(13), W: 
math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(6), W: 1}, + {F: simple.Node(10), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(14), W: 1}, + {F: simple.Node(11), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(15), W: math.Inf(1)}, + {F: simple.Node(13), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(13), T: simple.Node(14), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(10), W: 1}, + {F: simple.Node(14), T: simple.Node(13), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(15), W: math.Inf(1)}, + {F: simple.Node(15), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(15), T: simple.Node(14), W: math.Inf(1)}, + }, + }, + }, + }, + { + g: NewGridFrom( + "*..*", + "**.*", + "**.*", + "**.*", + ), + radius: 1, + diag: true, + remember: false, + path: []graph.Node{node(1), node(2), node(6), node(10), node(14)}, + + want: []changes{ + { + n: node(1), + new: []simple.Edge{ + {F: simple.Node(0), T: simple.Node(1), W: math.Inf(1)}, + {F: simple.Node(0), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(1), T: simple.Node(0), W: math.Inf(1)}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(1), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(1), W: 1}, + {F: simple.Node(2), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(0), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(1), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(2), W: math.Inf(1)}, + }, + old: nil, + }, + { + n: node(2), + new: []simple.Edge{ + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(1), T: simple.Node(6), W: math.Sqrt2}, + {F: simple.Node(2), T: simple.Node(1), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(6), W: 1}, + {F: simple.Node(3), T: simple.Node(2), W: math.Inf(1)}, + {F: simple.Node(3), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(1), W: math.Sqrt2}, + {F: simple.Node(6), T: simple.Node(2), W: 1}, + {F: simple.Node(6), T: simple.Node(3), W: math.Inf(1)}, + }, + old: nil, + }, + { + n: node(6), + new: []simple.Edge{ + {F: simple.Node(2), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(6), W: 1}, + {F: simple.Node(2), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(2), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(2), W: 1}, + {F: simple.Node(6), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(10), W: 1}, + {F: simple.Node(7), T: simple.Node(2), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(6), W: 1}, + {F: simple.Node(10), T: simple.Node(7), W: math.Inf(1)}, + }, + old: nil, + }, + { + n: node(10), + new: []simple.Edge{ + {F: simple.Node(6), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(10), W: 1}, + {F: simple.Node(6), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(10), W: math.Inf(1)}, + {F: 
simple.Node(9), T: simple.Node(14), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(6), W: 1}, + {F: simple.Node(10), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(14), W: 1}, + {F: simple.Node(11), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(14), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(10), W: 1}, + {F: simple.Node(14), T: simple.Node(11), W: math.Inf(1)}, + }, + old: nil, + }, + { + n: node(14), + new: []simple.Edge{ + {F: simple.Node(10), T: simple.Node(13), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(14), W: 1}, + {F: simple.Node(10), T: simple.Node(15), W: math.Inf(1)}, + {F: simple.Node(13), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(13), T: simple.Node(14), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(10), W: 1}, + {F: simple.Node(14), T: simple.Node(13), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(15), W: math.Inf(1)}, + {F: simple.Node(15), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(15), T: simple.Node(14), W: math.Inf(1)}, + }, + old: nil, + }, + }, + }, + { + g: NewGridFrom( + "*..*", + "**.*", + "**.*", + "**.*", + ), + radius: 1.5, + diag: true, + remember: false, + path: []graph.Node{node(1), node(2), node(6), node(10), node(14)}, + + want: []changes{ + { + n: node(1), + new: []simple.Edge{ + {F: simple.Node(0), T: simple.Node(1), W: math.Inf(1)}, + {F: simple.Node(0), T: simple.Node(4), W: math.Inf(1)}, + {F: simple.Node(0), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(1), T: simple.Node(0), W: math.Inf(1)}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(1), T: simple.Node(4), W: math.Inf(1)}, + {F: simple.Node(1), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(1), T: simple.Node(6), W: math.Sqrt2}, + {F: simple.Node(2), T: simple.Node(1), W: 1}, + {F: simple.Node(2), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(6), W: 1}, + {F: simple.Node(4), T: simple.Node(0), W: math.Inf(1)}, + {F: simple.Node(4), T: simple.Node(1), W: math.Inf(1)}, + {F: simple.Node(4), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(0), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(1), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(2), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(4), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(1), W: math.Sqrt2}, + {F: simple.Node(6), T: simple.Node(2), W: 1}, + {F: simple.Node(6), T: simple.Node(5), W: math.Inf(1)}, + }, + old: nil, + }, + { + n: node(2), + new: []simple.Edge{ + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(1), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(1), T: simple.Node(6), W: math.Sqrt2}, + {F: simple.Node(2), T: simple.Node(1), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(6), W: 1}, + {F: simple.Node(2), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(3), T: simple.Node(2), W: math.Inf(1)}, + {F: simple.Node(3), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(3), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(1), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(2), W: 
math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(1), W: math.Sqrt2}, + {F: simple.Node(6), T: simple.Node(2), W: 1}, + {F: simple.Node(6), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(2), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(6), W: math.Inf(1)}, + }, + old: nil, + }, + { + n: node(6), + new: []simple.Edge{ + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(1), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(1), T: simple.Node(6), W: math.Sqrt2}, + {F: simple.Node(2), T: simple.Node(1), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(6), W: 1}, + {F: simple.Node(2), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(3), T: simple.Node(2), W: math.Inf(1)}, + {F: simple.Node(3), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(3), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(1), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(2), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(1), W: math.Sqrt2}, + {F: simple.Node(6), T: simple.Node(2), W: 1}, + {F: simple.Node(6), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(10), W: 1}, + {F: simple.Node(6), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(2), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(6), W: 1}, + {F: simple.Node(10), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(10), W: math.Inf(1)}, + }, + old: nil, + }, + { + n: node(10), + new: []simple.Edge{ + {F: simple.Node(5), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(10), W: 1}, + {F: simple.Node(6), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(11), W: 
math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(13), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(14), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(6), W: 1}, + {F: simple.Node(10), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(13), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(14), W: 1}, + {F: simple.Node(10), T: simple.Node(15), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(14), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(15), W: math.Inf(1)}, + {F: simple.Node(13), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(13), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(13), T: simple.Node(14), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(10), W: 1}, + {F: simple.Node(14), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(13), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(15), W: math.Inf(1)}, + {F: simple.Node(15), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(15), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(15), T: simple.Node(14), W: math.Inf(1)}, + }, + old: nil, + }, + { + n: node(14), + new: []simple.Edge{ + {F: simple.Node(9), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(13), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(14), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(13), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(14), W: 1}, + {F: simple.Node(10), T: simple.Node(15), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(14), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(15), W: math.Inf(1)}, + {F: simple.Node(13), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(13), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(13), T: simple.Node(14), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(10), W: 1}, + {F: simple.Node(14), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(13), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(15), W: math.Inf(1)}, + {F: simple.Node(15), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(15), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(15), T: simple.Node(14), W: math.Inf(1)}, + }, + old: nil, + }, + }, + }, + { + g: NewGridFrom( + "*..*", + "**.*", + "**.*", + "**.*", + ), + radius: 1, + diag: true, + remember: true, + path: []graph.Node{node(1), node(2), node(6), node(10), node(14)}, + + want: []changes{ + { + n: node(1), + new: []simple.Edge{ + {F: simple.Node(0), T: simple.Node(1), W: math.Inf(1)}, + {F: simple.Node(0), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(1), T: simple.Node(0), W: math.Inf(1)}, + {F: simple.Node(1), T: simple.Node(2), 
W: 1}, + {F: simple.Node(1), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(1), W: 1}, + {F: simple.Node(2), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(0), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(1), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(2), W: math.Inf(1)}, + }, + old: nil, + }, + { + n: node(2), + new: []simple.Edge{ + {F: simple.Node(1), T: simple.Node(6), W: math.Sqrt2}, + {F: simple.Node(2), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(6), W: 1}, + {F: simple.Node(3), T: simple.Node(2), W: math.Inf(1)}, + {F: simple.Node(3), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(1), W: math.Sqrt2}, + {F: simple.Node(6), T: simple.Node(2), W: 1}, + {F: simple.Node(6), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(5), W: math.Inf(1)}, + }, + old: []simple.Edge{ + {F: simple.Node(1), T: simple.Node(0), W: math.Inf(1)}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(1), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(1), W: 1}, + {F: simple.Node(2), T: simple.Node(5), W: math.Inf(1)}, + }, + }, + { + n: node(6), + new: []simple.Edge{ + {F: simple.Node(2), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(10), W: 1}, + {F: simple.Node(7), T: simple.Node(2), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(6), W: 1}, + {F: simple.Node(10), T: simple.Node(7), W: math.Inf(1)}, + }, + old: []simple.Edge{ + {F: simple.Node(2), T: simple.Node(1), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(6), W: 1}, + {F: simple.Node(5), T: simple.Node(0), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(1), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(2), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(1), W: math.Sqrt2}, + {F: simple.Node(6), T: simple.Node(2), W: 1}, + {F: simple.Node(6), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(5), W: math.Inf(1)}, + }, + }, + { + n: node(10), + new: []simple.Edge{ + {F: simple.Node(6), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(14), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(14), W: 1}, + {F: simple.Node(11), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(14), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(10), W: 1}, + {F: simple.Node(14), T: simple.Node(11), W: math.Inf(1)}, + }, + 
old: []simple.Edge{ + {F: simple.Node(6), T: simple.Node(1), W: math.Sqrt2}, + {F: simple.Node(6), T: simple.Node(2), W: 1}, + {F: simple.Node(6), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(10), W: 1}, + {F: simple.Node(10), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(6), W: 1}, + {F: simple.Node(10), T: simple.Node(7), W: math.Inf(1)}, + }, + }, + { + n: node(14), + new: []simple.Edge{ + {F: simple.Node(10), T: simple.Node(13), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(15), W: math.Inf(1)}, + {F: simple.Node(13), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(13), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(13), T: simple.Node(14), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(13), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(15), W: math.Inf(1)}, + {F: simple.Node(15), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(15), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(15), T: simple.Node(14), W: math.Inf(1)}, + }, + old: []simple.Edge{ + {F: simple.Node(10), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(6), W: 1}, + {F: simple.Node(10), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(14), W: 1}, + {F: simple.Node(14), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(10), W: 1}, + {F: simple.Node(14), T: simple.Node(11), W: math.Inf(1)}, + }, + }, + }, + }, + { + g: NewGridFrom( + "*..*", + "**.*", + "**.*", + "**.*", + ), + radius: 1.5, + diag: true, + remember: true, + path: []graph.Node{node(1), node(2), node(6), node(10), node(14)}, + + want: []changes{ + { + n: node(1), + new: []simple.Edge{ + {F: simple.Node(0), T: simple.Node(1), W: math.Inf(1)}, + {F: simple.Node(0), T: simple.Node(4), W: math.Inf(1)}, + {F: simple.Node(0), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(1), T: simple.Node(0), W: math.Inf(1)}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(1), T: simple.Node(4), W: math.Inf(1)}, + {F: simple.Node(1), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(1), T: simple.Node(6), W: math.Sqrt2}, + {F: simple.Node(2), T: simple.Node(1), W: 1}, + {F: simple.Node(2), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(6), W: 1}, + {F: simple.Node(4), T: simple.Node(0), W: math.Inf(1)}, + {F: simple.Node(4), T: simple.Node(1), W: math.Inf(1)}, + {F: simple.Node(4), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(0), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(1), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(2), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(4), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(1), W: math.Sqrt2}, + {F: simple.Node(6), T: simple.Node(2), W: 1}, + {F: simple.Node(6), T: simple.Node(5), W: math.Inf(1)}, + }, + old: nil, + }, + { + n: node(2), + new: []simple.Edge{ + {F: simple.Node(2), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(3), T: simple.Node(2), W: math.Inf(1)}, + {F: simple.Node(3), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(3), T: simple.Node(7), W: math.Inf(1)}, + {F: 
simple.Node(6), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(2), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(6), W: math.Inf(1)}, + }, + old: []simple.Edge{ + {F: simple.Node(1), T: simple.Node(0), W: math.Inf(1)}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(1), T: simple.Node(4), W: math.Inf(1)}, + {F: simple.Node(1), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(1), T: simple.Node(6), W: math.Sqrt2}, + {F: simple.Node(2), T: simple.Node(1), W: 1}, + {F: simple.Node(2), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(6), W: 1}, + {F: simple.Node(5), T: simple.Node(0), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(1), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(2), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(4), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(1), W: math.Sqrt2}, + {F: simple.Node(6), T: simple.Node(2), W: 1}, + {F: simple.Node(6), T: simple.Node(5), W: math.Inf(1)}, + }, + }, + { + n: node(6), + new: []simple.Edge{ + {F: simple.Node(5), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(10), W: 1}, + {F: simple.Node(6), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(4), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(6), W: 1}, + {F: simple.Node(10), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(10), W: math.Inf(1)}, + }, + old: []simple.Edge{ + {F: simple.Node(1), T: simple.Node(0), W: math.Inf(1)}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(1), T: simple.Node(4), W: math.Inf(1)}, + {F: simple.Node(1), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(1), T: simple.Node(6), W: math.Sqrt2}, + {F: simple.Node(2), T: simple.Node(1), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(2), T: simple.Node(6), W: 1}, + {F: simple.Node(2), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(3), T: simple.Node(2), W: math.Inf(1)}, + {F: simple.Node(3), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(3), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(0), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(1), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(2), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(4), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(1), W: math.Sqrt2}, + {F: simple.Node(6), T: simple.Node(2), W: 1}, + {F: simple.Node(6), T: simple.Node(3), W: math.Inf(1)}, + {F: 
simple.Node(6), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(2), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(6), W: math.Inf(1)}, + }, + }, + { + n: node(10), + new: []simple.Edge{ + {F: simple.Node(9), T: simple.Node(13), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(14), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(13), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(14), W: 1}, + {F: simple.Node(10), T: simple.Node(15), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(14), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(15), W: math.Inf(1)}, + {F: simple.Node(13), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(13), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(13), T: simple.Node(14), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(10), W: 1}, + {F: simple.Node(14), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(13), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(15), W: math.Inf(1)}, + {F: simple.Node(15), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(15), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(15), T: simple.Node(14), W: math.Inf(1)}, + }, + old: []simple.Edge{ + {F: simple.Node(5), T: simple.Node(0), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(1), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(2), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(4), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(5), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(1), W: math.Sqrt2}, + {F: simple.Node(6), T: simple.Node(2), W: 1}, + {F: simple.Node(6), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(6), T: simple.Node(10), W: 1}, + {F: simple.Node(6), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(2), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(3), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(7), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(4), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(6), W: 1}, + {F: simple.Node(10), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(10), W: math.Inf(1)}, + }, + }, + { + n: node(14), + new: nil, + old: []simple.Edge{ + {F: simple.Node(9), T: simple.Node(4), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(10), W: math.Inf(1)}, 
+ {F: simple.Node(9), T: simple.Node(13), W: math.Inf(1)}, + {F: simple.Node(9), T: simple.Node(14), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(5), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(6), W: 1}, + {F: simple.Node(10), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(13), W: math.Inf(1)}, + {F: simple.Node(10), T: simple.Node(14), W: 1}, + {F: simple.Node(10), T: simple.Node(15), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(6), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(7), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(14), W: math.Inf(1)}, + {F: simple.Node(11), T: simple.Node(15), W: math.Inf(1)}, + {F: simple.Node(13), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(13), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(13), T: simple.Node(14), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(9), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(10), W: 1}, + {F: simple.Node(14), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(13), W: math.Inf(1)}, + {F: simple.Node(14), T: simple.Node(15), W: math.Inf(1)}, + {F: simple.Node(15), T: simple.Node(10), W: math.Inf(1)}, + {F: simple.Node(15), T: simple.Node(11), W: math.Inf(1)}, + {F: simple.Node(15), T: simple.Node(14), W: math.Inf(1)}, + }, + }, + }, + }, +} + +func TestLimitedVisionGrid(t *testing.T) { + for i, test := range limitedVisionTests { + l := &LimitedVisionGrid{ + Grid: test.g, + VisionRadius: test.radius, + Location: test.path[0], + } + if test.remember { + l.Known = make(map[int]bool) + } + l.Grid.AllowDiagonal = test.diag + + x, y := l.XY(test.path[0]) + for _, u := range l.Nodes() { + ux, uy := l.XY(u) + uNear := math.Hypot(x-ux, y-uy) <= test.radius + for _, v := range l.Nodes() { + vx, vy := l.XY(v) + vNear := math.Hypot(x-vx, y-vy) <= test.radius + if u.ID() == v.ID() && l.HasEdgeBetween(u, v) { + t.Errorf("unexpected self edge: %v -- %v", u, v) + } + if !uNear && !vNear && !l.HasEdgeBetween(u, v) && couldConnectIn(l, u, v) { + t.Errorf("unexpected pessimism: no hope in distant edge between %v and %v for test %d", + u, v, i) + } + if (uNear && vNear) && l.HasEdgeBetween(u, v) != l.Grid.HasEdgeBetween(u, v) { + t.Errorf("unrealistic optimism: disagreement about edge between %v and %v for test %d: got:%t want:%t", + u, v, i, l.HasEdgeBetween(u, v), l.Grid.HasEdgeBetween(u, v)) + } + } + } + + var got []changes + for _, n := range test.path { + new, old := l.MoveTo(n) + got = append(got, changes{n: n, new: asConcreteEdges(new, l), old: asConcreteEdges(old, l)}) + } + if !reflect.DeepEqual(got, test.want) { + t.Errorf("unexpected walk for test %d:\ngot: %+v\nwant:%+v", i, got, test.want) + } + } +} + +func asConcreteEdges(changes []graph.Edge, in graph.Weighter) []simple.Edge { + if changes == nil { + return nil + } + we := make([]simple.Edge, len(changes)) + for i, e := range changes { + we[i].F = e.From() + we[i].T = e.To() + w, ok := in.Weight(e.From(), e.To()) + if !ok && !math.IsInf(w, 1) { + panic("unexpected invalid finite weight") + } + we[i].W = w + } + return we +} + +func couldConnectIn(l *LimitedVisionGrid, u, v graph.Node) bool { + if u.ID() == v.ID() { + return false + } + + ur, uc := l.RowCol(u.ID()) + vr, vc := l.RowCol(v.ID()) + if abs(ur-vr) > 1 || abs(uc-vc) > 1 { + return false + } + if (ur != vr 
|| uc != vc) && !l.Grid.AllowDiagonal { + return false + } + + if !l.Known[u.ID()] && !l.Known[v.ID()] { + return true + } + if l.Known[u.ID()] && !l.Grid.HasOpen(u) { + return false + } + if l.Known[v.ID()] && !l.Grid.HasOpen(v) { + return false + } + + return true +} diff --git a/graph/path/internal/testgraphs/shortest.go b/graph/path/internal/testgraphs/shortest.go new file mode 100644 index 00000000..f512e382 --- /dev/null +++ b/graph/path/internal/testgraphs/shortest.go @@ -0,0 +1,654 @@ +// Copyright ©2014 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testgraphs + +import ( + "fmt" + "math" + + "github.com/gonum/graph" + "github.com/gonum/graph/simple" +) + +func init() { + for _, test := range ShortestPathTests { + if len(test.WantPaths) != 1 && test.HasUniquePath { + panic(fmt.Sprintf("%q: bad shortest path test: non-unique paths marked unique", test.Name)) + } + } +} + +// ShortestPathTests are graphs used to test the static shortest path routines in path: BellmanFord, +// DijkstraAllPaths, DijkstraFrom, FloydWarshall and Johnson, and the static degenerate case for the +// dynamic shortest path routine in path/dynamic: DStarLite. +var ShortestPathTests = []struct { + Name string + Graph func() graph.EdgeSetter + Edges []simple.Edge + HasNegativeWeight bool + HasNegativeCycle bool + + Query simple.Edge + Weight float64 + WantPaths [][]int + HasUniquePath bool + + NoPathFor simple.Edge +}{ + // Positive weighted graphs. + { + Name: "empty directed", + Graph: func() graph.EdgeSetter { return simple.NewDirectedGraph(0, math.Inf(1)) }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(1)}, + Weight: math.Inf(1), + + NoPathFor: simple.Edge{F: simple.Node(0), T: simple.Node(1)}, + }, + { + Name: "empty undirected", + Graph: func() graph.EdgeSetter { return simple.NewUndirectedGraph(0, math.Inf(1)) }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(1)}, + Weight: math.Inf(1), + + NoPathFor: simple.Edge{F: simple.Node(0), T: simple.Node(1)}, + }, + { + Name: "one edge directed", + Graph: func() graph.EdgeSetter { return simple.NewDirectedGraph(0, math.Inf(1)) }, + Edges: []simple.Edge{ + {F: simple.Node(0), T: simple.Node(1), W: 1}, + }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(1)}, + Weight: 1, + WantPaths: [][]int{ + {0, 1}, + }, + HasUniquePath: true, + + NoPathFor: simple.Edge{F: simple.Node(2), T: simple.Node(3)}, + }, + { + Name: "one edge self directed", + Graph: func() graph.EdgeSetter { return simple.NewDirectedGraph(0, math.Inf(1)) }, + Edges: []simple.Edge{ + {F: simple.Node(0), T: simple.Node(1), W: 1}, + }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(0)}, + Weight: 0, + WantPaths: [][]int{ + {0}, + }, + HasUniquePath: true, + + NoPathFor: simple.Edge{F: simple.Node(2), T: simple.Node(3)}, + }, + { + Name: "one edge undirected", + Graph: func() graph.EdgeSetter { return simple.NewUndirectedGraph(0, math.Inf(1)) }, + Edges: []simple.Edge{ + {F: simple.Node(0), T: simple.Node(1), W: 1}, + }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(1)}, + Weight: 1, + WantPaths: [][]int{ + {0, 1}, + }, + HasUniquePath: true, + + NoPathFor: simple.Edge{F: simple.Node(2), T: simple.Node(3)}, + }, + { + Name: "two paths directed", + Graph: func() graph.EdgeSetter { return simple.NewDirectedGraph(0, math.Inf(1)) }, + Edges: []simple.Edge{ + {F: simple.Node(0), T: simple.Node(2), W: 2}, + {F: simple.Node(0), T: simple.Node(1), W: 
1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(2)}, + Weight: 2, + WantPaths: [][]int{ + {0, 1, 2}, + {0, 2}, + }, + HasUniquePath: false, + + NoPathFor: simple.Edge{F: simple.Node(2), T: simple.Node(1)}, + }, + { + Name: "two paths undirected", + Graph: func() graph.EdgeSetter { return simple.NewUndirectedGraph(0, math.Inf(1)) }, + Edges: []simple.Edge{ + {F: simple.Node(0), T: simple.Node(2), W: 2}, + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(2)}, + Weight: 2, + WantPaths: [][]int{ + {0, 1, 2}, + {0, 2}, + }, + HasUniquePath: false, + + NoPathFor: simple.Edge{F: simple.Node(2), T: simple.Node(4)}, + }, + { + Name: "confounding paths directed", + Graph: func() graph.EdgeSetter { return simple.NewDirectedGraph(0, math.Inf(1)) }, + Edges: []simple.Edge{ + // Add a path from 0->5 of weight 4 + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: 1}, + {F: simple.Node(3), T: simple.Node(5), W: 1}, + + // Add direct edge to goal of weight 4 + {F: simple.Node(0), T: simple.Node(5), W: 4}, + + // Add edge to a node that's still optimal + {F: simple.Node(0), T: simple.Node(2), W: 2}, + + // Add edge to 3 that's overpriced + {F: simple.Node(0), T: simple.Node(3), W: 4}, + + // Add very cheap edge to 4 which is a dead end + {F: simple.Node(0), T: simple.Node(4), W: 0.25}, + }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(5)}, + Weight: 4, + WantPaths: [][]int{ + {0, 1, 2, 3, 5}, + {0, 2, 3, 5}, + {0, 5}, + }, + HasUniquePath: false, + + NoPathFor: simple.Edge{F: simple.Node(4), T: simple.Node(5)}, + }, + { + Name: "confounding paths undirected", + Graph: func() graph.EdgeSetter { return simple.NewUndirectedGraph(0, math.Inf(1)) }, + Edges: []simple.Edge{ + // Add a path from 0->5 of weight 4 + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: 1}, + {F: simple.Node(3), T: simple.Node(5), W: 1}, + + // Add direct edge to goal of weight 4 + {F: simple.Node(0), T: simple.Node(5), W: 4}, + + // Add edge to a node that's still optimal + {F: simple.Node(0), T: simple.Node(2), W: 2}, + + // Add edge to 3 that's overpriced + {F: simple.Node(0), T: simple.Node(3), W: 4}, + + // Add very cheap edge to 4 which is a dead end + {F: simple.Node(0), T: simple.Node(4), W: 0.25}, + }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(5)}, + Weight: 4, + WantPaths: [][]int{ + {0, 1, 2, 3, 5}, + {0, 2, 3, 5}, + {0, 5}, + }, + HasUniquePath: false, + + NoPathFor: simple.Edge{F: simple.Node(5), T: simple.Node(6)}, + }, + { + Name: "confounding paths directed 2-step", + Graph: func() graph.EdgeSetter { return simple.NewDirectedGraph(0, math.Inf(1)) }, + Edges: []simple.Edge{ + // Add a path from 0->5 of weight 4 + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: 1}, + {F: simple.Node(3), T: simple.Node(5), W: 1}, + + // Add two step path to goal of weight 4 + {F: simple.Node(0), T: simple.Node(6), W: 2}, + {F: simple.Node(6), T: simple.Node(5), W: 2}, + + // Add edge to a node that's still optimal + {F: simple.Node(0), T: simple.Node(2), W: 2}, + + // Add edge to 3 that's overpriced + {F: simple.Node(0), T: simple.Node(3), W: 4}, + + // Add very cheap edge to 4 which 
is a dead end + {F: simple.Node(0), T: simple.Node(4), W: 0.25}, + }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(5)}, + Weight: 4, + WantPaths: [][]int{ + {0, 1, 2, 3, 5}, + {0, 2, 3, 5}, + {0, 6, 5}, + }, + HasUniquePath: false, + + NoPathFor: simple.Edge{F: simple.Node(4), T: simple.Node(5)}, + }, + { + Name: "confounding paths undirected 2-step", + Graph: func() graph.EdgeSetter { return simple.NewUndirectedGraph(0, math.Inf(1)) }, + Edges: []simple.Edge{ + // Add a path from 0->5 of weight 4 + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: 1}, + {F: simple.Node(3), T: simple.Node(5), W: 1}, + + // Add two step path to goal of weight 4 + {F: simple.Node(0), T: simple.Node(6), W: 2}, + {F: simple.Node(6), T: simple.Node(5), W: 2}, + + // Add edge to a node that's still optimal + {F: simple.Node(0), T: simple.Node(2), W: 2}, + + // Add edge to 3 that's overpriced + {F: simple.Node(0), T: simple.Node(3), W: 4}, + + // Add very cheap edge to 4 which is a dead end + {F: simple.Node(0), T: simple.Node(4), W: 0.25}, + }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(5)}, + Weight: 4, + WantPaths: [][]int{ + {0, 1, 2, 3, 5}, + {0, 2, 3, 5}, + {0, 6, 5}, + }, + HasUniquePath: false, + + NoPathFor: simple.Edge{F: simple.Node(5), T: simple.Node(7)}, + }, + { + Name: "zero-weight cycle directed", + Graph: func() graph.EdgeSetter { return simple.NewDirectedGraph(0, math.Inf(1)) }, + Edges: []simple.Edge{ + // Add a path from 0->4 of weight 4 + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: 1}, + {F: simple.Node(3), T: simple.Node(4), W: 1}, + + // Add a zero-weight cycle. + {F: simple.Node(1), T: simple.Node(5), W: 0}, + {F: simple.Node(5), T: simple.Node(1), W: 0}, + }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(4)}, + Weight: 4, + WantPaths: [][]int{ + {0, 1, 2, 3, 4}, + }, + HasUniquePath: false, + + NoPathFor: simple.Edge{F: simple.Node(4), T: simple.Node(5)}, + }, + { + Name: "zero-weight cycle^2 directed", + Graph: func() graph.EdgeSetter { return simple.NewDirectedGraph(0, math.Inf(1)) }, + Edges: []simple.Edge{ + // Add a path from 0->4 of weight 4 + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: 1}, + {F: simple.Node(3), T: simple.Node(4), W: 1}, + + // Add a zero-weight cycle. + {F: simple.Node(1), T: simple.Node(5), W: 0}, + {F: simple.Node(5), T: simple.Node(1), W: 0}, + // With its own zero-weight cycle. + {F: simple.Node(5), T: simple.Node(6), W: 0}, + {F: simple.Node(6), T: simple.Node(5), W: 0}, + }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(4)}, + Weight: 4, + WantPaths: [][]int{ + {0, 1, 2, 3, 4}, + }, + HasUniquePath: false, + + NoPathFor: simple.Edge{F: simple.Node(4), T: simple.Node(5)}, + }, + { + Name: "zero-weight cycle^2 confounding directed", + Graph: func() graph.EdgeSetter { return simple.NewDirectedGraph(0, math.Inf(1)) }, + Edges: []simple.Edge{ + // Add a path from 0->4 of weight 4 + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: 1}, + {F: simple.Node(3), T: simple.Node(4), W: 1}, + + // Add a zero-weight cycle. + {F: simple.Node(1), T: simple.Node(5), W: 0}, + {F: simple.Node(5), T: simple.Node(1), W: 0}, + // With its own zero-weight cycle. 
+ {F: simple.Node(5), T: simple.Node(6), W: 0}, + {F: simple.Node(6), T: simple.Node(5), W: 0}, + // But leading to the target. + {F: simple.Node(5), T: simple.Node(4), W: 3}, + }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(4)}, + Weight: 4, + WantPaths: [][]int{ + {0, 1, 2, 3, 4}, + {0, 1, 5, 4}, + }, + HasUniquePath: false, + + NoPathFor: simple.Edge{F: simple.Node(4), T: simple.Node(5)}, + }, + { + Name: "zero-weight cycle^3 directed", + Graph: func() graph.EdgeSetter { return simple.NewDirectedGraph(0, math.Inf(1)) }, + Edges: []simple.Edge{ + // Add a path from 0->4 of weight 4 + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: 1}, + {F: simple.Node(3), T: simple.Node(4), W: 1}, + + // Add a zero-weight cycle. + {F: simple.Node(1), T: simple.Node(5), W: 0}, + {F: simple.Node(5), T: simple.Node(1), W: 0}, + // With its own zero-weight cycle. + {F: simple.Node(5), T: simple.Node(6), W: 0}, + {F: simple.Node(6), T: simple.Node(5), W: 0}, + // With its own zero-weight cycle. + {F: simple.Node(6), T: simple.Node(7), W: 0}, + {F: simple.Node(7), T: simple.Node(6), W: 0}, + }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(4)}, + Weight: 4, + WantPaths: [][]int{ + {0, 1, 2, 3, 4}, + }, + HasUniquePath: false, + + NoPathFor: simple.Edge{F: simple.Node(4), T: simple.Node(5)}, + }, + { + Name: "zero-weight 3·cycle^2 confounding directed", + Graph: func() graph.EdgeSetter { return simple.NewDirectedGraph(0, math.Inf(1)) }, + Edges: []simple.Edge{ + // Add a path from 0->4 of weight 4 + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: 1}, + {F: simple.Node(3), T: simple.Node(4), W: 1}, + + // Add a zero-weight cycle. + {F: simple.Node(1), T: simple.Node(5), W: 0}, + {F: simple.Node(5), T: simple.Node(1), W: 0}, + // With 3 of its own zero-weight cycles. + {F: simple.Node(5), T: simple.Node(6), W: 0}, + {F: simple.Node(6), T: simple.Node(5), W: 0}, + {F: simple.Node(5), T: simple.Node(7), W: 0}, + {F: simple.Node(7), T: simple.Node(5), W: 0}, + // Each leading to the target. + {F: simple.Node(5), T: simple.Node(4), W: 3}, + {F: simple.Node(6), T: simple.Node(4), W: 3}, + {F: simple.Node(7), T: simple.Node(4), W: 3}, + }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(4)}, + Weight: 4, + WantPaths: [][]int{ + {0, 1, 2, 3, 4}, + {0, 1, 5, 4}, + {0, 1, 5, 6, 4}, + {0, 1, 5, 7, 4}, + }, + HasUniquePath: false, + + NoPathFor: simple.Edge{F: simple.Node(4), T: simple.Node(5)}, + }, + { + Name: "zero-weight reversed 3·cycle^2 confounding directed", + Graph: func() graph.EdgeSetter { return simple.NewDirectedGraph(0, math.Inf(1)) }, + Edges: []simple.Edge{ + // Add a path from 0->4 of weight 4 + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: 1}, + {F: simple.Node(3), T: simple.Node(4), W: 1}, + + // Add a zero-weight cycle. + {F: simple.Node(3), T: simple.Node(5), W: 0}, + {F: simple.Node(5), T: simple.Node(3), W: 0}, + // With 3 of its own zero-weight cycles. + {F: simple.Node(5), T: simple.Node(6), W: 0}, + {F: simple.Node(6), T: simple.Node(5), W: 0}, + {F: simple.Node(5), T: simple.Node(7), W: 0}, + {F: simple.Node(7), T: simple.Node(5), W: 0}, + // Each leading from the source. 
+ {F: simple.Node(0), T: simple.Node(5), W: 3}, + {F: simple.Node(0), T: simple.Node(6), W: 3}, + {F: simple.Node(0), T: simple.Node(7), W: 3}, + }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(4)}, + Weight: 4, + WantPaths: [][]int{ + {0, 1, 2, 3, 4}, + {0, 5, 3, 4}, + {0, 6, 5, 3, 4}, + {0, 7, 5, 3, 4}, + }, + HasUniquePath: false, + + NoPathFor: simple.Edge{F: simple.Node(4), T: simple.Node(5)}, + }, + { + Name: "zero-weight |V|·cycle^(n/|V|) directed", + Graph: func() graph.EdgeSetter { return simple.NewDirectedGraph(0, math.Inf(1)) }, + Edges: func() []simple.Edge { + e := []simple.Edge{ + // Add a path from 0->4 of weight 4 + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: 1}, + {F: simple.Node(3), T: simple.Node(4), W: 1}, + } + next := len(e) + 1 + + // Add n zero-weight cycles. + const n = 100 + for i := 0; i < n; i++ { + e = append(e, + simple.Edge{F: simple.Node(next + i), T: simple.Node(i), W: 0}, + simple.Edge{F: simple.Node(i), T: simple.Node(next + i), W: 0}, + ) + } + return e + }(), + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(4)}, + Weight: 4, + WantPaths: [][]int{ + {0, 1, 2, 3, 4}, + }, + HasUniquePath: false, + + NoPathFor: simple.Edge{F: simple.Node(4), T: simple.Node(5)}, + }, + { + Name: "zero-weight n·cycle directed", + Graph: func() graph.EdgeSetter { return simple.NewDirectedGraph(0, math.Inf(1)) }, + Edges: func() []simple.Edge { + e := []simple.Edge{ + // Add a path from 0->4 of weight 4 + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: 1}, + {F: simple.Node(3), T: simple.Node(4), W: 1}, + } + next := len(e) + 1 + + // Add n zero-weight cycles. + const n = 100 + for i := 0; i < n; i++ { + e = append(e, + simple.Edge{F: simple.Node(next + i), T: simple.Node(1), W: 0}, + simple.Edge{F: simple.Node(1), T: simple.Node(next + i), W: 0}, + ) + } + return e + }(), + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(4)}, + Weight: 4, + WantPaths: [][]int{ + {0, 1, 2, 3, 4}, + }, + HasUniquePath: false, + + NoPathFor: simple.Edge{F: simple.Node(4), T: simple.Node(5)}, + }, + { + Name: "zero-weight bi-directional tree with single exit directed", + Graph: func() graph.EdgeSetter { return simple.NewDirectedGraph(0, math.Inf(1)) }, + Edges: func() []simple.Edge { + e := []simple.Edge{ + // Add a path from 0->4 of weight 4 + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: 1}, + {F: simple.Node(3), T: simple.Node(4), W: 1}, + } + + // Make a bi-directional tree rooted at node 2 with + // a single exit to node 4 and co-equal cost from + // 2 to 4. 
+ const ( + depth = 4 + branching = 4 + ) + + next := len(e) + 1 + src := 2 + var i, last int + for l := 0; l < depth; l++ { + for i = 0; i < branching; i++ { + last = next + i + e = append(e, simple.Edge{F: simple.Node(src), T: simple.Node(last), W: 0}) + e = append(e, simple.Edge{F: simple.Node(last), T: simple.Node(src), W: 0}) + } + src = next + 1 + next += branching + } + e = append(e, simple.Edge{F: simple.Node(last), T: simple.Node(4), W: 2}) + return e + }(), + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(4)}, + Weight: 4, + WantPaths: [][]int{ + {0, 1, 2, 3, 4}, + {0, 1, 2, 6, 10, 14, 20, 4}, + }, + HasUniquePath: false, + + NoPathFor: simple.Edge{F: simple.Node(4), T: simple.Node(5)}, + }, + + // Negative weighted graphs. + { + Name: "one edge directed negative", + Graph: func() graph.EdgeSetter { return simple.NewDirectedGraph(0, math.Inf(1)) }, + Edges: []simple.Edge{ + {F: simple.Node(0), T: simple.Node(1), W: -1}, + }, + HasNegativeWeight: true, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(1)}, + Weight: -1, + WantPaths: [][]int{ + {0, 1}, + }, + HasUniquePath: true, + + NoPathFor: simple.Edge{F: simple.Node(2), T: simple.Node(3)}, + }, + { + Name: "one edge undirected negative", + Graph: func() graph.EdgeSetter { return simple.NewUndirectedGraph(0, math.Inf(1)) }, + Edges: []simple.Edge{ + {F: simple.Node(0), T: simple.Node(1), W: -1}, + }, + HasNegativeWeight: true, + HasNegativeCycle: true, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(1)}, + }, + { + Name: "wp graph negative", // http://en.wikipedia.org/w/index.php?title=Johnson%27s_algorithm&oldid=564595231 + Graph: func() graph.EdgeSetter { return simple.NewDirectedGraph(0, math.Inf(1)) }, + Edges: []simple.Edge{ + {F: simple.Node('w'), T: simple.Node('z'), W: 2}, + {F: simple.Node('x'), T: simple.Node('w'), W: 6}, + {F: simple.Node('x'), T: simple.Node('y'), W: 3}, + {F: simple.Node('y'), T: simple.Node('w'), W: 4}, + {F: simple.Node('y'), T: simple.Node('z'), W: 5}, + {F: simple.Node('z'), T: simple.Node('x'), W: -7}, + {F: simple.Node('z'), T: simple.Node('y'), W: -3}, + }, + HasNegativeWeight: true, + + Query: simple.Edge{F: simple.Node('z'), T: simple.Node('y')}, + Weight: -4, + WantPaths: [][]int{ + {'z', 'x', 'y'}, + }, + HasUniquePath: true, + + NoPathFor: simple.Edge{F: simple.Node(2), T: simple.Node(3)}, + }, + { + Name: "roughgarden negative", + Graph: func() graph.EdgeSetter { return simple.NewDirectedGraph(0, math.Inf(1)) }, + Edges: []simple.Edge{ + {F: simple.Node('a'), T: simple.Node('b'), W: -2}, + {F: simple.Node('b'), T: simple.Node('c'), W: -1}, + {F: simple.Node('c'), T: simple.Node('a'), W: 4}, + {F: simple.Node('c'), T: simple.Node('x'), W: 2}, + {F: simple.Node('c'), T: simple.Node('y'), W: -3}, + {F: simple.Node('z'), T: simple.Node('x'), W: 1}, + {F: simple.Node('z'), T: simple.Node('y'), W: -4}, + }, + HasNegativeWeight: true, + + Query: simple.Edge{F: simple.Node('a'), T: simple.Node('y')}, + Weight: -6, + WantPaths: [][]int{ + {'a', 'b', 'c', 'y'}, + }, + HasUniquePath: true, + + NoPathFor: simple.Edge{F: simple.Node(2), T: simple.Node(3)}, + }, +} diff --git a/graph/path/johnson_apsp.go b/graph/path/johnson_apsp.go new file mode 100644 index 00000000..c69dfb07 --- /dev/null +++ b/graph/path/johnson_apsp.go @@ -0,0 +1,138 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package path + +import ( + "math" + "math/rand" + + "github.com/gonum/graph" + "github.com/gonum/graph/simple" +) + +// JohnsonAllPaths returns a shortest-path tree for shortest paths in the graph g. +// If the graph does not implement graph.Weighter, UniformCost is used. +// +// The time complexity of JohnsonAllPaths is O(|V|.|E|+|V|^2.log|V|). +func JohnsonAllPaths(g graph.Graph) (paths AllShortest, ok bool) { + jg := johnsonWeightAdjuster{ + g: g, + from: g.From, + edgeTo: g.Edge, + } + if wg, ok := g.(graph.Weighter); ok { + jg.weight = wg.Weight + } else { + jg.weight = UniformCost(g) + } + + paths = newAllShortest(g.Nodes(), false) + + sign := -1 + for { + // Choose a random node ID until we find + // one that is not in g. + jg.q = sign * rand.Int() + if _, exists := paths.indexOf[jg.q]; !exists { + break + } + sign *= -1 + } + + jg.bellmanFord = true + jg.adjustBy, ok = BellmanFordFrom(johnsonGraphNode(jg.q), jg) + if !ok { + return paths, false + } + + jg.bellmanFord = false + dijkstraAllPaths(jg, paths) + + for i, u := range paths.nodes { + hu := jg.adjustBy.WeightTo(u) + for j, v := range paths.nodes { + if i == j { + continue + } + hv := jg.adjustBy.WeightTo(v) + paths.dist.Set(i, j, paths.dist.At(i, j)-hu+hv) + } + } + + return paths, ok +} + +type johnsonWeightAdjuster struct { + q int + g graph.Graph + + from func(graph.Node) []graph.Node + edgeTo func(graph.Node, graph.Node) graph.Edge + weight Weighting + + bellmanFord bool + adjustBy Shortest +} + +var ( + // johnsonWeightAdjuster has the behaviour + // of a directed graph, but we don't need + // to be explicit with the type since it + // is not exported. + _ graph.Graph = johnsonWeightAdjuster{} + _ graph.Weighter = johnsonWeightAdjuster{} +) + +func (g johnsonWeightAdjuster) Has(n graph.Node) bool { + if g.bellmanFord && n.ID() == g.q { + return true + } + return g.g.Has(n) + +} + +func (g johnsonWeightAdjuster) Nodes() []graph.Node { + if g.bellmanFord { + return append(g.g.Nodes(), johnsonGraphNode(g.q)) + } + return g.g.Nodes() +} + +func (g johnsonWeightAdjuster) From(n graph.Node) []graph.Node { + if g.bellmanFord && n.ID() == g.q { + return g.g.Nodes() + } + return g.from(n) +} + +func (g johnsonWeightAdjuster) Edge(u, v graph.Node) graph.Edge { + if g.bellmanFord && u.ID() == g.q && g.g.Has(v) { + return simple.Edge{F: johnsonGraphNode(g.q), T: v} + } + return g.edgeTo(u, v) +} + +func (g johnsonWeightAdjuster) Weight(x, y graph.Node) (w float64, ok bool) { + if g.bellmanFord { + switch g.q { + case x.ID(): + return 0, true + case y.ID(): + return math.Inf(1), false + default: + return g.weight(x, y) + } + } + w, ok = g.weight(x, y) + return w + g.adjustBy.WeightTo(x) - g.adjustBy.WeightTo(y), ok +} + +func (johnsonWeightAdjuster) HasEdgeBetween(_, _ graph.Node) bool { + panic("path: unintended use of johnsonWeightAdjuster") +} + +type johnsonGraphNode int + +func (n johnsonGraphNode) ID() int { return int(n) } diff --git a/graph/path/johnson_apsp_test.go b/graph/path/johnson_apsp_test.go new file mode 100644 index 00000000..8fe05d13 --- /dev/null +++ b/graph/path/johnson_apsp_test.go @@ -0,0 +1,102 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
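(Editor's aside, not part of this changeset: the sketch below shows one plausible way client code could call JohnsonAllPaths, reusing the simple graph constructors that appear elsewhere in this diff. The node IDs, weights, and expected output are invented purely for illustration.)

package main

import (
	"fmt"
	"math"

	"github.com/gonum/graph/path"
	"github.com/gonum/graph/simple"
)

func main() {
	// Build a small directed graph: 0 -> 1 (weight 1), 1 -> 2 (weight 2).
	g := simple.NewDirectedGraph(0, math.Inf(1))
	g.SetEdge(simple.Edge{F: simple.Node(0), T: simple.Node(1), W: 1})
	g.SetEdge(simple.Edge{F: simple.Node(1), T: simple.Node(2), W: 2})

	// JohnsonAllPaths reports ok=false if a negative cycle is present.
	paths, ok := path.JohnsonAllPaths(g)
	if !ok {
		fmt.Println("negative cycle detected")
		return
	}

	// Between returns one shortest path, its weight, and whether it is unique.
	p, w, unique := paths.Between(simple.Node(0), simple.Node(2))
	fmt.Println(p, w, unique) // expected: [0 1 2] 3 true
}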
+ +package path + +import ( + "math" + "reflect" + "sort" + "testing" + + "github.com/gonum/graph" + "github.com/gonum/graph/internal/ordered" + "github.com/gonum/graph/path/internal/testgraphs" +) + +func TestJohnsonAllPaths(t *testing.T) { + for _, test := range testgraphs.ShortestPathTests { + g := test.Graph() + for _, e := range test.Edges { + g.SetEdge(e) + } + + pt, ok := JohnsonAllPaths(g.(graph.Graph)) + if test.HasNegativeCycle { + if ok { + t.Errorf("%q: expected negative cycle", test.Name) + } + continue + } + if !ok { + t.Fatalf("%q: unexpected negative cycle", test.Name) + } + + // Check all random paths returned are OK. + for i := 0; i < 10; i++ { + p, weight, unique := pt.Between(test.Query.From(), test.Query.To()) + if weight != test.Weight { + t.Errorf("%q: unexpected weight from Between: got:%f want:%f", + test.Name, weight, test.Weight) + } + if weight := pt.Weight(test.Query.From(), test.Query.To()); weight != test.Weight { + t.Errorf("%q: unexpected weight from Weight: got:%f want:%f", + test.Name, weight, test.Weight) + } + if unique != test.HasUniquePath { + t.Errorf("%q: unexpected number of paths: got: unique=%t want: unique=%t", + test.Name, unique, test.HasUniquePath) + } + + var got []int + for _, n := range p { + got = append(got, n.ID()) + } + ok := len(got) == 0 && len(test.WantPaths) == 0 + for _, sp := range test.WantPaths { + if reflect.DeepEqual(got, sp) { + ok = true + break + } + } + if !ok { + t.Errorf("%q: unexpected shortest path:\ngot: %v\nwant from:%v", + test.Name, p, test.WantPaths) + } + } + + np, weight, unique := pt.Between(test.NoPathFor.From(), test.NoPathFor.To()) + if np != nil || !math.IsInf(weight, 1) || unique != false { + t.Errorf("%q: unexpected path:\ngot: path=%v weight=%f unique=%t\nwant:path= weight=+Inf unique=false", + test.Name, np, weight, unique) + } + + paths, weight := pt.AllBetween(test.Query.From(), test.Query.To()) + if weight != test.Weight { + t.Errorf("%q: unexpected weight from Between: got:%f want:%f", + test.Name, weight, test.Weight) + } + + var got [][]int + if len(paths) != 0 { + got = make([][]int, len(paths)) + } + for i, p := range paths { + for _, v := range p { + got[i] = append(got[i], v.ID()) + } + } + sort.Sort(ordered.BySliceValues(got)) + if !reflect.DeepEqual(got, test.WantPaths) { + t.Errorf("testing %q: unexpected shortest paths:\ngot: %v\nwant:%v", + test.Name, got, test.WantPaths) + } + + nps, weight := pt.AllBetween(test.NoPathFor.From(), test.NoPathFor.To()) + if nps != nil || !math.IsInf(weight, 1) { + t.Errorf("%q: unexpected path:\ngot: paths=%v weight=%f\nwant:path= weight=+Inf", + test.Name, nps, weight) + } + } +} diff --git a/graph/path/shortest.go b/graph/path/shortest.go new file mode 100644 index 00000000..97f7e49f --- /dev/null +++ b/graph/path/shortest.go @@ -0,0 +1,319 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package path + +import ( + "math" + "math/rand" + + "github.com/gonum/graph" + "github.com/gonum/matrix/mat64" +) + +// Shortest is a shortest-path tree created by the BellmanFordFrom or DijkstraFrom +// single-source shortest path functions. +type Shortest struct { + // from holds the source node given to + // DijkstraFrom. + from graph.Node + + // nodes hold the nodes of the analysed + // graph. 
+ nodes []graph.Node + // indexOf contains a mapping between + // the id-dense representation of the + // graph and the potentially id-sparse + // nodes held in nodes. + indexOf map[int]int + + // dist and next represent the shortest + // paths between nodes. + // + // Indices into dist and next are + // mapped through indexOf. + // + // dist contains the distances + // from the from node for each + // node in the graph. + dist []float64 + // next contains the shortest-path + // tree of the graph. The index is a + // linear mapping of to-dense-id. + next []int +} + +func newShortestFrom(u graph.Node, nodes []graph.Node) Shortest { + indexOf := make(map[int]int, len(nodes)) + uid := u.ID() + for i, n := range nodes { + indexOf[n.ID()] = i + if n.ID() == uid { + u = n + } + } + + p := Shortest{ + from: u, + + nodes: nodes, + indexOf: indexOf, + + dist: make([]float64, len(nodes)), + next: make([]int, len(nodes)), + } + for i := range nodes { + p.dist[i] = math.Inf(1) + p.next[i] = -1 + } + p.dist[indexOf[uid]] = 0 + + return p +} + +func (p Shortest) set(to int, weight float64, mid int) { + p.dist[to] = weight + p.next[to] = mid +} + +// From returns the starting node of the paths held by the Shortest. +func (p Shortest) From() graph.Node { return p.from } + +// WeightTo returns the weight of the minimum path to v. +func (p Shortest) WeightTo(v graph.Node) float64 { + to, toOK := p.indexOf[v.ID()] + if !toOK { + return math.Inf(1) + } + return p.dist[to] +} + +// To returns a shortest path to v and the weight of the path. +func (p Shortest) To(v graph.Node) (path []graph.Node, weight float64) { + to, toOK := p.indexOf[v.ID()] + if !toOK || math.IsInf(p.dist[to], 1) { + return nil, math.Inf(1) + } + from := p.indexOf[p.from.ID()] + path = []graph.Node{p.nodes[to]} + for to != from { + path = append(path, p.nodes[p.next[to]]) + to = p.next[to] + } + reverse(path) + return path, p.dist[p.indexOf[v.ID()]] +} + +// AllShortest is a shortest-path tree created by the DijkstraAllPaths, FloydWarshall +// or JohnsonAllPaths all-pairs shortest paths functions. +type AllShortest struct { + // nodes hold the nodes of the analysed + // graph. + nodes []graph.Node + // indexOf contains a mapping between + // the id-dense representation of the + // graph and the potentially id-sparse + // nodes held in nodes. + indexOf map[int]int + + // dist, next and forward represent + // the shortest paths between nodes. + // + // Indices into dist and next are + // mapped through indexOf. + // + // dist contains the pairwise + // distances between nodes. + dist *mat64.Dense + // next contains the shortest-path + // tree of the graph. The first index + // is a linear mapping of from-dense-id + // and to-dense-id, to-major with a + // stride equal to len(nodes); the + // slice indexed to is the list of + // intermediates leading from the 'from' + // node to the 'to' node represented + // by dense id. + // The interpretation of next is + // dependent on the state of forward. + next [][]int + // forward indicates the direction of + // path reconstruction. Forward + // reconstruction is used for Floyd- + // Warshall and reverse is used for + // Dijkstra. 
+ forward bool +} + +func newAllShortest(nodes []graph.Node, forward bool) AllShortest { + indexOf := make(map[int]int, len(nodes)) + for i, n := range nodes { + indexOf[n.ID()] = i + } + dist := make([]float64, len(nodes)*len(nodes)) + for i := range dist { + dist[i] = math.Inf(1) + } + return AllShortest{ + nodes: nodes, + indexOf: indexOf, + + dist: mat64.NewDense(len(nodes), len(nodes), dist), + next: make([][]int, len(nodes)*len(nodes)), + forward: forward, + } +} + +func (p AllShortest) at(from, to int) (mid []int) { + return p.next[from+to*len(p.nodes)] +} + +func (p AllShortest) set(from, to int, weight float64, mid ...int) { + p.dist.Set(from, to, weight) + p.next[from+to*len(p.nodes)] = append(p.next[from+to*len(p.nodes)][:0], mid...) +} + +func (p AllShortest) add(from, to int, mid ...int) { +loop: // These are likely to be rare, so just loop over collisions. + for _, k := range mid { + for _, v := range p.next[from+to*len(p.nodes)] { + if k == v { + continue loop + } + } + p.next[from+to*len(p.nodes)] = append(p.next[from+to*len(p.nodes)], k) + } +} + +// Weight returns the weight of the minimum path between u and v. +func (p AllShortest) Weight(u, v graph.Node) float64 { + from, fromOK := p.indexOf[u.ID()] + to, toOK := p.indexOf[v.ID()] + if !fromOK || !toOK { + return math.Inf(1) + } + return p.dist.At(from, to) +} + +// Between returns a shortest path from u to v and the weight of the path. If more than +// one shortest path exists between u and v, a randomly chosen path will be returned and +// unique is returned false. If a cycle with zero weight exists in the path, it will not +// be included, but unique will be returned false. +func (p AllShortest) Between(u, v graph.Node) (path []graph.Node, weight float64, unique bool) { + from, fromOK := p.indexOf[u.ID()] + to, toOK := p.indexOf[v.ID()] + if !fromOK || !toOK || len(p.at(from, to)) == 0 { + if u.ID() == v.ID() { + return []graph.Node{p.nodes[from]}, 0, true + } + return nil, math.Inf(1), false + } + + seen := make([]int, len(p.nodes)) + for i := range seen { + seen[i] = -1 + } + var n graph.Node + if p.forward { + n = p.nodes[from] + seen[from] = 0 + } else { + n = p.nodes[to] + seen[to] = 0 + } + + path = []graph.Node{n} + weight = p.dist.At(from, to) + unique = true + + var next int + for from != to { + c := p.at(from, to) + if len(c) != 1 { + unique = false + next = c[rand.Intn(len(c))] + } else { + next = c[0] + } + if seen[next] >= 0 { + path = path[:seen[next]] + } + seen[next] = len(path) + path = append(path, p.nodes[next]) + if p.forward { + from = next + } else { + to = next + } + } + if !p.forward { + reverse(path) + } + + return path, weight, unique +} + +// AllBetween returns all shortest paths from u to v and the weight of the paths. Paths +// containing zero-weight cycles are not returned. 
+func (p AllShortest) AllBetween(u, v graph.Node) (paths [][]graph.Node, weight float64) { + from, fromOK := p.indexOf[u.ID()] + to, toOK := p.indexOf[v.ID()] + if !fromOK || !toOK || len(p.at(from, to)) == 0 { + if u.ID() == v.ID() { + return [][]graph.Node{{p.nodes[from]}}, 0 + } + return nil, math.Inf(1) + } + + var n graph.Node + if p.forward { + n = u + } else { + n = v + } + seen := make([]bool, len(p.nodes)) + paths = p.allBetween(from, to, seen, []graph.Node{n}, nil) + + return paths, p.dist.At(from, to) +} + +func (p AllShortest) allBetween(from, to int, seen []bool, path []graph.Node, paths [][]graph.Node) [][]graph.Node { + if p.forward { + seen[from] = true + } else { + seen[to] = true + } + if from == to { + if path == nil { + return paths + } + if !p.forward { + reverse(path) + } + return append(paths, path) + } + first := true + for _, n := range p.at(from, to) { + if seen[n] { + continue + } + if first { + path = append([]graph.Node(nil), path...) + first = false + } + if p.forward { + from = n + } else { + to = n + } + paths = p.allBetween(from, to, append([]bool(nil), seen...), append(path, p.nodes[n]), paths) + } + return paths +} + +func reverse(p []graph.Node) { + for i, j := 0, len(p)-1; i < j; i, j = i+1, j-1 { + p[i], p[j] = p[j], p[i] + } +} diff --git a/graph/path/spanning_tree.go b/graph/path/spanning_tree.go new file mode 100644 index 00000000..57a78eb6 --- /dev/null +++ b/graph/path/spanning_tree.go @@ -0,0 +1,182 @@ +// Copyright ©2014 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package path + +import ( + "container/heap" + "math" + "sort" + + "github.com/gonum/graph" + "github.com/gonum/graph/simple" +) + +// UndirectedWeighter is an undirected graph that returns edge weights. +type UndirectedWeighter interface { + graph.Undirected + graph.Weighter +} + +// Prim generates a minimum spanning tree of g by greedy tree extension, placing +// the result in the destination, dst. If the edge weights of g are distinct +// it will be the unique minimum spanning tree of g. The destination is not cleared +// first. The weight of the minimum spanning tree is returned. If g is not connected, +// a minimum spanning forest will be constructed in dst and the sum of minimum +// spanning tree weights will be returned. +func Prim(dst graph.UndirectedBuilder, g UndirectedWeighter) float64 { + nodes := g.Nodes() + if len(nodes) == 0 { + return 0 + } + + q := &primQueue{ + indexOf: make(map[int]int, len(nodes)-1), + nodes: make([]simple.Edge, 0, len(nodes)-1), + } + for _, u := range nodes[1:] { + heap.Push(q, simple.Edge{F: u, W: math.Inf(1)}) + } + + u := nodes[0] + for _, v := range g.From(u) { + w, ok := g.Weight(u, v) + if !ok { + panic("prim: unexpected invalid weight") + } + q.update(v, u, w) + } + + var w float64 + for q.Len() > 0 { + e := heap.Pop(q).(simple.Edge) + if e.To() != nil && g.HasEdgeBetween(e.From(), e.To()) { + dst.SetEdge(e) + w += e.Weight() + } + + u = e.From() + for _, n := range g.From(u) { + if key, ok := q.key(n); ok { + w, ok := g.Weight(u, n) + if !ok { + panic("prim: unexpected invalid weight") + } + if w < key { + q.update(n, u, w) + } + } + } + } + return w +} + +// primQueue is a Prim's priority queue. The priority queue is a +// queue of edge From nodes keyed on the minimum edge weight to +// a node in the set of nodes already connected to the minimum +// spanning forest. 
+type primQueue struct { + indexOf map[int]int + nodes []simple.Edge +} + +func (q *primQueue) Less(i, j int) bool { + return q.nodes[i].Weight() < q.nodes[j].Weight() +} + +func (q *primQueue) Swap(i, j int) { + q.indexOf[q.nodes[i].From().ID()] = j + q.indexOf[q.nodes[j].From().ID()] = i + q.nodes[i], q.nodes[j] = q.nodes[j], q.nodes[i] +} + +func (q *primQueue) Len() int { + return len(q.nodes) +} + +func (q *primQueue) Push(x interface{}) { + n := x.(simple.Edge) + q.indexOf[n.From().ID()] = len(q.nodes) + q.nodes = append(q.nodes, n) +} + +func (q *primQueue) Pop() interface{} { + n := q.nodes[len(q.nodes)-1] + q.nodes = q.nodes[:len(q.nodes)-1] + delete(q.indexOf, n.From().ID()) + return n +} + +// key returns the key for the node u and whether the node is +// in the queue. If the node is not in the queue, key is returned +// as +Inf. +func (q *primQueue) key(u graph.Node) (key float64, ok bool) { + i, ok := q.indexOf[u.ID()] + if !ok { + return math.Inf(1), false + } + return q.nodes[i].Weight(), ok +} + +// update updates u's position in the queue with the new closest +// MST-connected neighbour, v, and the key weight between u and v. +func (q *primQueue) update(u, v graph.Node, key float64) { + id := u.ID() + i, ok := q.indexOf[id] + if !ok { + return + } + q.nodes[i].T = v + q.nodes[i].W = key + heap.Fix(q, i) +} + +// UndirectedWeightLister is an undirected graph that returns edge weights and +// the set of edges in the graph. +type UndirectedWeightLister interface { + UndirectedWeighter + Edges() []graph.Edge +} + +// Kruskal generates a minimum spanning tree of g by greedy tree coalescence, placing +// the result in the destination, dst. If the edge weights of g are distinct +// it will be the unique minimum spanning tree of g. The destination is not cleared +// first. The weight of the minimum spanning tree is returned. If g is not connected, +// a minimum spanning forest will be constructed in dst and the sum of minimum +// spanning tree weights will be returned. +func Kruskal(dst graph.UndirectedBuilder, g UndirectedWeightLister) float64 { + edges := g.Edges() + ascend := make([]simple.Edge, 0, len(edges)) + for _, e := range edges { + u := e.From() + v := e.To() + w, ok := g.Weight(u, v) + if !ok { + panic("kruskal: unexpected invalid weight") + } + ascend = append(ascend, simple.Edge{F: u, T: v, W: w}) + } + sort.Sort(byWeight(ascend)) + + ds := newDisjointSet() + for _, node := range g.Nodes() { + ds.makeSet(node.ID()) + } + + var w float64 + for _, e := range ascend { + if s1, s2 := ds.find(e.From().ID()), ds.find(e.To().ID()); s1 != s2 { + ds.union(s1, s2) + dst.SetEdge(e) + w += e.Weight() + } + } + return w +} + +type byWeight []simple.Edge + +func (e byWeight) Len() int { return len(e) } +func (e byWeight) Less(i, j int) bool { return e[i].Weight() < e[j].Weight() } +func (e byWeight) Swap(i, j int) { e[i], e[j] = e[j], e[i] } diff --git a/graph/path/spanning_tree_test.go b/graph/path/spanning_tree_test.go new file mode 100644 index 00000000..49f4fe3b --- /dev/null +++ b/graph/path/spanning_tree_test.go @@ -0,0 +1,294 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
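(Editor's aside, not part of this changeset: the sketch below shows how Prim and Kruskal, defined above, can be driven with an empty undirected graph as the destination. The three edges and their weights are invented for the example.)

package main

import (
	"fmt"
	"math"

	"github.com/gonum/graph/path"
	"github.com/gonum/graph/simple"
)

func main() {
	// A triangle with distinct weights, so the minimum spanning tree is unique.
	g := simple.NewUndirectedGraph(0, math.Inf(1))
	g.SetEdge(simple.Edge{F: simple.Node(0), T: simple.Node(1), W: 1})
	g.SetEdge(simple.Edge{F: simple.Node(1), T: simple.Node(2), W: 2})
	g.SetEdge(simple.Edge{F: simple.Node(0), T: simple.Node(2), W: 4})

	// The spanning tree is written into dst; the total tree weight is returned.
	dst := simple.NewUndirectedGraph(0, math.Inf(1))
	w := path.Prim(dst, g) // path.Kruskal(dst, g) builds the same tree here
	fmt.Println(w)         // 3: the tree keeps edges 0--1 and 1--2
}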
+ +package path + +import ( + "fmt" + "math" + "testing" + + "github.com/gonum/graph" + "github.com/gonum/graph/simple" +) + +func init() { + for _, test := range spanningTreeTests { + var w float64 + for _, e := range test.treeEdges { + w += e.W + } + if w != test.want { + panic(fmt.Sprintf("bad test: %s weight mismatch: %v != %v", test.name, w, test.want)) + } + } +} + +type spanningGraph interface { + graph.UndirectedBuilder + graph.Weighter + Edges() []graph.Edge +} + +var spanningTreeTests = []struct { + name string + graph func() spanningGraph + edges []simple.Edge + want float64 + treeEdges []simple.Edge +}{ + { + name: "Empty", + graph: func() spanningGraph { return simple.NewUndirectedGraph(0, math.Inf(1)) }, + want: 0, + }, + { + // https://upload.wikimedia.org/wikipedia/commons/f/f7/Prim%27s_algorithm.svg + // Modified to make edge weights unique; A--B is increased to 2.5 otherwise + // to prevent the alternative solution being found. + name: "Prim WP figure 1", + graph: func() spanningGraph { return simple.NewUndirectedGraph(0, math.Inf(1)) }, + edges: []simple.Edge{ + {F: simple.Node('A'), T: simple.Node('B'), W: 2.5}, + {F: simple.Node('A'), T: simple.Node('D'), W: 1}, + {F: simple.Node('B'), T: simple.Node('D'), W: 2}, + {F: simple.Node('C'), T: simple.Node('D'), W: 3}, + }, + + want: 6, + treeEdges: []simple.Edge{ + {F: simple.Node('A'), T: simple.Node('D'), W: 1}, + {F: simple.Node('B'), T: simple.Node('D'), W: 2}, + {F: simple.Node('C'), T: simple.Node('D'), W: 3}, + }, + }, + { + // https://upload.wikimedia.org/wikipedia/commons/5/5c/MST_kruskal_en.gif + name: "Kruskal WP figure 1", + graph: func() spanningGraph { return simple.NewUndirectedGraph(0, math.Inf(1)) }, + edges: []simple.Edge{ + {F: simple.Node('a'), T: simple.Node('b'), W: 3}, + {F: simple.Node('a'), T: simple.Node('e'), W: 1}, + {F: simple.Node('b'), T: simple.Node('c'), W: 5}, + {F: simple.Node('b'), T: simple.Node('e'), W: 4}, + {F: simple.Node('c'), T: simple.Node('d'), W: 2}, + {F: simple.Node('c'), T: simple.Node('e'), W: 6}, + {F: simple.Node('d'), T: simple.Node('e'), W: 7}, + }, + + want: 11, + treeEdges: []simple.Edge{ + {F: simple.Node('a'), T: simple.Node('b'), W: 3}, + {F: simple.Node('a'), T: simple.Node('e'), W: 1}, + {F: simple.Node('b'), T: simple.Node('c'), W: 5}, + {F: simple.Node('c'), T: simple.Node('d'), W: 2}, + }, + }, + { + // https://upload.wikimedia.org/wikipedia/commons/8/87/Kruskal_Algorithm_6.svg + name: "Kruskal WP example", + graph: func() spanningGraph { return simple.NewUndirectedGraph(0, math.Inf(1)) }, + edges: []simple.Edge{ + {F: simple.Node('A'), T: simple.Node('B'), W: 7}, + {F: simple.Node('A'), T: simple.Node('D'), W: 5}, + {F: simple.Node('B'), T: simple.Node('C'), W: 8}, + {F: simple.Node('B'), T: simple.Node('D'), W: 9}, + {F: simple.Node('B'), T: simple.Node('E'), W: 7}, + {F: simple.Node('C'), T: simple.Node('E'), W: 5}, + {F: simple.Node('D'), T: simple.Node('E'), W: 15}, + {F: simple.Node('D'), T: simple.Node('F'), W: 6}, + {F: simple.Node('E'), T: simple.Node('F'), W: 8}, + {F: simple.Node('E'), T: simple.Node('G'), W: 9}, + {F: simple.Node('F'), T: simple.Node('G'), W: 11}, + }, + + want: 39, + treeEdges: []simple.Edge{ + {F: simple.Node('A'), T: simple.Node('B'), W: 7}, + {F: simple.Node('A'), T: simple.Node('D'), W: 5}, + {F: simple.Node('B'), T: simple.Node('E'), W: 7}, + {F: simple.Node('C'), T: simple.Node('E'), W: 5}, + {F: simple.Node('D'), T: simple.Node('F'), W: 6}, + {F: simple.Node('E'), T: simple.Node('G'), W: 9}, + }, + }, + { + // 
https://upload.wikimedia.org/wikipedia/commons/2/2e/Boruvka%27s_algorithm_%28Sollin%27s_algorithm%29_Anim.gif + name: "Borůvka WP example", + graph: func() spanningGraph { return simple.NewUndirectedGraph(0, math.Inf(1)) }, + edges: []simple.Edge{ + {F: simple.Node('A'), T: simple.Node('B'), W: 13}, + {F: simple.Node('A'), T: simple.Node('C'), W: 6}, + {F: simple.Node('B'), T: simple.Node('C'), W: 7}, + {F: simple.Node('B'), T: simple.Node('D'), W: 1}, + {F: simple.Node('C'), T: simple.Node('D'), W: 14}, + {F: simple.Node('C'), T: simple.Node('E'), W: 8}, + {F: simple.Node('C'), T: simple.Node('H'), W: 20}, + {F: simple.Node('D'), T: simple.Node('E'), W: 9}, + {F: simple.Node('D'), T: simple.Node('F'), W: 3}, + {F: simple.Node('E'), T: simple.Node('F'), W: 2}, + {F: simple.Node('E'), T: simple.Node('J'), W: 18}, + {F: simple.Node('G'), T: simple.Node('H'), W: 15}, + {F: simple.Node('G'), T: simple.Node('I'), W: 5}, + {F: simple.Node('G'), T: simple.Node('J'), W: 19}, + {F: simple.Node('G'), T: simple.Node('K'), W: 10}, + {F: simple.Node('H'), T: simple.Node('J'), W: 17}, + {F: simple.Node('I'), T: simple.Node('K'), W: 11}, + {F: simple.Node('J'), T: simple.Node('K'), W: 16}, + {F: simple.Node('J'), T: simple.Node('L'), W: 4}, + {F: simple.Node('K'), T: simple.Node('L'), W: 12}, + }, + + want: 83, + treeEdges: []simple.Edge{ + {F: simple.Node('A'), T: simple.Node('C'), W: 6}, + {F: simple.Node('B'), T: simple.Node('C'), W: 7}, + {F: simple.Node('B'), T: simple.Node('D'), W: 1}, + {F: simple.Node('D'), T: simple.Node('F'), W: 3}, + {F: simple.Node('E'), T: simple.Node('F'), W: 2}, + {F: simple.Node('E'), T: simple.Node('J'), W: 18}, + {F: simple.Node('G'), T: simple.Node('H'), W: 15}, + {F: simple.Node('G'), T: simple.Node('I'), W: 5}, + {F: simple.Node('G'), T: simple.Node('K'), W: 10}, + {F: simple.Node('J'), T: simple.Node('L'), W: 4}, + {F: simple.Node('K'), T: simple.Node('L'), W: 12}, + }, + }, + { + // https://upload.wikimedia.org/wikipedia/commons/d/d2/Minimum_spanning_tree.svg + // Nodes labelled row major. 
+ name: "Minimum Spanning Tree WP figure 1", + graph: func() spanningGraph { return simple.NewUndirectedGraph(0, math.Inf(1)) }, + edges: []simple.Edge{ + {F: simple.Node(1), T: simple.Node(2), W: 4}, + {F: simple.Node(1), T: simple.Node(3), W: 1}, + {F: simple.Node(1), T: simple.Node(4), W: 4}, + {F: simple.Node(2), T: simple.Node(3), W: 5}, + {F: simple.Node(2), T: simple.Node(5), W: 9}, + {F: simple.Node(2), T: simple.Node(6), W: 9}, + {F: simple.Node(2), T: simple.Node(8), W: 7}, + {F: simple.Node(3), T: simple.Node(4), W: 3}, + {F: simple.Node(3), T: simple.Node(8), W: 9}, + {F: simple.Node(4), T: simple.Node(8), W: 10}, + {F: simple.Node(4), T: simple.Node(10), W: 18}, + {F: simple.Node(5), T: simple.Node(6), W: 2}, + {F: simple.Node(5), T: simple.Node(7), W: 4}, + {F: simple.Node(5), T: simple.Node(9), W: 6}, + {F: simple.Node(6), T: simple.Node(7), W: 2}, + {F: simple.Node(6), T: simple.Node(8), W: 8}, + {F: simple.Node(7), T: simple.Node(8), W: 9}, + {F: simple.Node(7), T: simple.Node(9), W: 3}, + {F: simple.Node(7), T: simple.Node(10), W: 9}, + {F: simple.Node(8), T: simple.Node(10), W: 8}, + {F: simple.Node(9), T: simple.Node(10), W: 9}, + }, + + want: 38, + treeEdges: []simple.Edge{ + {F: simple.Node(1), T: simple.Node(2), W: 4}, + {F: simple.Node(1), T: simple.Node(3), W: 1}, + {F: simple.Node(2), T: simple.Node(8), W: 7}, + {F: simple.Node(3), T: simple.Node(4), W: 3}, + {F: simple.Node(5), T: simple.Node(6), W: 2}, + {F: simple.Node(6), T: simple.Node(7), W: 2}, + {F: simple.Node(6), T: simple.Node(8), W: 8}, + {F: simple.Node(7), T: simple.Node(9), W: 3}, + {F: simple.Node(8), T: simple.Node(10), W: 8}, + }, + }, + + { + // https://upload.wikimedia.org/wikipedia/commons/2/2e/Boruvka%27s_algorithm_%28Sollin%27s_algorithm%29_Anim.gif + // but with C--H and E--J cut. 
+ name: "Borůvka WP example cut", + graph: func() spanningGraph { return simple.NewUndirectedGraph(0, math.Inf(1)) }, + edges: []simple.Edge{ + {F: simple.Node('A'), T: simple.Node('B'), W: 13}, + {F: simple.Node('A'), T: simple.Node('C'), W: 6}, + {F: simple.Node('B'), T: simple.Node('C'), W: 7}, + {F: simple.Node('B'), T: simple.Node('D'), W: 1}, + {F: simple.Node('C'), T: simple.Node('D'), W: 14}, + {F: simple.Node('C'), T: simple.Node('E'), W: 8}, + {F: simple.Node('D'), T: simple.Node('E'), W: 9}, + {F: simple.Node('D'), T: simple.Node('F'), W: 3}, + {F: simple.Node('E'), T: simple.Node('F'), W: 2}, + {F: simple.Node('G'), T: simple.Node('H'), W: 15}, + {F: simple.Node('G'), T: simple.Node('I'), W: 5}, + {F: simple.Node('G'), T: simple.Node('J'), W: 19}, + {F: simple.Node('G'), T: simple.Node('K'), W: 10}, + {F: simple.Node('H'), T: simple.Node('J'), W: 17}, + {F: simple.Node('I'), T: simple.Node('K'), W: 11}, + {F: simple.Node('J'), T: simple.Node('K'), W: 16}, + {F: simple.Node('J'), T: simple.Node('L'), W: 4}, + {F: simple.Node('K'), T: simple.Node('L'), W: 12}, + }, + + want: 65, + treeEdges: []simple.Edge{ + {F: simple.Node('A'), T: simple.Node('C'), W: 6}, + {F: simple.Node('B'), T: simple.Node('C'), W: 7}, + {F: simple.Node('B'), T: simple.Node('D'), W: 1}, + {F: simple.Node('D'), T: simple.Node('F'), W: 3}, + {F: simple.Node('E'), T: simple.Node('F'), W: 2}, + {F: simple.Node('G'), T: simple.Node('H'), W: 15}, + {F: simple.Node('G'), T: simple.Node('I'), W: 5}, + {F: simple.Node('G'), T: simple.Node('K'), W: 10}, + {F: simple.Node('J'), T: simple.Node('L'), W: 4}, + {F: simple.Node('K'), T: simple.Node('L'), W: 12}, + }, + }, +} + +func testMinumumSpanning(mst func(dst graph.UndirectedBuilder, g spanningGraph) float64, t *testing.T) { + for _, test := range spanningTreeTests { + g := test.graph() + for _, e := range test.edges { + g.SetEdge(e) + } + + dst := simple.NewUndirectedGraph(0, math.Inf(1)) + w := mst(dst, g) + if w != test.want { + t.Errorf("unexpected minimum spanning tree weight for %q: got: %f want: %f", + test.name, w, test.want) + } + var got float64 + for _, e := range dst.Edges() { + got += e.Weight() + } + if got != test.want { + t.Errorf("unexpected minimum spanning tree edge weight sum for %q: got: %f want: %f", + test.name, got, test.want) + } + + gotEdges := dst.Edges() + if len(gotEdges) != len(test.treeEdges) { + t.Errorf("unexpected number of spanning tree edges for %q: got: %d want: %d", + test.name, len(gotEdges), len(test.treeEdges)) + } + for _, e := range test.treeEdges { + w, ok := dst.Weight(e.From(), e.To()) + if !ok { + t.Errorf("spanning tree edge not found in graph for %q: %+v", + test.name, e) + } + if w != e.Weight() { + t.Errorf("unexpected spanning tree edge weight for %q: got: %f want: %f", + test.name, w, e.Weight()) + } + } + } +} + +func TestKruskal(t *testing.T) { + testMinumumSpanning(func(dst graph.UndirectedBuilder, g spanningGraph) float64 { + return Kruskal(dst, g) + }, t) +} + +func TestPrim(t *testing.T) { + testMinumumSpanning(func(dst graph.UndirectedBuilder, g spanningGraph) float64 { + return Prim(dst, g) + }, t) +} diff --git a/graph/path/weight.go b/graph/path/weight.go new file mode 100644 index 00000000..cfe1f17f --- /dev/null +++ b/graph/path/weight.go @@ -0,0 +1,40 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
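For context outside the diff, here is a minimal sketch of how the minimum spanning tree functions exercised by the tests above might be called. It assumes the signatures the tests rely on (Kruskal and Prim take a destination graph.UndirectedBuilder plus a weighted undirected source graph and return the total tree weight); the three-edge example graph is hypothetical, and the expected values in the comments follow from it.

```go
package main

import (
	"fmt"
	"math"

	"github.com/gonum/graph/path"
	"github.com/gonum/graph/simple"
)

func main() {
	// Source graph: self-edge cost 0, absent-edge weight +Inf.
	g := simple.NewUndirectedGraph(0, math.Inf(1))
	for _, e := range []simple.Edge{
		{F: simple.Node(0), T: simple.Node(1), W: 4},
		{F: simple.Node(0), T: simple.Node(2), W: 1},
		{F: simple.Node(1), T: simple.Node(2), W: 2},
	} {
		g.SetEdge(e)
	}

	// Destination graph that receives the spanning tree edges.
	dst := simple.NewUndirectedGraph(0, math.Inf(1))
	w := path.Kruskal(dst, g) // path.Prim(dst, g) would be used the same way.

	fmt.Println("tree weight:", w)               // 3 for this graph.
	fmt.Println("tree edges:", len(dst.Edges())) // 2 edges (n-1).
}
```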
+ +package path + +import ( + "math" + + "github.com/gonum/graph" +) + +// Weighting is a mapping between a pair of nodes and a weight. It follows the +// semantics of the Weighter interface. +type Weighting func(x, y graph.Node) (w float64, ok bool) + +// UniformCost returns a Weighting that returns an edge cost of 1 for existing +// edges, zero for node identity and Inf for otherwise absent edges. +func UniformCost(g graph.Graph) Weighting { + return func(x, y graph.Node) (w float64, ok bool) { + xid := x.ID() + yid := y.ID() + if xid == yid { + return 0, true + } + if e := g.Edge(x, y); e != nil { + return 1, true + } + return math.Inf(1), false + } +} + +// Heuristic returns an estimate of the cost of travelling between two nodes. +type Heuristic func(x, y graph.Node) float64 + +// HeuristicCoster wraps the HeuristicCost method. A graph implementing the +// interface provides a heuristic between any two given nodes. +type HeuristicCoster interface { + HeuristicCost(x, y graph.Node) float64 +} diff --git a/graph/simple/dense_directed_matrix.go b/graph/simple/dense_directed_matrix.go new file mode 100644 index 00000000..46db6b0f --- /dev/null +++ b/graph/simple/dense_directed_matrix.go @@ -0,0 +1,265 @@ +// Copyright ©2014 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package simple + +import ( + "sort" + + "github.com/gonum/graph" + "github.com/gonum/graph/internal/ordered" + "github.com/gonum/matrix/mat64" +) + +// DirectedMatrix represents a directed graph using an adjacency +// matrix such that all IDs are in a contiguous block from 0 to n-1. +// Edges are stored implicitly as an edge weight, so edges stored in +// the graph are not recoverable. +type DirectedMatrix struct { + mat *mat64.Dense + nodes []graph.Node + + self float64 + absent float64 +} + +// NewDirectedMatrix creates a directed dense graph with n nodes. +// All edges are initialized with the weight given by init. The self parameter +// specifies the cost of self connection, and absent specifies the weight +// returned for absent edges. +func NewDirectedMatrix(n int, init, self, absent float64) *DirectedMatrix { + mat := make([]float64, n*n) + if init != 0 { + for i := range mat { + mat[i] = init + } + } + for i := 0; i < len(mat); i += n + 1 { + mat[i] = self + } + return &DirectedMatrix{ + mat: mat64.NewDense(n, n, mat), + self: self, + absent: absent, + } +} + +// NewDirectedMatrixFrom creates a directed dense graph with the given nodes. +// The IDs of the nodes must be contiguous from 0 to len(nodes)-1, but may +// be in any order. If IDs are not contiguous NewDirectedMatrixFrom will panic. +// All edges are initialized with the weight given by init. The self parameter +// specifies the cost of self connection, and absent specifies the weight +// returned for absent edges. +func NewDirectedMatrixFrom(nodes []graph.Node, init, self, absent float64) *DirectedMatrix { + sort.Sort(ordered.ByID(nodes)) + for i, n := range nodes { + if i != n.ID() { + panic("simple: non-contiguous node IDs") + } + } + g := NewDirectedMatrix(len(nodes), init, self, absent) + g.nodes = nodes + return g +} + +// Node returns the node in the graph with the given ID. +func (g *DirectedMatrix) Node(id int) graph.Node { + if !g.has(id) { + return nil + } + if g.nodes == nil { + return Node(id) + } + return g.nodes[id] +} + +// Has returns whether the node exists within the graph. 
+func (g *DirectedMatrix) Has(n graph.Node) bool { + return g.has(n.ID()) +} + +func (g *DirectedMatrix) has(id int) bool { + r, _ := g.mat.Dims() + return 0 <= id && id < r +} + +// Nodes returns all the nodes in the graph. +func (g *DirectedMatrix) Nodes() []graph.Node { + if g.nodes != nil { + nodes := make([]graph.Node, len(g.nodes)) + copy(nodes, g.nodes) + return nodes + } + r, _ := g.mat.Dims() + nodes := make([]graph.Node, r) + for i := 0; i < r; i++ { + nodes[i] = Node(i) + } + return nodes +} + +// Edges returns all the edges in the graph. +func (g *DirectedMatrix) Edges() []graph.Edge { + var edges []graph.Edge + r, _ := g.mat.Dims() + for i := 0; i < r; i++ { + for j := 0; j < r; j++ { + if i == j { + continue + } + if w := g.mat.At(i, j); !isSame(w, g.absent) { + edges = append(edges, Edge{F: g.Node(i), T: g.Node(j), W: w}) + } + } + } + return edges +} + +// From returns all nodes in g that can be reached directly from n. +func (g *DirectedMatrix) From(n graph.Node) []graph.Node { + id := n.ID() + if !g.has(id) { + return nil + } + var neighbors []graph.Node + _, c := g.mat.Dims() + for j := 0; j < c; j++ { + if j == id { + continue + } + if !isSame(g.mat.At(id, j), g.absent) { + neighbors = append(neighbors, g.Node(j)) + } + } + return neighbors +} + +// To returns all nodes in g that can reach directly to n. +func (g *DirectedMatrix) To(n graph.Node) []graph.Node { + id := n.ID() + if !g.has(id) { + return nil + } + var neighbors []graph.Node + r, _ := g.mat.Dims() + for i := 0; i < r; i++ { + if i == id { + continue + } + if !isSame(g.mat.At(i, id), g.absent) { + neighbors = append(neighbors, g.Node(i)) + } + } + return neighbors +} + +// HasEdgeBetween returns whether an edge exists between nodes x and y without +// considering direction. +func (g *DirectedMatrix) HasEdgeBetween(x, y graph.Node) bool { + xid := x.ID() + if !g.has(xid) { + return false + } + yid := y.ID() + if !g.has(yid) { + return false + } + return xid != yid && (!isSame(g.mat.At(xid, yid), g.absent) || !isSame(g.mat.At(yid, xid), g.absent)) +} + +// Edge returns the edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +func (g *DirectedMatrix) Edge(u, v graph.Node) graph.Edge { + if g.HasEdgeFromTo(u, v) { + return Edge{F: g.Node(u.ID()), T: g.Node(v.ID()), W: g.mat.At(u.ID(), v.ID())} + } + return nil +} + +// HasEdgeFromTo returns whether an edge exists in the graph from u to v. +func (g *DirectedMatrix) HasEdgeFromTo(u, v graph.Node) bool { + uid := u.ID() + if !g.has(uid) { + return false + } + vid := v.ID() + if !g.has(vid) { + return false + } + return uid != vid && !isSame(g.mat.At(uid, vid), g.absent) +} + +// Weight returns the weight for the edge between x and y if Edge(x, y) returns a non-nil Edge. +// If x and y are the same node or there is no joining edge between the two nodes the weight +// value returned is either the graph's absent or self value. Weight returns true if an edge +// exists between x and y or if x and y have the same ID, false otherwise. +func (g *DirectedMatrix) Weight(x, y graph.Node) (w float64, ok bool) { + xid := x.ID() + yid := y.ID() + if xid == yid { + return g.self, true + } + if g.has(xid) && g.has(yid) { + return g.mat.At(xid, yid), true + } + return g.absent, false +} + +// SetEdge sets e, an edge from one node to another. If the ends of the edge are not in g +// or the edge is a self loop, SetEdge panics. 
+func (g *DirectedMatrix) SetEdge(e graph.Edge) { + fid := e.From().ID() + tid := e.To().ID() + if fid == tid { + panic("simple: set illegal edge") + } + g.mat.Set(fid, tid, e.Weight()) +} + +// RemoveEdge removes e from the graph, leaving the terminal nodes. If the edge does not exist +// it is a no-op. +func (g *DirectedMatrix) RemoveEdge(e graph.Edge) { + fid := e.From().ID() + if !g.has(fid) { + return + } + tid := e.To().ID() + if !g.has(tid) { + return + } + g.mat.Set(fid, tid, g.absent) +} + +// Degree returns the in+out degree of n in g. +func (g *DirectedMatrix) Degree(n graph.Node) int { + id := n.ID() + var deg int + r, c := g.mat.Dims() + for i := 0; i < r; i++ { + if i == id { + continue + } + if !isSame(g.mat.At(id, i), g.absent) { + deg++ + } + } + for i := 0; i < c; i++ { + if i == id { + continue + } + if !isSame(g.mat.At(i, id), g.absent) { + deg++ + } + } + return deg +} + +// Matrix returns the mat64.Matrix representation of the graph. The orientation +// of the matrix is such that the matrix entry at G_{ij} is the weight of the edge +// from node i to node j. +func (g *DirectedMatrix) Matrix() mat64.Matrix { + // Prevent alteration of dimensions of the returned matrix. + m := *g.mat + return &m +} diff --git a/graph/simple/dense_undirected_matrix.go b/graph/simple/dense_undirected_matrix.go new file mode 100644 index 00000000..63f20a48 --- /dev/null +++ b/graph/simple/dense_undirected_matrix.go @@ -0,0 +1,224 @@ +// Copyright ©2014 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package simple + +import ( + "sort" + + "github.com/gonum/graph" + "github.com/gonum/graph/internal/ordered" + "github.com/gonum/matrix/mat64" +) + +// UndirectedMatrix represents an undirected graph using an adjacency +// matrix such that all IDs are in a contiguous block from 0 to n-1. +// Edges are stored implicitly as an edge weight, so edges stored in +// the graph are not recoverable. +type UndirectedMatrix struct { + mat *mat64.SymDense + nodes []graph.Node + + self float64 + absent float64 +} + +// NewUndirectedMatrix creates an undirected dense graph with n nodes. +// All edges are initialized with the weight given by init. The self parameter +// specifies the cost of self connection, and absent specifies the weight +// returned for absent edges. +func NewUndirectedMatrix(n int, init, self, absent float64) *UndirectedMatrix { + mat := make([]float64, n*n) + if init != 0 { + for i := range mat { + mat[i] = init + } + } + for i := 0; i < len(mat); i += n + 1 { + mat[i] = self + } + return &UndirectedMatrix{ + mat: mat64.NewSymDense(n, mat), + self: self, + absent: absent, + } +} + +// NewUndirectedMatrixFrom creates an undirected dense graph with the given nodes. +// The IDs of the nodes must be contiguous from 0 to len(nodes)-1, but may +// be in any order. If IDs are not contiguous NewUndirectedMatrixFrom will panic. +// All edges are initialized with the weight given by init. The self parameter +// specifies the cost of self connection, and absent specifies the weight +// returned for absent edges. +func NewUndirectedMatrixFrom(nodes []graph.Node, init, self, absent float64) *UndirectedMatrix { + sort.Sort(ordered.ByID(nodes)) + for i, n := range nodes { + if i != n.ID() { + panic("simple: non-contiguous node IDs") + } + } + g := NewUndirectedMatrix(len(nodes), init, self, absent) + g.nodes = nodes + return g +} + +// Node returns the node in the graph with the given ID. 
+func (g *UndirectedMatrix) Node(id int) graph.Node { + if !g.has(id) { + return nil + } + if g.nodes == nil { + return Node(id) + } + return g.nodes[id] +} + +// Has returns whether the node exists within the graph. +func (g *UndirectedMatrix) Has(n graph.Node) bool { + return g.has(n.ID()) +} + +func (g *UndirectedMatrix) has(id int) bool { + r := g.mat.Symmetric() + return 0 <= id && id < r +} + +// Nodes returns all the nodes in the graph. +func (g *UndirectedMatrix) Nodes() []graph.Node { + if g.nodes != nil { + nodes := make([]graph.Node, len(g.nodes)) + copy(nodes, g.nodes) + return nodes + } + r := g.mat.Symmetric() + nodes := make([]graph.Node, r) + for i := 0; i < r; i++ { + nodes[i] = Node(i) + } + return nodes +} + +// Edges returns all the edges in the graph. +func (g *UndirectedMatrix) Edges() []graph.Edge { + var edges []graph.Edge + r, _ := g.mat.Dims() + for i := 0; i < r; i++ { + for j := i + 1; j < r; j++ { + if w := g.mat.At(i, j); !isSame(w, g.absent) { + edges = append(edges, Edge{F: g.Node(i), T: g.Node(j), W: w}) + } + } + } + return edges +} + +// From returns all nodes in g that can be reached directly from n. +func (g *UndirectedMatrix) From(n graph.Node) []graph.Node { + id := n.ID() + if !g.has(id) { + return nil + } + var neighbors []graph.Node + r := g.mat.Symmetric() + for i := 0; i < r; i++ { + if i == id { + continue + } + if !isSame(g.mat.At(id, i), g.absent) { + neighbors = append(neighbors, g.Node(i)) + } + } + return neighbors +} + +// HasEdgeBetween returns whether an edge exists between nodes x and y. +func (g *UndirectedMatrix) HasEdgeBetween(u, v graph.Node) bool { + uid := u.ID() + if !g.has(uid) { + return false + } + vid := v.ID() + if !g.has(vid) { + return false + } + return uid != vid && !isSame(g.mat.At(uid, vid), g.absent) +} + +// Edge returns the edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +func (g *UndirectedMatrix) Edge(u, v graph.Node) graph.Edge { + return g.EdgeBetween(u, v) +} + +// EdgeBetween returns the edge between nodes x and y. +func (g *UndirectedMatrix) EdgeBetween(u, v graph.Node) graph.Edge { + if g.HasEdgeBetween(u, v) { + return Edge{F: g.Node(u.ID()), T: g.Node(v.ID()), W: g.mat.At(u.ID(), v.ID())} + } + return nil +} + +// Weight returns the weight for the edge between x and y if Edge(x, y) returns a non-nil Edge. +// If x and y are the same node or there is no joining edge between the two nodes the weight +// value returned is either the graph's absent or self value. Weight returns true if an edge +// exists between x and y or if x and y have the same ID, false otherwise. +func (g *UndirectedMatrix) Weight(x, y graph.Node) (w float64, ok bool) { + xid := x.ID() + yid := y.ID() + if xid == yid { + return g.self, true + } + if g.has(xid) && g.has(yid) { + return g.mat.At(xid, yid), true + } + return g.absent, false +} + +// SetEdge sets e, an edge from one node to another. If the ends of the edge are not in g +// or the edge is a self loop, SetEdge panics. +func (g *UndirectedMatrix) SetEdge(e graph.Edge) { + fid := e.From().ID() + tid := e.To().ID() + if fid == tid { + panic("simple: set illegal edge") + } + g.mat.SetSym(fid, tid, e.Weight()) +} + +// RemoveEdge removes e from the graph, leaving the terminal nodes. If the edge does not exist +// it is a no-op. 
+func (g *UndirectedMatrix) RemoveEdge(e graph.Edge) { + fid := e.From().ID() + if !g.has(fid) { + return + } + tid := e.To().ID() + if !g.has(tid) { + return + } + g.mat.SetSym(fid, tid, g.absent) +} + +// Degree returns the degree of n in g. +func (g *UndirectedMatrix) Degree(n graph.Node) int { + id := n.ID() + var deg int + r := g.mat.Symmetric() + for i := 0; i < r; i++ { + if i == id { + continue + } + if !isSame(g.mat.At(id, i), g.absent) { + deg++ + } + } + return deg +} + +// Matrix returns the mat64.Matrix representation of the graph. +func (g *UndirectedMatrix) Matrix() mat64.Matrix { + // Prevent alteration of dimensions of the returned matrix. + m := *g.mat + return &m +} diff --git a/graph/simple/densegraph_test.go b/graph/simple/densegraph_test.go new file mode 100644 index 00000000..d68fd1d8 --- /dev/null +++ b/graph/simple/densegraph_test.go @@ -0,0 +1,140 @@ +// Copyright ©2014 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package simple + +import ( + "math" + "sort" + "testing" + + "github.com/gonum/graph" + "github.com/gonum/graph/internal/ordered" +) + +var ( + _ graph.Graph = (*UndirectedMatrix)(nil) + _ graph.Directed = (*DirectedMatrix)(nil) +) + +func TestBasicDenseImpassable(t *testing.T) { + dg := NewUndirectedMatrix(5, math.Inf(1), 0, math.Inf(1)) + if dg == nil { + t.Fatal("Directed graph could not be made") + } + + for i := 0; i < 5; i++ { + if !dg.Has(Node(i)) { + t.Errorf("Node that should exist doesn't: %d", i) + } + + if degree := dg.Degree(Node(i)); degree != 0 { + t.Errorf("Node in impassable graph has a neighbor. Node: %d Degree: %d", i, degree) + } + } + + for i := 5; i < 10; i++ { + if dg.Has(Node(i)) { + t.Errorf("Node exists that shouldn't: %d", i) + } + } +} + +func TestBasicDensePassable(t *testing.T) { + dg := NewUndirectedMatrix(5, 1, 0, math.Inf(1)) + if dg == nil { + t.Fatal("Directed graph could not be made") + } + + for i := 0; i < 5; i++ { + if !dg.Has(Node(i)) { + t.Errorf("Node that should exist doesn't: %d", i) + } + + if degree := dg.Degree(Node(i)); degree != 4 { + t.Errorf("Node in passable graph missing neighbors. 
Node: %d Degree: %d", i, degree) + } + } + + for i := 5; i < 10; i++ { + if dg.Has(Node(i)) { + t.Errorf("Node exists that shouldn't: %d", i) + } + } +} + +func TestDirectedDenseAddRemove(t *testing.T) { + dg := NewDirectedMatrix(10, math.Inf(1), 0, math.Inf(1)) + dg.SetEdge(Edge{F: Node(0), T: Node(2), W: 1}) + + if neighbors := dg.From(Node(0)); len(neighbors) != 1 || neighbors[0].ID() != 2 || + dg.Edge(Node(0), Node(2)) == nil { + t.Errorf("Adding edge didn't create successor") + } + + dg.RemoveEdge(Edge{F: Node(0), T: Node(2)}) + + if neighbors := dg.From(Node(0)); len(neighbors) != 0 || dg.Edge(Node(0), Node(2)) != nil { + t.Errorf("Removing edge didn't properly remove successor") + } + + if neighbors := dg.To(Node(2)); len(neighbors) != 0 || dg.Edge(Node(0), Node(2)) != nil { + t.Errorf("Removing directed edge wrongly kept predecessor") + } + + dg.SetEdge(Edge{F: Node(0), T: Node(2), W: 2}) + // I figure we've torture tested From/To at this point + // so we'll just use the bool functions now + if dg.Edge(Node(0), Node(2)) == nil { + t.Fatal("Adding directed edge didn't change successor back") + } + c1, _ := dg.Weight(Node(2), Node(0)) + c2, _ := dg.Weight(Node(0), Node(2)) + if c1 == c2 { + t.Error("Adding directed edge affected cost in undirected manner") + } +} + +func TestUndirectedDenseAddRemove(t *testing.T) { + dg := NewUndirectedMatrix(10, math.Inf(1), 0, math.Inf(1)) + dg.SetEdge(Edge{F: Node(0), T: Node(2)}) + + if neighbors := dg.From(Node(0)); len(neighbors) != 1 || neighbors[0].ID() != 2 || + dg.EdgeBetween(Node(0), Node(2)) == nil { + t.Errorf("Couldn't add neighbor") + } + + if neighbors := dg.From(Node(2)); len(neighbors) != 1 || neighbors[0].ID() != 0 || + dg.EdgeBetween(Node(2), Node(0)) == nil { + t.Errorf("Adding an undirected neighbor didn't add it reciprocally") + } +} + +func TestDenseLists(t *testing.T) { + dg := NewDirectedMatrix(15, 1, 0, math.Inf(1)) + nodes := dg.Nodes() + + if len(nodes) != 15 { + t.Fatalf("Wrong number of nodes") + } + + sort.Sort(ordered.ByID(nodes)) + + for i, node := range dg.Nodes() { + if i != node.ID() { + t.Errorf("Node list doesn't return properly id'd nodes") + } + } + + edges := dg.Edges() + if len(edges) != 15*14 { + t.Errorf("Improper number of edges for passable dense graph") + } + + dg.RemoveEdge(Edge{F: Node(12), T: Node(11)}) + edges = dg.Edges() + if len(edges) != (15*14)-1 { + t.Errorf("Removing edge didn't affect edge listing properly") + } +} diff --git a/graph/simple/directed.go b/graph/simple/directed.go new file mode 100644 index 00000000..fd67cade --- /dev/null +++ b/graph/simple/directed.go @@ -0,0 +1,280 @@ +// Copyright ©2014 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package simple + +import ( + "fmt" + + "golang.org/x/tools/container/intsets" + + "github.com/gonum/graph" +) + +// DirectedGraph implements a generalized directed graph. +type DirectedGraph struct { + nodes map[int]graph.Node + from map[int]map[int]graph.Edge + to map[int]map[int]graph.Edge + + self, absent float64 + + freeIDs intsets.Sparse + usedIDs intsets.Sparse +} + +// NewDirectedGraph returns a DirectedGraph with the specified self and absent +// edge weight values. 
+func NewDirectedGraph(self, absent float64) *DirectedGraph { + return &DirectedGraph{ + nodes: make(map[int]graph.Node), + from: make(map[int]map[int]graph.Edge), + to: make(map[int]map[int]graph.Edge), + + self: self, + absent: absent, + } +} + +// NewNodeID returns a new unique ID for a node to be added to g. The returned ID does +// not become a valid ID in g until it is added to g. +func (g *DirectedGraph) NewNodeID() int { + if len(g.nodes) == 0 { + return 0 + } + if len(g.nodes) == maxInt { + panic(fmt.Sprintf("simple: cannot allocate node: no slot")) + } + + var id int + if g.freeIDs.Len() != 0 && g.freeIDs.TakeMin(&id) { + return id + } + if id = g.usedIDs.Max(); id < maxInt { + return id + 1 + } + for id = 0; id < maxInt; id++ { + if !g.usedIDs.Has(id) { + return id + } + } + panic("unreachable") +} + +// AddNode adds n to the graph. It panics if the added node ID matches an existing node ID. +func (g *DirectedGraph) AddNode(n graph.Node) { + if _, exists := g.nodes[n.ID()]; exists { + panic(fmt.Sprintf("simple: node ID collision: %d", n.ID())) + } + g.nodes[n.ID()] = n + g.from[n.ID()] = make(map[int]graph.Edge) + g.to[n.ID()] = make(map[int]graph.Edge) + + g.freeIDs.Remove(n.ID()) + g.usedIDs.Insert(n.ID()) +} + +// RemoveNode removes n from the graph, as well as any edges attached to it. If the node +// is not in the graph it is a no-op. +func (g *DirectedGraph) RemoveNode(n graph.Node) { + if _, ok := g.nodes[n.ID()]; !ok { + return + } + delete(g.nodes, n.ID()) + + for from := range g.from[n.ID()] { + delete(g.to[from], n.ID()) + } + delete(g.from, n.ID()) + + for to := range g.to[n.ID()] { + delete(g.from[to], n.ID()) + } + delete(g.to, n.ID()) + + g.freeIDs.Insert(n.ID()) + g.usedIDs.Remove(n.ID()) +} + +// SetEdge adds e, an edge from one node to another. If the nodes do not exist, they are added. +// It will panic if the IDs of the e.From and e.To are equal. +func (g *DirectedGraph) SetEdge(e graph.Edge) { + var ( + from = e.From() + fid = from.ID() + to = e.To() + tid = to.ID() + ) + + if fid == tid { + panic("simple: adding self edge") + } + + if !g.Has(from) { + g.AddNode(from) + } + if !g.Has(to) { + g.AddNode(to) + } + + g.from[fid][tid] = e + g.to[tid][fid] = e +} + +// RemoveEdge removes e from the graph, leaving the terminal nodes. If the edge does not exist +// it is a no-op. +func (g *DirectedGraph) RemoveEdge(e graph.Edge) { + from, to := e.From(), e.To() + if _, ok := g.nodes[from.ID()]; !ok { + return + } + if _, ok := g.nodes[to.ID()]; !ok { + return + } + + delete(g.from[from.ID()], to.ID()) + delete(g.to[to.ID()], from.ID()) +} + +// Node returns the node in the graph with the given ID. +func (g *DirectedGraph) Node(id int) graph.Node { + return g.nodes[id] +} + +// Has returns whether the node exists within the graph. +func (g *DirectedGraph) Has(n graph.Node) bool { + _, ok := g.nodes[n.ID()] + + return ok +} + +// Nodes returns all the nodes in the graph. +func (g *DirectedGraph) Nodes() []graph.Node { + nodes := make([]graph.Node, len(g.from)) + i := 0 + for _, n := range g.nodes { + nodes[i] = n + i++ + } + + return nodes +} + +// Edges returns all the edges in the graph. +func (g *DirectedGraph) Edges() []graph.Edge { + var edges []graph.Edge + for _, u := range g.nodes { + for _, e := range g.from[u.ID()] { + edges = append(edges, e) + } + } + return edges +} + +// From returns all nodes in g that can be reached directly from n. 
+func (g *DirectedGraph) From(n graph.Node) []graph.Node { + if _, ok := g.from[n.ID()]; !ok { + return nil + } + + from := make([]graph.Node, len(g.from[n.ID()])) + i := 0 + for id := range g.from[n.ID()] { + from[i] = g.nodes[id] + i++ + } + + return from +} + +// To returns all nodes in g that can reach directly to n. +func (g *DirectedGraph) To(n graph.Node) []graph.Node { + if _, ok := g.from[n.ID()]; !ok { + return nil + } + + to := make([]graph.Node, len(g.to[n.ID()])) + i := 0 + for id := range g.to[n.ID()] { + to[i] = g.nodes[id] + i++ + } + + return to +} + +// HasEdgeBetween returns whether an edge exists between nodes x and y without +// considering direction. +func (g *DirectedGraph) HasEdgeBetween(x, y graph.Node) bool { + xid := x.ID() + yid := y.ID() + if _, ok := g.nodes[xid]; !ok { + return false + } + if _, ok := g.nodes[yid]; !ok { + return false + } + if _, ok := g.from[xid][yid]; ok { + return true + } + _, ok := g.from[yid][xid] + return ok +} + +// Edge returns the edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +func (g *DirectedGraph) Edge(u, v graph.Node) graph.Edge { + if _, ok := g.nodes[u.ID()]; !ok { + return nil + } + if _, ok := g.nodes[v.ID()]; !ok { + return nil + } + edge, ok := g.from[u.ID()][v.ID()] + if !ok { + return nil + } + return edge +} + +// HasEdgeFromTo returns whether an edge exists in the graph from u to v. +func (g *DirectedGraph) HasEdgeFromTo(u, v graph.Node) bool { + if _, ok := g.nodes[u.ID()]; !ok { + return false + } + if _, ok := g.nodes[v.ID()]; !ok { + return false + } + if _, ok := g.from[u.ID()][v.ID()]; !ok { + return false + } + return true +} + +// Weight returns the weight for the edge between x and y if Edge(x, y) returns a non-nil Edge. +// If x and y are the same node or there is no joining edge between the two nodes the weight +// value returned is either the graph's absent or self value. Weight returns true if an edge +// exists between x and y or if x and y have the same ID, false otherwise. +func (g *DirectedGraph) Weight(x, y graph.Node) (w float64, ok bool) { + xid := x.ID() + yid := y.ID() + if xid == yid { + return g.self, true + } + if to, ok := g.from[xid]; ok { + if e, ok := to[yid]; ok { + return e.Weight(), true + } + } + return g.absent, false +} + +// Degree returns the in+out degree of n in g. +func (g *DirectedGraph) Degree(n graph.Node) int { + if _, ok := g.nodes[n.ID()]; !ok { + return 0 + } + + return len(g.from[n.ID()]) + len(g.to[n.ID()]) +} diff --git a/graph/simple/directed_test.go b/graph/simple/directed_test.go new file mode 100644 index 00000000..c685a050 --- /dev/null +++ b/graph/simple/directed_test.go @@ -0,0 +1,63 @@ +// Copyright ©2014 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
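As a brief usage sketch of the DirectedGraph type defined above (the node IDs and weights are made up for illustration; the commented results follow from the methods shown):

```go
package main

import (
	"fmt"
	"math"

	"github.com/gonum/graph/simple"
)

func main() {
	// Self-edge weight 0, absent-edge weight +Inf.
	g := simple.NewDirectedGraph(0, math.Inf(1))

	// SetEdge adds any missing end nodes before recording the edge.
	g.SetEdge(simple.Edge{F: simple.Node(1), T: simple.Node(2), W: 0.5})
	g.SetEdge(simple.Edge{F: simple.Node(2), T: simple.Node(3), W: 1.5})

	fmt.Println(g.HasEdgeFromTo(simple.Node(1), simple.Node(2))) // true
	fmt.Println(g.HasEdgeFromTo(simple.Node(2), simple.Node(1))) // false: direction matters.

	w, ok := g.Weight(simple.Node(2), simple.Node(3))
	fmt.Println(w, ok) // 1.5 true

	// Node 2 has one successor (3) and one predecessor (1).
	fmt.Println(len(g.From(simple.Node(2))), len(g.To(simple.Node(2)))) // 1 1
	fmt.Println(g.Degree(simple.Node(2)))                               // 2
}
```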
+ +package simple + +import ( + "math" + "testing" + + "github.com/gonum/graph" +) + +var _ graph.Graph = &DirectedGraph{} +var _ graph.Directed = &DirectedGraph{} +var _ graph.Directed = &DirectedGraph{} + +// Tests Issue #27 +func TestEdgeOvercounting(t *testing.T) { + g := generateDummyGraph() + + if neigh := g.From(Node(Node(2))); len(neigh) != 2 { + t.Errorf("Node 2 has incorrect number of neighbors got neighbors %v (count %d), expected 2 neighbors {0,1}", neigh, len(neigh)) + } +} + +func generateDummyGraph() *DirectedGraph { + nodes := [4]struct{ srcID, targetID int }{ + {2, 1}, + {1, 0}, + {2, 0}, + {0, 2}, + } + + g := NewDirectedGraph(0, math.Inf(1)) + + for _, n := range nodes { + g.SetEdge(Edge{F: Node(n.srcID), T: Node(n.targetID), W: 1}) + } + + return g +} + +// Test for issue #123 https://github.com/gonum/graph/issues/123 +func TestIssue123DirectedGraph(t *testing.T) { + defer func() { + if r := recover(); r != nil { + t.Errorf("unexpected panic: %v", r) + } + }() + g := NewDirectedGraph(0, math.Inf(1)) + + n0 := Node(g.NewNodeID()) + g.AddNode(n0) + + n1 := Node(g.NewNodeID()) + g.AddNode(n1) + + g.RemoveNode(n0) + + n2 := Node(g.NewNodeID()) + g.AddNode(n2) +} diff --git a/graph/simple/simple.go b/graph/simple/simple.go new file mode 100644 index 00000000..ab9fff0b --- /dev/null +++ b/graph/simple/simple.go @@ -0,0 +1,45 @@ +// Copyright ©2014 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package simple provides a suite of simple graph implementations satisfying +// the gonum/graph interfaces. +package simple + +import ( + "math" + + "github.com/gonum/graph" +) + +// Node is a simple graph node. +type Node int + +// ID returns the ID number of the node. +func (n Node) ID() int { + return int(n) +} + +// Edge is a simple graph edge. +type Edge struct { + F, T graph.Node + W float64 +} + +// From returns the from-node of the edge. +func (e Edge) From() graph.Node { return e.F } + +// To returns the to-node of the edge. +func (e Edge) To() graph.Node { return e.T } + +// Weight returns the weight of the edge. +func (e Edge) Weight() float64 { return e.W } + +// maxInt is the maximum value of the machine-dependent int type. +const maxInt int = int(^uint(0) >> 1) + +// isSame returns whether two float64 values are the same where NaN values +// are equalable. +func isSame(a, b float64) bool { + return a == b || (math.IsNaN(a) && math.IsNaN(b)) +} diff --git a/graph/simple/undirected.go b/graph/simple/undirected.go new file mode 100644 index 00000000..67154b50 --- /dev/null +++ b/graph/simple/undirected.go @@ -0,0 +1,241 @@ +// Copyright ©2014 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package simple + +import ( + "fmt" + + "golang.org/x/tools/container/intsets" + + "github.com/gonum/graph" +) + +// UndirectedGraph implements a generalized undirected graph. +type UndirectedGraph struct { + nodes map[int]graph.Node + edges map[int]map[int]graph.Edge + + self, absent float64 + + freeIDs intsets.Sparse + usedIDs intsets.Sparse +} + +// NewUndirectedGraph returns an UndirectedGraph with the specified self and absent +// edge weight values. 
+func NewUndirectedGraph(self, absent float64) *UndirectedGraph { + return &UndirectedGraph{ + nodes: make(map[int]graph.Node), + edges: make(map[int]map[int]graph.Edge), + + self: self, + absent: absent, + } +} + +// NewNodeID returns a new unique ID for a node to be added to g. The returned ID does +// not become a valid ID in g until it is added to g. +func (g *UndirectedGraph) NewNodeID() int { + if len(g.nodes) == 0 { + return 0 + } + if len(g.nodes) == maxInt { + panic(fmt.Sprintf("simple: cannot allocate node: no slot")) + } + + var id int + if g.freeIDs.Len() != 0 && g.freeIDs.TakeMin(&id) { + return id + } + if id = g.usedIDs.Max(); id < maxInt { + return id + 1 + } + for id = 0; id < maxInt; id++ { + if !g.usedIDs.Has(id) { + return id + } + } + panic("unreachable") +} + +// AddNode adds n to the graph. It panics if the added node ID matches an existing node ID. +func (g *UndirectedGraph) AddNode(n graph.Node) { + if _, exists := g.nodes[n.ID()]; exists { + panic(fmt.Sprintf("simple: node ID collision: %d", n.ID())) + } + g.nodes[n.ID()] = n + g.edges[n.ID()] = make(map[int]graph.Edge) + + g.freeIDs.Remove(n.ID()) + g.usedIDs.Insert(n.ID()) +} + +// RemoveNode removes n from the graph, as well as any edges attached to it. If the node +// is not in the graph it is a no-op. +func (g *UndirectedGraph) RemoveNode(n graph.Node) { + if _, ok := g.nodes[n.ID()]; !ok { + return + } + delete(g.nodes, n.ID()) + + for from := range g.edges[n.ID()] { + delete(g.edges[from], n.ID()) + } + delete(g.edges, n.ID()) + + g.freeIDs.Insert(n.ID()) + g.usedIDs.Remove(n.ID()) + +} + +// SetEdge adds e, an edge from one node to another. If the nodes do not exist, they are added. +// It will panic if the IDs of the e.From and e.To are equal. +func (g *UndirectedGraph) SetEdge(e graph.Edge) { + var ( + from = e.From() + fid = from.ID() + to = e.To() + tid = to.ID() + ) + + if fid == tid { + panic("simple: adding self edge") + } + + if !g.Has(from) { + g.AddNode(from) + } + if !g.Has(to) { + g.AddNode(to) + } + + g.edges[fid][tid] = e + g.edges[tid][fid] = e +} + +// RemoveEdge removes e from the graph, leaving the terminal nodes. If the edge does not exist +// it is a no-op. +func (g *UndirectedGraph) RemoveEdge(e graph.Edge) { + from, to := e.From(), e.To() + if _, ok := g.nodes[from.ID()]; !ok { + return + } + if _, ok := g.nodes[to.ID()]; !ok { + return + } + + delete(g.edges[from.ID()], to.ID()) + delete(g.edges[to.ID()], from.ID()) +} + +// Node returns the node in the graph with the given ID. +func (g *UndirectedGraph) Node(id int) graph.Node { + return g.nodes[id] +} + +// Has returns whether the node exists within the graph. +func (g *UndirectedGraph) Has(n graph.Node) bool { + _, ok := g.nodes[n.ID()] + return ok +} + +// Nodes returns all the nodes in the graph. +func (g *UndirectedGraph) Nodes() []graph.Node { + nodes := make([]graph.Node, len(g.nodes)) + i := 0 + for _, n := range g.nodes { + nodes[i] = n + i++ + } + + return nodes +} + +// Edges returns all the edges in the graph. +func (g *UndirectedGraph) Edges() []graph.Edge { + var edges []graph.Edge + + seen := make(map[[2]int]struct{}) + for _, u := range g.edges { + for _, e := range u { + uid := e.From().ID() + vid := e.To().ID() + if _, ok := seen[[2]int{uid, vid}]; ok { + continue + } + seen[[2]int{uid, vid}] = struct{}{} + seen[[2]int{vid, uid}] = struct{}{} + edges = append(edges, e) + } + } + + return edges +} + +// From returns all nodes in g that can be reached directly from n. 
+func (g *UndirectedGraph) From(n graph.Node) []graph.Node { + if !g.Has(n) { + return nil + } + + nodes := make([]graph.Node, len(g.edges[n.ID()])) + i := 0 + for from := range g.edges[n.ID()] { + nodes[i] = g.nodes[from] + i++ + } + + return nodes +} + +// HasEdgeBetween returns whether an edge exists between nodes x and y. +func (g *UndirectedGraph) HasEdgeBetween(x, y graph.Node) bool { + _, ok := g.edges[x.ID()][y.ID()] + return ok +} + +// Edge returns the edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +func (g *UndirectedGraph) Edge(u, v graph.Node) graph.Edge { + return g.EdgeBetween(u, v) +} + +// EdgeBetween returns the edge between nodes x and y. +func (g *UndirectedGraph) EdgeBetween(x, y graph.Node) graph.Edge { + // We don't need to check if neigh exists because + // it's implicit in the edges access. + if !g.Has(x) { + return nil + } + + return g.edges[x.ID()][y.ID()] +} + +// Weight returns the weight for the edge between x and y if Edge(x, y) returns a non-nil Edge. +// If x and y are the same node or there is no joining edge between the two nodes the weight +// value returned is either the graph's absent or self value. Weight returns true if an edge +// exists between x and y or if x and y have the same ID, false otherwise. +func (g *UndirectedGraph) Weight(x, y graph.Node) (w float64, ok bool) { + xid := x.ID() + yid := y.ID() + if xid == yid { + return g.self, true + } + if n, ok := g.edges[xid]; ok { + if e, ok := n[yid]; ok { + return e.Weight(), true + } + } + return g.absent, false +} + +// Degree returns the degree of n in g. +func (g *UndirectedGraph) Degree(n graph.Node) int { + if _, ok := g.nodes[n.ID()]; !ok { + return 0 + } + + return len(g.edges[n.ID()]) +} diff --git a/graph/simple/undirected_test.go b/graph/simple/undirected_test.go new file mode 100644 index 00000000..1aee3e28 --- /dev/null +++ b/graph/simple/undirected_test.go @@ -0,0 +1,63 @@ +// Copyright ©2014 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
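A corresponding sketch for the UndirectedGraph type above (again with hypothetical node IDs and weights):

```go
package main

import (
	"fmt"
	"math"

	"github.com/gonum/graph/simple"
)

func main() {
	g := simple.NewUndirectedGraph(0, math.Inf(1))

	// Each undirected edge is stored under both of its end points.
	g.SetEdge(simple.Edge{F: simple.Node(0), T: simple.Node(1), W: 2})
	g.SetEdge(simple.Edge{F: simple.Node(1), T: simple.Node(2), W: 3})

	fmt.Println(g.HasEdgeBetween(simple.Node(1), simple.Node(0))) // true: node order is irrelevant.

	w, ok := g.Weight(simple.Node(2), simple.Node(1))
	fmt.Println(w, ok) // 3 true

	fmt.Println(g.Degree(simple.Node(1))) // 2
	fmt.Println(len(g.Edges()))           // 2: each edge is reported once.
}
```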
+ +package simple + +import ( + "math" + "testing" + + "github.com/gonum/graph" +) + +var _ graph.Graph = (*UndirectedGraph)(nil) + +func TestAssertMutableNotDirected(t *testing.T) { + var g graph.UndirectedBuilder = NewUndirectedGraph(0, math.Inf(1)) + if _, ok := g.(graph.Directed); ok { + t.Fatal("Graph is directed, but a MutableGraph cannot safely be directed!") + } +} + +func TestMaxID(t *testing.T) { + g := NewUndirectedGraph(0, math.Inf(1)) + nodes := make(map[graph.Node]struct{}) + for i := Node(0); i < 3; i++ { + g.AddNode(i) + nodes[i] = struct{}{} + } + g.RemoveNode(Node(0)) + delete(nodes, Node(0)) + g.RemoveNode(Node(2)) + delete(nodes, Node(2)) + n := Node(g.NewNodeID()) + g.AddNode(n) + if !g.Has(n) { + t.Error("added node does not exist in graph") + } + if _, exists := nodes[n]; exists { + t.Errorf("Created already existing node id: %v", n.ID()) + } +} + +// Test for issue #123 https://github.com/gonum/graph/issues/123 +func TestIssue123UndirectedGraph(t *testing.T) { + defer func() { + if r := recover(); r != nil { + t.Errorf("unexpected panic: %v", r) + } + }() + g := NewUndirectedGraph(0, math.Inf(1)) + + n0 := Node(g.NewNodeID()) + g.AddNode(n0) + + n1 := Node(g.NewNodeID()) + g.AddNode(n1) + + g.RemoveNode(n0) + + n2 := Node(g.NewNodeID()) + g.AddNode(n2) +} diff --git a/graph/topo/bench_test.go b/graph/topo/bench_test.go new file mode 100644 index 00000000..722b13e2 --- /dev/null +++ b/graph/topo/bench_test.go @@ -0,0 +1,58 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package topo + +import ( + "math" + "testing" + + "github.com/gonum/graph" + "github.com/gonum/graph/graphs/gen" + "github.com/gonum/graph/simple" +) + +var ( + gnpDirected_10_tenth = gnpDirected(10, 0.1) + gnpDirected_100_tenth = gnpDirected(100, 0.1) + gnpDirected_1000_tenth = gnpDirected(1000, 0.1) + gnpDirected_10_half = gnpDirected(10, 0.5) + gnpDirected_100_half = gnpDirected(100, 0.5) + gnpDirected_1000_half = gnpDirected(1000, 0.5) +) + +func gnpDirected(n int, p float64) graph.Directed { + g := simple.NewDirectedGraph(0, math.Inf(1)) + gen.Gnp(g, n, p, nil) + return g +} + +func benchmarkTarjanSCC(b *testing.B, g graph.Directed) { + var sccs [][]graph.Node + for i := 0; i < b.N; i++ { + sccs = TarjanSCC(g) + } + if len(sccs) == 0 { + b.Fatal("unexpected number zero-sized SCC set") + } +} + +func BenchmarkTarjanSCCGnp_10_tenth(b *testing.B) { + benchmarkTarjanSCC(b, gnpDirected_10_tenth) +} +func BenchmarkTarjanSCCGnp_100_tenth(b *testing.B) { + benchmarkTarjanSCC(b, gnpDirected_100_tenth) +} +func BenchmarkTarjanSCCGnp_1000_tenth(b *testing.B) { + benchmarkTarjanSCC(b, gnpDirected_1000_tenth) +} +func BenchmarkTarjanSCCGnp_10_half(b *testing.B) { + benchmarkTarjanSCC(b, gnpDirected_10_half) +} +func BenchmarkTarjanSCCGnp_100_half(b *testing.B) { + benchmarkTarjanSCC(b, gnpDirected_100_half) +} +func BenchmarkTarjanSCCGnp_1000_half(b *testing.B) { + benchmarkTarjanSCC(b, gnpDirected_1000_half) +} diff --git a/graph/topo/bron_kerbosch.go b/graph/topo/bron_kerbosch.go new file mode 100644 index 00000000..c2ec7f9b --- /dev/null +++ b/graph/topo/bron_kerbosch.go @@ -0,0 +1,225 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
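The benchmarks above pair the gen.Gnp random-graph generator with TarjanSCC. A rough end-to-end sketch of that combination follows; the graph size and edge probability are arbitrary, and the call mirrors the benchmarks' use of a nil rand source.

```go
package main

import (
	"fmt"
	"math"

	"github.com/gonum/graph/graphs/gen"
	"github.com/gonum/graph/simple"
	"github.com/gonum/graph/topo"
)

func main() {
	// Random G(n,p) directed graph, built the same way as in the benchmarks.
	g := simple.NewDirectedGraph(0, math.Inf(1))
	gen.Gnp(g, 100, 0.1, nil) // nil falls back to the default rand source, as in the benchmarks.

	// Strongly connected components of the random graph.
	sccs := topo.TarjanSCC(g)
	fmt.Println("components:", len(sccs))
}
```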
+ +package topo + +import ( + "github.com/gonum/graph" + "github.com/gonum/graph/internal/set" +) + +// VertexOrdering returns the vertex ordering and the k-cores of +// the undirected graph g. +func VertexOrdering(g graph.Undirected) (order []graph.Node, cores [][]graph.Node) { + nodes := g.Nodes() + + // The algorithm used here is essentially as described at + // http://en.wikipedia.org/w/index.php?title=Degeneracy_%28graph_theory%29&oldid=640308710 + + // Initialize an output list L. + var l []graph.Node + + // Compute a number d_v for each vertex v in G, + // the number of neighbors of v that are not already in L. + // Initially, these numbers are just the degrees of the vertices. + dv := make(map[int]int, len(nodes)) + var ( + maxDegree int + neighbours = make(map[int][]graph.Node) + ) + for _, n := range nodes { + adj := g.From(n) + neighbours[n.ID()] = adj + dv[n.ID()] = len(adj) + if len(adj) > maxDegree { + maxDegree = len(adj) + } + } + + // Initialize an array D such that D[i] contains a list of the + // vertices v that are not already in L for which d_v = i. + d := make([][]graph.Node, maxDegree+1) + for _, n := range nodes { + deg := dv[n.ID()] + d[deg] = append(d[deg], n) + } + + // Initialize k to 0. + k := 0 + // Repeat n times: + s := []int{0} + for range nodes { + // Scan the array cells D[0], D[1], ... until + // finding an i for which D[i] is nonempty. + var ( + i int + di []graph.Node + ) + for i, di = range d { + if len(di) != 0 { + break + } + } + + // Set k to max(k,i). + if i > k { + k = i + s = append(s, make([]int, k-len(s)+1)...) + } + + // Select a vertex v from D[i]. Add v to the + // beginning of L and remove it from D[i]. + var v graph.Node + v, d[i] = di[len(di)-1], di[:len(di)-1] + l = append(l, v) + s[k]++ + delete(dv, v.ID()) + + // For each neighbor w of v not already in L, + // subtract one from d_w and move w to the + // cell of D corresponding to the new value of d_w. + for _, w := range neighbours[v.ID()] { + dw, ok := dv[w.ID()] + if !ok { + continue + } + for i, n := range d[dw] { + if n.ID() == w.ID() { + d[dw][i], d[dw] = d[dw][len(d[dw])-1], d[dw][:len(d[dw])-1] + dw-- + d[dw] = append(d[dw], w) + break + } + } + dv[w.ID()] = dw + } + } + + for i, j := 0, len(l)-1; i < j; i, j = i+1, j-1 { + l[i], l[j] = l[j], l[i] + } + cores = make([][]graph.Node, len(s)) + offset := len(l) + for i, n := range s { + cores[i] = l[offset-n : offset] + offset -= n + } + return l, cores +} + +// BronKerbosch returns the set of maximal cliques of the undirected graph g. 
+func BronKerbosch(g graph.Undirected) [][]graph.Node { + nodes := g.Nodes() + + // The algorithm used here is essentially BronKerbosch3 as described at + // http://en.wikipedia.org/w/index.php?title=Bron%E2%80%93Kerbosch_algorithm&oldid=656805858 + + p := make(set.Nodes, len(nodes)) + for _, n := range nodes { + p.Add(n) + } + x := make(set.Nodes) + var bk bronKerbosch + order, _ := VertexOrdering(g) + for _, v := range order { + neighbours := g.From(v) + nv := make(set.Nodes, len(neighbours)) + for _, n := range neighbours { + nv.Add(n) + } + bk.maximalCliquePivot(g, []graph.Node{v}, make(set.Nodes).Intersect(p, nv), make(set.Nodes).Intersect(x, nv)) + p.Remove(v) + x.Add(v) + } + return bk +} + +type bronKerbosch [][]graph.Node + +func (bk *bronKerbosch) maximalCliquePivot(g graph.Undirected, r []graph.Node, p, x set.Nodes) { + if len(p) == 0 && len(x) == 0 { + *bk = append(*bk, r) + return + } + + neighbours := bk.choosePivotFrom(g, p, x) + nu := make(set.Nodes, len(neighbours)) + for _, n := range neighbours { + nu.Add(n) + } + for _, v := range p { + if nu.Has(v) { + continue + } + neighbours := g.From(v) + nv := make(set.Nodes, len(neighbours)) + for _, n := range neighbours { + nv.Add(n) + } + + var found bool + for _, n := range r { + if n.ID() == v.ID() { + found = true + break + } + } + var sr []graph.Node + if !found { + sr = append(r[:len(r):len(r)], v) + } + + bk.maximalCliquePivot(g, sr, make(set.Nodes).Intersect(p, nv), make(set.Nodes).Intersect(x, nv)) + p.Remove(v) + x.Add(v) + } +} + +func (*bronKerbosch) choosePivotFrom(g graph.Undirected, p, x set.Nodes) (neighbors []graph.Node) { + // TODO(kortschak): Investigate the impact of pivot choice that maximises + // |p ⋂ neighbours(u)| as a function of input size. Until then, leave as + // compile time option. + if !tomitaTanakaTakahashi { + for _, n := range p { + return g.From(n) + } + for _, n := range x { + return g.From(n) + } + panic("bronKerbosch: empty set") + } + + var ( + max = -1 + pivot graph.Node + ) + maxNeighbors := func(s set.Nodes) { + outer: + for _, u := range s { + nb := g.From(u) + c := len(nb) + if c <= max { + continue + } + for n := range nb { + if _, ok := p[n]; ok { + continue + } + c-- + if c <= max { + continue outer + } + } + max = c + pivot = u + neighbors = nb + } + } + maxNeighbors(p) + maxNeighbors(x) + if pivot == nil { + panic("bronKerbosch: empty set") + } + return neighbors +} diff --git a/graph/topo/bron_kerbosch_test.go b/graph/topo/bron_kerbosch_test.go new file mode 100644 index 00000000..39fb98fe --- /dev/null +++ b/graph/topo/bron_kerbosch_test.go @@ -0,0 +1,164 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
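A small sketch of calling BronKerbosch directly; the graph here is a made-up triangle with a pendant edge, not one of the package's test fixtures.

```go
package main

import (
	"fmt"
	"math"

	"github.com/gonum/graph/simple"
	"github.com/gonum/graph/topo"
)

func main() {
	g := simple.NewUndirectedGraph(0, math.Inf(1))

	// Triangle {0,1,2} plus the pendant edge 2--3.
	for _, e := range []simple.Edge{
		{F: simple.Node(0), T: simple.Node(1)},
		{F: simple.Node(0), T: simple.Node(2)},
		{F: simple.Node(1), T: simple.Node(2)},
		{F: simple.Node(2), T: simple.Node(3)},
	} {
		g.SetEdge(e)
	}

	// Expect the maximal cliques {0 1 2} and {2 3}, in some order.
	for _, clique := range topo.BronKerbosch(g) {
		fmt.Println(clique)
	}
}
```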
+ +package topo + +import ( + "math" + "reflect" + "sort" + "testing" + + "github.com/gonum/graph/internal/ordered" + "github.com/gonum/graph/simple" +) + +var vOrderTests = []struct { + g []intset + wantCore [][]int + wantK int +}{ + { + g: []intset{ + 0: linksTo(1, 2, 4, 6), + 1: linksTo(2, 4, 6), + 2: linksTo(3, 6), + 3: linksTo(4, 5), + 4: linksTo(6), + 5: nil, + 6: nil, + }, + wantCore: [][]int{ + {}, + {5}, + {3}, + {0, 1, 2, 4, 6}, + }, + wantK: 3, + }, + { + g: batageljZaversnikGraph, + wantCore: [][]int{ + {0}, + {5, 9, 10, 16}, + {1, 2, 3, 4, 11, 12, 13, 15}, + {6, 7, 8, 14, 17, 18, 19, 20}, + }, + wantK: 3, + }, +} + +func TestVertexOrdering(t *testing.T) { + for i, test := range vOrderTests { + g := simple.NewUndirectedGraph(0, math.Inf(1)) + for u, e := range test.g { + // Add nodes that are not defined by an edge. + if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v)}) + } + } + order, core := VertexOrdering(g) + if len(core)-1 != test.wantK { + t.Errorf("unexpected value of k for test %d: got: %d want: %d", i, len(core)-1, test.wantK) + } + var offset int + for k, want := range test.wantCore { + sort.Ints(want) + got := make([]int, len(want)) + for j, n := range order[len(order)-len(want)-offset : len(order)-offset] { + got[j] = n.ID() + } + sort.Ints(got) + if !reflect.DeepEqual(got, want) { + t.Errorf("unexpected %d-core for test %d:\ngot: %v\nwant:%v", got, test.wantCore) + } + + for j, n := range core[k] { + got[j] = n.ID() + } + sort.Ints(got) + if !reflect.DeepEqual(got, want) { + t.Errorf("unexpected %d-core for test %d:\ngot: %v\nwant:%v", got, test.wantCore) + } + offset += len(want) + } + } +} + +var bronKerboschTests = []struct { + g []intset + want [][]int +}{ + { + // This is the example given in the Bron-Kerbosch article on wikipedia (renumbered). + // http://en.wikipedia.org/w/index.php?title=Bron%E2%80%93Kerbosch_algorithm&oldid=656805858 + g: []intset{ + 0: linksTo(1, 4), + 1: linksTo(2, 4), + 2: linksTo(3), + 3: linksTo(4, 5), + 4: nil, + 5: nil, + }, + want: [][]int{ + {0, 1, 4}, + {1, 2}, + {2, 3}, + {3, 4}, + {3, 5}, + }, + }, + { + g: batageljZaversnikGraph, + want: [][]int{ + {0}, + {1, 2}, + {1, 3}, + {2, 4}, + {3, 4}, + {4, 5}, + {6, 7, 8, 14}, + {7, 11, 12}, + {9, 11}, + {10, 11}, + {12, 18}, + {13, 14, 15}, + {14, 15, 17}, + {15, 16}, + {17, 18, 19, 20}, + }, + }, +} + +func TestBronKerbosch(t *testing.T) { + for i, test := range bronKerboschTests { + g := simple.NewUndirectedGraph(0, math.Inf(1)) + for u, e := range test.g { + // Add nodes that are not defined by an edge. + if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v)}) + } + } + cliques := BronKerbosch(g) + got := make([][]int, len(cliques)) + for j, c := range cliques { + ids := make([]int, len(c)) + for k, n := range c { + ids[k] = n.ID() + } + sort.Ints(ids) + got[j] = ids + } + sort.Sort(ordered.BySliceValues(got)) + if !reflect.DeepEqual(got, test.want) { + t.Errorf("unexpected cliques for test %d:\ngot: %v\nwant:%v", i, got, test.want) + } + } +} diff --git a/graph/topo/common_test.go b/graph/topo/common_test.go new file mode 100644 index 00000000..5d21786c --- /dev/null +++ b/graph/topo/common_test.go @@ -0,0 +1,47 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
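The tests above also pin down how the VertexOrdering result is read: cores[k] holds the vertices whose highest core is k, so the graph's degeneracy is len(cores)-1. A short sketch of that interpretation on a hypothetical graph (a 4-clique with one pendant vertex); the commented values are what the implementation above should report for it.

```go
package main

import (
	"fmt"
	"math"

	"github.com/gonum/graph/simple"
	"github.com/gonum/graph/topo"
)

func main() {
	g := simple.NewUndirectedGraph(0, math.Inf(1))

	// A 4-clique on {0,1,2,3} with a pendant vertex 4 attached to 3.
	for _, e := range []simple.Edge{
		{F: simple.Node(0), T: simple.Node(1)},
		{F: simple.Node(0), T: simple.Node(2)},
		{F: simple.Node(0), T: simple.Node(3)},
		{F: simple.Node(1), T: simple.Node(2)},
		{F: simple.Node(1), T: simple.Node(3)},
		{F: simple.Node(2), T: simple.Node(3)},
		{F: simple.Node(3), T: simple.Node(4)},
	} {
		g.SetEdge(e)
	}

	order, cores := topo.VertexOrdering(g)
	fmt.Println(len(order))     // 5: every node appears once in the ordering.
	fmt.Println(len(cores) - 1) // 3: the graph's degeneracy.
	fmt.Println(len(cores[3]))  // 4: the clique members form the 3-core shell.
	fmt.Println(len(cores[1]))  // 1: the pendant vertex.
}
```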
+ +package topo + +// batageljZaversnikGraph is the example graph from +// figure 1 of http://arxiv.org/abs/cs/0310049v1 +var batageljZaversnikGraph = []intset{ + 0: nil, + + 1: linksTo(2, 3), + 2: linksTo(4), + 3: linksTo(4), + 4: linksTo(5), + 5: nil, + + 6: linksTo(7, 8, 14), + 7: linksTo(8, 11, 12, 14), + 8: linksTo(14), + 9: linksTo(11), + 10: linksTo(11), + 11: linksTo(12), + 12: linksTo(18), + 13: linksTo(14, 15), + 14: linksTo(15, 17), + 15: linksTo(16, 17), + 16: nil, + 17: linksTo(18, 19, 20), + 18: linksTo(19, 20), + 19: linksTo(20), + 20: nil, +} + +// intset is an integer set. +type intset map[int]struct{} + +func linksTo(i ...int) intset { + if len(i) == 0 { + return nil + } + s := make(intset) + for _, v := range i { + s[v] = struct{}{} + } + return s +} diff --git a/graph/topo/johnson_cycles.go b/graph/topo/johnson_cycles.go new file mode 100644 index 00000000..b62d110e --- /dev/null +++ b/graph/topo/johnson_cycles.go @@ -0,0 +1,280 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package topo + +import ( + "sort" + + "github.com/gonum/graph" + "github.com/gonum/graph/internal/ordered" + "github.com/gonum/graph/internal/set" +) + +// johnson implements Johnson's "Finding all the elementary +// circuits of a directed graph" algorithm. SIAM J. Comput. 4(1):1975. +// +// Comments in the johnson methods are kept in sync with the comments +// and labels from the paper. +type johnson struct { + adjacent johnsonGraph // SCC adjacency list. + b []set.Ints // Johnson's "B-list". + blocked []bool + s int + + stack []graph.Node + + result [][]graph.Node +} + +// CyclesIn returns the set of elementary cycles in the graph g. +func CyclesIn(g graph.Directed) [][]graph.Node { + jg := johnsonGraphFrom(g) + j := johnson{ + adjacent: jg, + b: make([]set.Ints, len(jg.orig)), + blocked: make([]bool, len(jg.orig)), + } + + // len(j.nodes) is the order of g. + for j.s < len(j.adjacent.orig)-1 { + // We use the previous SCC adjacency to reduce the work needed. + sccs := TarjanSCC(j.adjacent.subgraph(j.s)) + // A_k = adjacency structure of strong component K with least + // vertex in subgraph of G induced by {s, s+1, ... ,n}. + j.adjacent = j.adjacent.sccSubGraph(sccs, 2) // Only allow SCCs with >= 2 vertices. + if j.adjacent.order() == 0 { + break + } + + // s = least vertex in V_k + if s := j.adjacent.leastVertexIndex(); s < j.s { + j.s = s + } + for i, v := range j.adjacent.orig { + if !j.adjacent.nodes.Has(v.ID()) { + continue + } + if len(j.adjacent.succ[v.ID()]) > 0 { + j.blocked[i] = false + j.b[i] = make(set.Ints) + } + } + //L3: + _ = j.circuit(j.s) + j.s++ + } + + return j.result +} + +// circuit is the CIRCUIT sub-procedure in the paper. +func (j *johnson) circuit(v int) bool { + f := false + n := j.adjacent.orig[v] + j.stack = append(j.stack, n) + j.blocked[v] = true + + //L1: + for w := range j.adjacent.succ[n.ID()] { + w = j.adjacent.indexOf(w) + if w == j.s { + // Output circuit composed of stack followed by s. + r := make([]graph.Node, len(j.stack)+1) + copy(r, j.stack) + r[len(r)-1] = j.adjacent.orig[j.s] + j.result = append(j.result, r) + f = true + } else if !j.blocked[w] { + if j.circuit(w) { + f = true + } + } + } + + //L2: + if f { + j.unblock(v) + } else { + for w := range j.adjacent.succ[n.ID()] { + j.b[j.adjacent.indexOf(w)].Add(v) + } + } + j.stack = j.stack[:len(j.stack)-1] + + return f +} + +// unblock is the UNBLOCK sub-procedure in the paper. 
+func (j *johnson) unblock(u int) { + j.blocked[u] = false + for w := range j.b[u] { + j.b[u].Remove(w) + if j.blocked[w] { + j.unblock(w) + } + } +} + +// johnsonGraph is an edge list representation of a graph with helpers +// necessary for Johnson's algorithm +type johnsonGraph struct { + // Keep the original graph nodes and a + // look-up to into the non-sparse + // collection of potentially sparse IDs. + orig []graph.Node + index map[int]int + + nodes set.Ints + succ map[int]set.Ints +} + +// johnsonGraphFrom returns a deep copy of the graph g. +func johnsonGraphFrom(g graph.Directed) johnsonGraph { + nodes := g.Nodes() + sort.Sort(ordered.ByID(nodes)) + c := johnsonGraph{ + orig: nodes, + index: make(map[int]int, len(nodes)), + + nodes: make(set.Ints, len(nodes)), + succ: make(map[int]set.Ints), + } + for i, u := range nodes { + c.index[u.ID()] = i + for _, v := range g.From(u) { + if c.succ[u.ID()] == nil { + c.succ[u.ID()] = make(set.Ints) + c.nodes.Add(u.ID()) + } + c.nodes.Add(v.ID()) + c.succ[u.ID()].Add(v.ID()) + } + } + return c +} + +// order returns the order of the graph. +func (g johnsonGraph) order() int { return g.nodes.Count() } + +// indexOf returns the index of the retained node for the given node ID. +func (g johnsonGraph) indexOf(id int) int { + return g.index[id] +} + +// leastVertexIndex returns the index into orig of the least vertex. +func (g johnsonGraph) leastVertexIndex() int { + for _, v := range g.orig { + if g.nodes.Has(v.ID()) { + return g.indexOf(v.ID()) + } + } + panic("johnsonCycles: empty set") +} + +// subgraph returns a subgraph of g induced by {s, s+1, ... , n}. The +// subgraph is destructively generated in g. +func (g johnsonGraph) subgraph(s int) johnsonGraph { + sn := g.orig[s].ID() + for u, e := range g.succ { + if u < sn { + g.nodes.Remove(u) + delete(g.succ, u) + continue + } + for v := range e { + if v < sn { + g.succ[u].Remove(v) + } + } + } + return g +} + +// sccSubGraph returns the graph of the tarjan's strongly connected +// components with each SCC containing at least min vertices. +// sccSubGraph returns nil if there is no SCC with at least min +// members. +func (g johnsonGraph) sccSubGraph(sccs [][]graph.Node, min int) johnsonGraph { + if len(g.nodes) == 0 { + g.nodes = nil + g.succ = nil + return g + } + sub := johnsonGraph{ + orig: g.orig, + index: g.index, + nodes: make(set.Ints), + succ: make(map[int]set.Ints), + } + + var n int + for _, scc := range sccs { + if len(scc) < min { + continue + } + n++ + for _, u := range scc { + for _, v := range scc { + if _, ok := g.succ[u.ID()][v.ID()]; ok { + if sub.succ[u.ID()] == nil { + sub.succ[u.ID()] = make(set.Ints) + sub.nodes.Add(u.ID()) + } + sub.nodes.Add(v.ID()) + sub.succ[u.ID()].Add(v.ID()) + } + } + } + } + if n == 0 { + g.nodes = nil + g.succ = nil + return g + } + + return sub +} + +// Nodes is required to satisfy Tarjan. +func (g johnsonGraph) Nodes() []graph.Node { + n := make([]graph.Node, 0, len(g.nodes)) + for id := range g.nodes { + n = append(n, johnsonGraphNode(id)) + } + return n +} + +// Successors is required to satisfy Tarjan. 
+func (g johnsonGraph) From(n graph.Node) []graph.Node { + adj := g.succ[n.ID()] + if len(adj) == 0 { + return nil + } + succ := make([]graph.Node, 0, len(adj)) + for n := range adj { + succ = append(succ, johnsonGraphNode(n)) + } + return succ +} + +func (johnsonGraph) Has(graph.Node) bool { + panic("topo: unintended use of johnsonGraph") +} +func (johnsonGraph) HasEdgeBetween(_, _ graph.Node) bool { + panic("topo: unintended use of johnsonGraph") +} +func (johnsonGraph) Edge(_, _ graph.Node) graph.Edge { + panic("topo: unintended use of johnsonGraph") +} +func (johnsonGraph) HasEdgeFromTo(_, _ graph.Node) bool { + panic("topo: unintended use of johnsonGraph") +} +func (johnsonGraph) To(graph.Node) []graph.Node { + panic("topo: unintended use of johnsonGraph") +} + +type johnsonGraphNode int + +func (n johnsonGraphNode) ID() int { return int(n) } diff --git a/graph/topo/johnson_cycles_test.go b/graph/topo/johnson_cycles_test.go new file mode 100644 index 00000000..4c686b35 --- /dev/null +++ b/graph/topo/johnson_cycles_test.go @@ -0,0 +1,119 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package topo + +import ( + "math" + "reflect" + "sort" + "testing" + + "github.com/gonum/graph/internal/ordered" + "github.com/gonum/graph/simple" +) + +var cyclesInTests = []struct { + g []intset + sccs [][]int + want [][]int +}{ + { + g: []intset{ + 0: linksTo(1), + 1: linksTo(2, 7), + 2: linksTo(3, 6), + 3: linksTo(4), + 4: linksTo(2, 5), + 6: linksTo(3, 5), + 7: linksTo(0, 6), + }, + want: [][]int{ + {0, 1, 7, 0}, + {2, 3, 4, 2}, + {2, 6, 3, 4, 2}, + }, + }, + { + g: []intset{ + 0: linksTo(1, 2, 3), + 1: linksTo(2), + 2: linksTo(3), + 3: linksTo(1), + }, + want: [][]int{ + {1, 2, 3, 1}, + }, + }, + { + g: []intset{ + 0: linksTo(1), + 1: linksTo(0, 2), + 2: linksTo(1), + }, + want: [][]int{ + {0, 1, 0}, + {1, 2, 1}, + }, + }, + { + g: []intset{ + 0: linksTo(1), + 1: linksTo(2, 3), + 2: linksTo(4, 5), + 3: linksTo(4, 5), + 4: linksTo(6), + 5: nil, + 6: nil, + }, + want: nil, + }, + { + g: []intset{ + 0: linksTo(1), + 1: linksTo(2, 3, 4), + 2: linksTo(0, 3), + 3: linksTo(4), + 4: linksTo(3), + }, + want: [][]int{ + {0, 1, 2, 0}, + {3, 4, 3}, + }, + }, +} + +func TestCyclesIn(t *testing.T) { + for i, test := range cyclesInTests { + g := simple.NewDirectedGraph(0, math.Inf(1)) + g.AddNode(simple.Node(-10)) // Make sure we test graphs with sparse IDs. + for u, e := range test.g { + // Add nodes that are not defined by an edge. + if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v)}) + } + } + cycles := CyclesIn(g) + var got [][]int + if cycles != nil { + got = make([][]int, len(cycles)) + } + // johnson.circuit does range iteration over maps, + // so sort to ensure consistent ordering. + for j, c := range cycles { + ids := make([]int, len(c)) + for k, n := range c { + ids[k] = n.ID() + } + got[j] = ids + } + sort.Sort(ordered.BySliceValues(got)) + if !reflect.DeepEqual(got, test.want) { + t.Errorf("unexpected johnson result for %d:\n\tgot:%#v\n\twant:%#v", i, got, test.want) + } + } +} diff --git a/graph/topo/non_tomita_choice.go b/graph/topo/non_tomita_choice.go new file mode 100644 index 00000000..de09ebd8 --- /dev/null +++ b/graph/topo/non_tomita_choice.go @@ -0,0 +1,9 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//+build !tomita + +package topo + +const tomitaTanakaTakahashi = false diff --git a/graph/topo/tarjan.go b/graph/topo/tarjan.go new file mode 100644 index 00000000..0c7d9681 --- /dev/null +++ b/graph/topo/tarjan.go @@ -0,0 +1,204 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package topo + +import ( + "fmt" + "sort" + + "golang.org/x/tools/container/intsets" + + "github.com/gonum/graph" + "github.com/gonum/graph/internal/ordered" +) + +// Unorderable is an error containing sets of unorderable graph.Nodes. +type Unorderable [][]graph.Node + +// Error satisfies the error interface. +func (e Unorderable) Error() string { + const maxNodes = 10 + var n int + for _, c := range e { + n += len(c) + } + if n > maxNodes { + // Don't return errors that are too long. + return fmt.Sprintf("topo: no topological ordering: %d nodes in %d cyclic components", n, len(e)) + } + return fmt.Sprintf("topo: no topological ordering: cyclic components: %v", [][]graph.Node(e)) +} + +func lexical(nodes []graph.Node) { sort.Sort(ordered.ByID(nodes)) } + +// Sort performs a topological sort of the directed graph g returning the 'from' to 'to' +// sort order. If a topological ordering is not possible, an Unorderable error is returned +// listing cyclic components in g with each cyclic component's members sorted by ID. When +// an Unorderable error is returned, each cyclic component's topological position within +// the sorted nodes is marked with a nil graph.Node. +func Sort(g graph.Directed) (sorted []graph.Node, err error) { + sccs := TarjanSCC(g) + return sortedFrom(sccs, lexical) +} + +// SortStabilized performs a topological sort of the directed graph g returning the 'from' +// to 'to' sort order, or the order defined by the in place order sort function where there +// is no unambiguous topological ordering. If a topological ordering is not possible, an +// Unorderable error is returned listing cyclic components in g with each cyclic component's +// members sorted by the provided order function. If order is nil, nodes are ordered lexically +// by node ID. When an Unorderable error is returned, each cyclic component's topological +// position within the sorted nodes is marked with a nil graph.Node. +func SortStabilized(g graph.Directed, order func([]graph.Node)) (sorted []graph.Node, err error) { + if order == nil { + order = lexical + } + sccs := tarjanSCCstabilized(g, order) + return sortedFrom(sccs, order) +} + +func sortedFrom(sccs [][]graph.Node, order func([]graph.Node)) ([]graph.Node, error) { + sorted := make([]graph.Node, 0, len(sccs)) + var sc Unorderable + for _, s := range sccs { + if len(s) != 1 { + order(s) + sc = append(sc, s) + sorted = append(sorted, nil) + continue + } + sorted = append(sorted, s[0]) + } + var err error + if sc != nil { + for i, j := 0, len(sc)-1; i < j; i, j = i+1, j-1 { + sc[i], sc[j] = sc[j], sc[i] + } + err = sc + } + reverse(sorted) + return sorted, err +} + +func reverse(p []graph.Node) { + for i, j := 0, len(p)-1; i < j; i, j = i+1, j-1 { + p[i], p[j] = p[j], p[i] + } +} + +// TarjanSCC returns the strongly connected components of the graph g using Tarjan's algorithm. 
+// +// A strongly connected component of a graph is a set of vertices where it's possible to reach any +// vertex in the set from any other (meaning there's a cycle between them.) +// +// Generally speaking, a directed graph where the number of strongly connected components is equal +// to the number of nodes is acyclic, unless you count reflexive edges as a cycle (which requires +// only a little extra testing.) +// +func TarjanSCC(g graph.Directed) [][]graph.Node { + return tarjanSCCstabilized(g, nil) +} + +func tarjanSCCstabilized(g graph.Directed, order func([]graph.Node)) [][]graph.Node { + nodes := g.Nodes() + var succ func(graph.Node) []graph.Node + if order == nil { + succ = g.From + } else { + order(nodes) + reverse(nodes) + + succ = func(n graph.Node) []graph.Node { + to := g.From(n) + order(to) + reverse(to) + return to + } + } + + t := tarjan{ + succ: succ, + + indexTable: make(map[int]int, len(nodes)), + lowLink: make(map[int]int, len(nodes)), + onStack: &intsets.Sparse{}, + } + for _, v := range nodes { + if t.indexTable[v.ID()] == 0 { + t.strongconnect(v) + } + } + return t.sccs +} + +// tarjan implements Tarjan's strongly connected component finding +// algorithm. The implementation is from the pseudocode at +// +// http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm?oldid=642744644 +// +type tarjan struct { + succ func(graph.Node) []graph.Node + + index int + indexTable map[int]int + lowLink map[int]int + onStack *intsets.Sparse + + stack []graph.Node + + sccs [][]graph.Node +} + +// strongconnect is the strongconnect function described in the +// wikipedia article. +func (t *tarjan) strongconnect(v graph.Node) { + vID := v.ID() + + // Set the depth index for v to the smallest unused index. + t.index++ + t.indexTable[vID] = t.index + t.lowLink[vID] = t.index + t.stack = append(t.stack, v) + t.onStack.Insert(vID) + + // Consider successors of v. + for _, w := range t.succ(v) { + wID := w.ID() + if t.indexTable[wID] == 0 { + // Successor w has not yet been visited; recur on it. + t.strongconnect(w) + t.lowLink[vID] = min(t.lowLink[vID], t.lowLink[wID]) + } else if t.onStack.Has(wID) { + // Successor w is in stack s and hence in the current SCC. + t.lowLink[vID] = min(t.lowLink[vID], t.indexTable[wID]) + } + } + + // If v is a root node, pop the stack and generate an SCC. + if t.lowLink[vID] == t.indexTable[vID] { + // Start a new strongly connected component. + var ( + scc []graph.Node + w graph.Node + ) + for { + w, t.stack = t.stack[len(t.stack)-1], t.stack[:len(t.stack)-1] + t.onStack.Remove(w.ID()) + // Add w to current strongly connected component. + scc = append(scc, w) + if w.ID() == vID { + break + } + } + // Output the current strongly connected component. + t.sccs = append(t.sccs, scc) + } +} + +func min(a, b int) int { + if a < b { + return a + } + return b +} diff --git a/graph/topo/tarjan_test.go b/graph/topo/tarjan_test.go new file mode 100644 index 00000000..3c854a43 --- /dev/null +++ b/graph/topo/tarjan_test.go @@ -0,0 +1,309 @@ +// Copyright ©2014 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
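+
+// The sketch below is an editorial illustration of TarjanSCC and Sort in use,
+// assuming the simple graph implementation; it is not part of the tests that
+// follow.
+//
+//	g := simple.NewDirectedGraph(0, math.Inf(1))
+//	g.SetEdge(simple.Edge{F: simple.Node(0), T: simple.Node(1), W: 1})
+//	g.SetEdge(simple.Edge{F: simple.Node(1), T: simple.Node(0), W: 1})
+//	g.AddNode(simple.Node(2))
+//	sccs := TarjanSCC(g)   // Two components: {0, 1} and {2}.
+//	sorted, err := Sort(g) // err is an Unorderable holding the {0, 1} cycle.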
+ +package topo + +import ( + "math" + "reflect" + "sort" + "testing" + + "github.com/gonum/graph" + "github.com/gonum/graph/internal/ordered" + "github.com/gonum/graph/simple" +) + +type interval struct{ start, end int } + +var tarjanTests = []struct { + g []intset + + ambiguousOrder []interval + want [][]int + + sortedLength int + unorderableLength int + sortable bool +}{ + { + g: []intset{ + 0: linksTo(1), + 1: linksTo(2, 7), + 2: linksTo(3, 6), + 3: linksTo(4), + 4: linksTo(2, 5), + 6: linksTo(3, 5), + 7: linksTo(0, 6), + }, + + want: [][]int{ + {5}, + {2, 3, 4, 6}, + {0, 1, 7}, + }, + + sortedLength: 1, + unorderableLength: 2, + sortable: false, + }, + { + g: []intset{ + 0: linksTo(1, 2, 3), + 1: linksTo(2), + 2: linksTo(3), + 3: linksTo(1), + }, + + want: [][]int{ + {1, 2, 3}, + {0}, + }, + + sortedLength: 1, + unorderableLength: 1, + sortable: false, + }, + { + g: []intset{ + 0: linksTo(1), + 1: linksTo(0, 2), + 2: linksTo(1), + }, + + want: [][]int{ + {0, 1, 2}, + }, + + sortedLength: 0, + unorderableLength: 1, + sortable: false, + }, + { + g: []intset{ + 0: linksTo(1), + 1: linksTo(2, 3), + 2: linksTo(4, 5), + 3: linksTo(4, 5), + 4: linksTo(6), + 5: nil, + 6: nil, + }, + + // Node pairs (2, 3) and (4, 5) are not + // relatively orderable within each pair. + ambiguousOrder: []interval{ + {0, 3}, // This includes node 6 since it only needs to be before 4 in topo sort. + {3, 5}, + }, + want: [][]int{ + {6}, {5}, {4}, {3}, {2}, {1}, {0}, + }, + + sortedLength: 7, + sortable: true, + }, + { + g: []intset{ + 0: linksTo(1), + 1: linksTo(2, 3, 4), + 2: linksTo(0, 3), + 3: linksTo(4), + 4: linksTo(3), + }, + + // SCCs are not relatively ordable. + ambiguousOrder: []interval{ + {0, 2}, + }, + want: [][]int{ + {0, 1, 2}, + {3, 4}, + }, + + sortedLength: 0, + unorderableLength: 2, + sortable: false, + }, +} + +func TestSort(t *testing.T) { + for i, test := range tarjanTests { + g := simple.NewDirectedGraph(0, math.Inf(1)) + for u, e := range test.g { + // Add nodes that are not defined by an edge. + if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v)}) + } + } + sorted, err := Sort(g) + var gotSortedLen int + for _, n := range sorted { + if n != nil { + gotSortedLen++ + } + } + if gotSortedLen != test.sortedLength { + t.Errorf("unexpected number of sortable nodes for test %d: got:%d want:%d", i, gotSortedLen, test.sortedLength) + } + if err == nil != test.sortable { + t.Errorf("unexpected sortability for test %d: got error: %v want: nil-error=%t", i, err, test.sortable) + } + if err != nil && len(err.(Unorderable)) != test.unorderableLength { + t.Errorf("unexpected number of unorderable nodes for test %d: got:%d want:%d", i, len(err.(Unorderable)), test.unorderableLength) + } + } +} + +func TestTarjanSCC(t *testing.T) { + for i, test := range tarjanTests { + g := simple.NewDirectedGraph(0, math.Inf(1)) + for u, e := range test.g { + // Add nodes that are not defined by an edge. + if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v)}) + } + } + gotSCCs := TarjanSCC(g) + // tarjan.strongconnect does range iteration over maps, + // so sort SCC members to ensure consistent ordering. 
+ gotIDs := make([][]int, len(gotSCCs)) + for i, scc := range gotSCCs { + gotIDs[i] = make([]int, len(scc)) + for j, id := range scc { + gotIDs[i][j] = id.ID() + } + sort.Ints(gotIDs[i]) + } + for _, iv := range test.ambiguousOrder { + sort.Sort(ordered.BySliceValues(test.want[iv.start:iv.end])) + sort.Sort(ordered.BySliceValues(gotIDs[iv.start:iv.end])) + } + if !reflect.DeepEqual(gotIDs, test.want) { + t.Errorf("unexpected Tarjan scc result for %d:\n\tgot:%v\n\twant:%v", i, gotIDs, test.want) + } + } +} + +var stabilizedSortTests = []struct { + g []intset + + want []graph.Node + err error +}{ + { + g: []intset{ + 0: linksTo(1), + 1: linksTo(2, 7), + 2: linksTo(3, 6), + 3: linksTo(4), + 4: linksTo(2, 5), + 6: linksTo(3, 5), + 7: linksTo(0, 6), + }, + + want: []graph.Node{nil, nil, simple.Node(5)}, + err: Unorderable{ + {simple.Node(0), simple.Node(1), simple.Node(7)}, + {simple.Node(2), simple.Node(3), simple.Node(4), simple.Node(6)}, + }, + }, + { + g: []intset{ + 0: linksTo(1, 2, 3), + 1: linksTo(2), + 2: linksTo(3), + 3: linksTo(1), + }, + + want: []graph.Node{simple.Node(0), nil}, + err: Unorderable{ + {simple.Node(1), simple.Node(2), simple.Node(3)}, + }, + }, + { + g: []intset{ + 0: linksTo(1), + 1: linksTo(0, 2), + 2: linksTo(1), + }, + + want: []graph.Node{nil}, + err: Unorderable{ + {simple.Node(0), simple.Node(1), simple.Node(2)}, + }, + }, + { + g: []intset{ + 0: linksTo(1), + 1: linksTo(2, 3), + 2: linksTo(4, 5), + 3: linksTo(4, 5), + 4: linksTo(6), + 5: nil, + 6: nil, + }, + + want: []graph.Node{simple.Node(0), simple.Node(1), simple.Node(2), simple.Node(3), simple.Node(4), simple.Node(5), simple.Node(6)}, + err: nil, + }, + { + g: []intset{ + 0: linksTo(1), + 1: linksTo(2, 3, 4), + 2: linksTo(0, 3), + 3: linksTo(4), + 4: linksTo(3), + }, + + want: []graph.Node{nil, nil}, + err: Unorderable{ + {simple.Node(0), simple.Node(1), simple.Node(2)}, + {simple.Node(3), simple.Node(4)}, + }, + }, + { + g: []intset{ + 0: linksTo(1, 2, 3, 4, 5, 6), + 1: linksTo(7), + 2: linksTo(7), + 3: linksTo(7), + 4: linksTo(7), + 5: linksTo(7), + 6: linksTo(7), + 7: nil, + }, + + want: []graph.Node{simple.Node(0), simple.Node(1), simple.Node(2), simple.Node(3), simple.Node(4), simple.Node(5), simple.Node(6), simple.Node(7)}, + err: nil, + }, +} + +func TestSortStabilized(t *testing.T) { + for i, test := range stabilizedSortTests { + g := simple.NewDirectedGraph(0, math.Inf(1)) + for u, e := range test.g { + // Add nodes that are not defined by an edge. + if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v)}) + } + } + got, err := SortStabilized(g, nil) + if !reflect.DeepEqual(got, test.want) { + t.Errorf("unexpected sort result for test %d: got:%d want:%d", i, got, test.want) + } + if !reflect.DeepEqual(err, test.err) { + t.Errorf("unexpected sort error for test %d: got:%v want:%v", i, err, test.want) + } + } +} diff --git a/graph/topo/tomita_choice.go b/graph/topo/tomita_choice.go new file mode 100644 index 00000000..d4eca625 --- /dev/null +++ b/graph/topo/tomita_choice.go @@ -0,0 +1,9 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +//+build tomita + +package topo + +const tomitaTanakaTakahashi = true diff --git a/graph/topo/topo.go b/graph/topo/topo.go new file mode 100644 index 00000000..23568bb7 --- /dev/null +++ b/graph/topo/topo.go @@ -0,0 +1,69 @@ +// Copyright ©2014 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package topo provides graph topology analysis functions. +package topo + +import ( + "github.com/gonum/graph" + "github.com/gonum/graph/traverse" +) + +// IsPathIn returns whether path is a path in g. +// +// As special cases, IsPathIn returns true for a zero length path or for +// a path of length 1 when the node in path exists in the graph. +func IsPathIn(g graph.Graph, path []graph.Node) bool { + switch len(path) { + case 0: + return true + case 1: + return g.Has(path[0]) + default: + var canReach func(u, v graph.Node) bool + switch g := g.(type) { + case graph.Directed: + canReach = g.HasEdgeFromTo + default: + canReach = g.HasEdgeBetween + } + + for i, u := range path[:len(path)-1] { + if !canReach(u, path[i+1]) { + return false + } + } + return true + } +} + +// PathExistsIn returns whether there is a path in g starting at from extending +// to to. +// +// PathExistsIn exists as a helper function. If many tests for path existence +// are being performed, other approaches will be more efficient. +func PathExistsIn(g graph.Graph, from, to graph.Node) bool { + var t traverse.BreadthFirst + return t.Walk(g, from, func(n graph.Node, _ int) bool { return n.ID() == to.ID() }) != nil +} + +// ConnectedComponents returns the connected components of the undirected graph g. +func ConnectedComponents(g graph.Undirected) [][]graph.Node { + var ( + w traverse.DepthFirst + c []graph.Node + cc [][]graph.Node + ) + during := func(n graph.Node) { + c = append(c, n) + } + after := func() { + cc = append(cc, []graph.Node(nil)) + cc[len(cc)-1] = append(cc[len(cc)-1], c...) + c = c[:0] + } + w.WalkAll(g, nil, after, during) + + return cc +} diff --git a/graph/topo/topo_test.go b/graph/topo/topo_test.go new file mode 100644 index 00000000..1cd086a4 --- /dev/null +++ b/graph/topo/topo_test.go @@ -0,0 +1,176 @@ +// Copyright ©2014 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
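+
+// The sketch below is an editorial illustration of the helpers in topo.go,
+// assuming the simple graph implementation; it is not part of the tests that
+// follow.
+//
+//	g := simple.NewUndirectedGraph(0, math.Inf(1))
+//	g.SetEdge(simple.Edge{F: simple.Node(0), T: simple.Node(1), W: 1})
+//	g.AddNode(simple.Node(2))
+//	PathExistsIn(g, simple.Node(0), simple.Node(1)) // true
+//	PathExistsIn(g, simple.Node(0), simple.Node(2)) // false
+//	len(ConnectedComponents(g))                     // 2: {0, 1} and {2}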
+ +package topo + +import ( + "math" + "reflect" + "sort" + "testing" + + "github.com/gonum/graph" + "github.com/gonum/graph/internal/ordered" + "github.com/gonum/graph/simple" +) + +func TestIsPath(t *testing.T) { + dg := simple.NewDirectedGraph(0, math.Inf(1)) + if !IsPathIn(dg, nil) { + t.Error("IsPath returns false on nil path") + } + p := []graph.Node{simple.Node(0)} + if IsPathIn(dg, p) { + t.Error("IsPath returns true on nonexistant node") + } + dg.AddNode(p[0]) + if !IsPathIn(dg, p) { + t.Error("IsPath returns false on single-length path with existing node") + } + p = append(p, simple.Node(1)) + dg.AddNode(p[1]) + if IsPathIn(dg, p) { + t.Error("IsPath returns true on bad path of length 2") + } + dg.SetEdge(simple.Edge{F: p[0], T: p[1], W: 1}) + if !IsPathIn(dg, p) { + t.Error("IsPath returns false on correct path of length 2") + } + p[0], p[1] = p[1], p[0] + if IsPathIn(dg, p) { + t.Error("IsPath erroneously returns true for a reverse path") + } + p = []graph.Node{p[1], p[0], simple.Node(2)} + dg.SetEdge(simple.Edge{F: p[1], T: p[2], W: 1}) + if !IsPathIn(dg, p) { + t.Error("IsPath does not find a correct path for path > 2 nodes") + } + ug := simple.NewUndirectedGraph(0, math.Inf(1)) + ug.SetEdge(simple.Edge{F: p[1], T: p[0], W: 1}) + ug.SetEdge(simple.Edge{F: p[1], T: p[2], W: 1}) + if !IsPathIn(dg, p) { + t.Error("IsPath does not correctly account for undirected behavior") + } +} + +var pathExistsInUndirectedTests = []struct { + g []intset + from, to int + want bool +}{ + {g: batageljZaversnikGraph, from: 0, to: 0, want: true}, + {g: batageljZaversnikGraph, from: 0, to: 1, want: false}, + {g: batageljZaversnikGraph, from: 1, to: 2, want: true}, + {g: batageljZaversnikGraph, from: 2, to: 1, want: true}, + {g: batageljZaversnikGraph, from: 2, to: 12, want: false}, + {g: batageljZaversnikGraph, from: 20, to: 6, want: true}, +} + +func TestPathExistsInUndirected(t *testing.T) { + for i, test := range pathExistsInUndirectedTests { + g := simple.NewUndirectedGraph(0, math.Inf(1)) + + for u, e := range test.g { + if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + if !g.Has(simple.Node(v)) { + g.AddNode(simple.Node(v)) + } + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v)}) + } + } + + got := PathExistsIn(g, simple.Node(test.from), simple.Node(test.to)) + if got != test.want { + t.Errorf("unexpected result for path existance in test %d: got:%t want %t", i, got, test.want) + } + } +} + +var pathExistsInDirectedTests = []struct { + g []intset + from, to int + want bool +}{ + // The graph definition is such that from node IDs are + // less than to node IDs. 
+ {g: batageljZaversnikGraph, from: 0, to: 0, want: true}, + {g: batageljZaversnikGraph, from: 0, to: 1, want: false}, + {g: batageljZaversnikGraph, from: 1, to: 2, want: true}, + {g: batageljZaversnikGraph, from: 2, to: 1, want: false}, + {g: batageljZaversnikGraph, from: 2, to: 12, want: false}, + {g: batageljZaversnikGraph, from: 20, to: 6, want: false}, + {g: batageljZaversnikGraph, from: 6, to: 20, want: true}, +} + +func TestPathExistsInDirected(t *testing.T) { + for i, test := range pathExistsInDirectedTests { + g := simple.NewDirectedGraph(0, math.Inf(1)) + + for u, e := range test.g { + if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + if !g.Has(simple.Node(v)) { + g.AddNode(simple.Node(v)) + } + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v)}) + } + } + + got := PathExistsIn(g, simple.Node(test.from), simple.Node(test.to)) + if got != test.want { + t.Errorf("unexpected result for path existance in test %d: got:%t want %t", i, got, test.want) + } + } +} + +var connectedComponentTests = []struct { + g []intset + want [][]int +}{ + { + g: batageljZaversnikGraph, + want: [][]int{ + {0}, + {1, 2, 3, 4, 5}, + {6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20}, + }, + }, +} + +func TestConnectedComponents(t *testing.T) { + for i, test := range connectedComponentTests { + g := simple.NewUndirectedGraph(0, math.Inf(1)) + + for u, e := range test.g { + if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + if !g.Has(simple.Node(v)) { + g.AddNode(simple.Node(v)) + } + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v)}) + } + } + cc := ConnectedComponents(g) + got := make([][]int, len(cc)) + for j, c := range cc { + ids := make([]int, len(c)) + for k, n := range c { + ids[k] = n.ID() + } + sort.Ints(ids) + got[j] = ids + } + sort.Sort(ordered.BySliceValues(got)) + if !reflect.DeepEqual(got, test.want) { + t.Errorf("unexpected connected components for test %d %T:\ngot: %v\nwant:%v", i, g, got, test.want) + } + } +} diff --git a/graph/traverse/traverse.go b/graph/traverse/traverse.go new file mode 100644 index 00000000..6a351b07 --- /dev/null +++ b/graph/traverse/traverse.go @@ -0,0 +1,186 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package traverse provides basic graph traversal primitives. +package traverse + +import ( + "golang.org/x/tools/container/intsets" + + "github.com/gonum/graph" + "github.com/gonum/graph/internal/linear" +) + +// BreadthFirst implements stateful breadth-first graph traversal. +type BreadthFirst struct { + EdgeFilter func(graph.Edge) bool + Visit func(u, v graph.Node) + queue linear.NodeQueue + visited *intsets.Sparse +} + +// Walk performs a breadth-first traversal of the graph g starting from the given node, +// depending on the the EdgeFilter field and the until parameter if they are non-nil. The +// traversal follows edges for which EdgeFilter(edge) is true and returns the first node +// for which until(node, depth) is true. During the traversal, if the Visit field is +// non-nil, it is called with the nodes joined by each followed edge. 
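+//
+// A minimal sketch of use, assuming g is a previously constructed graph and
+// start is a node in g:
+//
+//	var bfs BreadthFirst
+//	node := bfs.Walk(g, start, func(n graph.Node, d int) bool {
+//		return d > 2 // Stop at the first node more than two edges from start.
+//	})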
+func (b *BreadthFirst) Walk(g graph.Graph, from graph.Node, until func(n graph.Node, d int) bool) graph.Node { + if b.visited == nil { + b.visited = &intsets.Sparse{} + } + b.queue.Enqueue(from) + b.visited.Insert(from.ID()) + + var ( + depth int + children int + untilNext = 1 + ) + for b.queue.Len() > 0 { + t := b.queue.Dequeue() + if until != nil && until(t, depth) { + return t + } + for _, n := range g.From(t) { + if b.EdgeFilter != nil && !b.EdgeFilter(g.Edge(t, n)) { + continue + } + if b.visited.Has(n.ID()) { + continue + } + if b.Visit != nil { + b.Visit(t, n) + } + b.visited.Insert(n.ID()) + children++ + b.queue.Enqueue(n) + } + if untilNext--; untilNext == 0 { + depth++ + untilNext = children + children = 0 + } + } + + return nil +} + +// WalkAll calls Walk for each unvisited node of the graph g using edges independent +// of their direction. The functions before and after are called prior to commencing +// and after completing each walk if they are non-nil respectively. The function +// during is called on each node as it is traversed. +func (b *BreadthFirst) WalkAll(g graph.Undirected, before, after func(), during func(graph.Node)) { + b.Reset() + for _, from := range g.Nodes() { + if b.Visited(from) { + continue + } + if before != nil { + before() + } + b.Walk(g, from, func(n graph.Node, _ int) bool { + if during != nil { + during(n) + } + return false + }) + if after != nil { + after() + } + } +} + +// Visited returned whether the node n was visited during a traverse. +func (b *BreadthFirst) Visited(n graph.Node) bool { + return b.visited != nil && b.visited.Has(n.ID()) +} + +// Reset resets the state of the traverser for reuse. +func (b *BreadthFirst) Reset() { + b.queue.Reset() + if b.visited != nil { + b.visited.Clear() + } +} + +// DepthFirst implements stateful depth-first graph traversal. +type DepthFirst struct { + EdgeFilter func(graph.Edge) bool + Visit func(u, v graph.Node) + stack linear.NodeStack + visited *intsets.Sparse +} + +// Walk performs a depth-first traversal of the graph g starting from the given node, +// depending on the the EdgeFilter field and the until parameter if they are non-nil. The +// traversal follows edges for which EdgeFilter(edge) is true and returns the first node +// for which until(node) is true. During the traversal, if the Visit field is non-nil, it +// is called with the nodes joined by each followed edge. +func (d *DepthFirst) Walk(g graph.Graph, from graph.Node, until func(graph.Node) bool) graph.Node { + if d.visited == nil { + d.visited = &intsets.Sparse{} + } + d.stack.Push(from) + d.visited.Insert(from.ID()) + + for d.stack.Len() > 0 { + t := d.stack.Pop() + if until != nil && until(t) { + return t + } + for _, n := range g.From(t) { + if d.EdgeFilter != nil && !d.EdgeFilter(g.Edge(t, n)) { + continue + } + if d.visited.Has(n.ID()) { + continue + } + if d.Visit != nil { + d.Visit(t, n) + } + d.visited.Insert(n.ID()) + d.stack.Push(n) + } + } + + return nil +} + +// WalkAll calls Walk for each unvisited node of the graph g using edges independent +// of their direction. The functions before and after are called prior to commencing +// and after completing each walk if they are non-nil respectively. The function +// during is called on each node as it is traversed. 
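+//
+// A minimal sketch of use, collecting the nodes of each connected component
+// of an undirected graph g built elsewhere:
+//
+//	var (
+//		dfs  DepthFirst
+//		comp []graph.Node
+//		all  [][]graph.Node
+//	)
+//	dfs.WalkAll(g,
+//		func() { comp = nil },                         // before each component
+//		func() { all = append(all, comp) },            // after each component
+//		func(n graph.Node) { comp = append(comp, n) }, // during traversal
+//	)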
+func (d *DepthFirst) WalkAll(g graph.Undirected, before, after func(), during func(graph.Node)) { + d.Reset() + for _, from := range g.Nodes() { + if d.Visited(from) { + continue + } + if before != nil { + before() + } + d.Walk(g, from, func(n graph.Node) bool { + if during != nil { + during(n) + } + return false + }) + if after != nil { + after() + } + } +} + +// Visited returned whether the node n was visited during a traverse. +func (d *DepthFirst) Visited(n graph.Node) bool { + return d.visited != nil && d.visited.Has(n.ID()) +} + +// Reset resets the state of the traverser for reuse. +func (d *DepthFirst) Reset() { + d.stack = d.stack[:0] + if d.visited != nil { + d.visited.Clear() + } +} diff --git a/graph/traverse/traverse_test.go b/graph/traverse/traverse_test.go new file mode 100644 index 00000000..8c33d10e --- /dev/null +++ b/graph/traverse/traverse_test.go @@ -0,0 +1,434 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package traverse + +import ( + "fmt" + "math" + "reflect" + "sort" + "testing" + + "github.com/gonum/graph" + "github.com/gonum/graph/graphs/gen" + "github.com/gonum/graph/internal/ordered" + "github.com/gonum/graph/simple" +) + +var ( + // batageljZaversnikGraph is the example graph from + // figure 1 of http://arxiv.org/abs/cs/0310049v1 + batageljZaversnikGraph = []set{ + 0: nil, + + 1: linksTo(2, 3), + 2: linksTo(4), + 3: linksTo(4), + 4: linksTo(5), + 5: nil, + + 6: linksTo(7, 8, 14), + 7: linksTo(8, 11, 12, 14), + 8: linksTo(14), + 9: linksTo(11), + 10: linksTo(11), + 11: linksTo(12), + 12: linksTo(18), + 13: linksTo(14, 15), + 14: linksTo(15, 17), + 15: linksTo(16, 17), + 16: nil, + 17: linksTo(18, 19, 20), + 18: linksTo(19, 20), + 19: linksTo(20), + 20: nil, + } + + // wpBronKerboschGraph is the example given in the Bron-Kerbosch article on wikipedia (renumbered). + // http://en.wikipedia.org/w/index.php?title=Bron%E2%80%93Kerbosch_algorithm&oldid=656805858 + wpBronKerboschGraph = []set{ + 0: linksTo(1, 4), + 1: linksTo(2, 4), + 2: linksTo(3), + 3: linksTo(4, 5), + 4: nil, + 5: nil, + } +) + +var breadthFirstTests = []struct { + g []set + from graph.Node + edge func(graph.Edge) bool + until func(graph.Node, int) bool + final map[graph.Node]bool + want [][]int +}{ + { + g: wpBronKerboschGraph, + from: simple.Node(1), + final: map[graph.Node]bool{nil: true}, + want: [][]int{ + {1}, + {0, 2, 4}, + {3}, + {5}, + }, + }, + { + g: wpBronKerboschGraph, + edge: func(e graph.Edge) bool { + // Do not traverse an edge between 3 and 5. 
+ return (e.From().ID() != 3 || e.To().ID() != 5) && (e.From().ID() != 5 || e.To().ID() != 3) + }, + from: simple.Node(1), + final: map[graph.Node]bool{nil: true}, + want: [][]int{ + {1}, + {0, 2, 4}, + {3}, + }, + }, + { + g: wpBronKerboschGraph, + from: simple.Node(1), + until: func(n graph.Node, _ int) bool { return n == simple.Node(3) }, + final: map[graph.Node]bool{simple.Node(3): true}, + want: [][]int{ + {1}, + {0, 2, 4}, + }, + }, + { + g: batageljZaversnikGraph, + from: simple.Node(13), + final: map[graph.Node]bool{nil: true}, + want: [][]int{ + {13}, + {14, 15}, + {6, 7, 8, 16, 17}, + {11, 12, 18, 19, 20}, + {9, 10}, + }, + }, + { + g: batageljZaversnikGraph, + from: simple.Node(13), + until: func(_ graph.Node, d int) bool { return d > 2 }, + final: map[graph.Node]bool{ + simple.Node(11): true, + simple.Node(12): true, + simple.Node(18): true, + simple.Node(19): true, + simple.Node(20): true, + }, + want: [][]int{ + {13}, + {14, 15}, + {6, 7, 8, 16, 17}, + }, + }, +} + +func TestBreadthFirst(t *testing.T) { + for i, test := range breadthFirstTests { + g := simple.NewUndirectedGraph(0, math.Inf(1)) + for u, e := range test.g { + // Add nodes that are not defined by an edge. + if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v)}) + } + } + w := BreadthFirst{ + EdgeFilter: test.edge, + } + var got [][]int + final := w.Walk(g, test.from, func(n graph.Node, d int) bool { + if test.until != nil && test.until(n, d) { + return true + } + if d >= len(got) { + got = append(got, []int(nil)) + } + got[d] = append(got[d], n.ID()) + return false + }) + if !test.final[final] { + t.Errorf("unexepected final node for test %d:\ngot: %v\nwant: %v", i, final, test.final) + } + for _, l := range got { + sort.Ints(l) + } + if !reflect.DeepEqual(got, test.want) { + t.Errorf("unexepected BFS level structure for test %d:\ngot: %v\nwant: %v", i, got, test.want) + } + } +} + +var depthFirstTests = []struct { + g []set + from graph.Node + edge func(graph.Edge) bool + until func(graph.Node) bool + final map[graph.Node]bool + want []int +}{ + { + g: wpBronKerboschGraph, + from: simple.Node(1), + final: map[graph.Node]bool{nil: true}, + want: []int{0, 1, 2, 3, 4, 5}, + }, + { + g: wpBronKerboschGraph, + edge: func(e graph.Edge) bool { + // Do not traverse an edge between 3 and 5. + return (e.From().ID() != 3 || e.To().ID() != 5) && (e.From().ID() != 5 || e.To().ID() != 3) + }, + from: simple.Node(1), + final: map[graph.Node]bool{nil: true}, + want: []int{0, 1, 2, 3, 4}, + }, + { + g: wpBronKerboschGraph, + from: simple.Node(1), + until: func(n graph.Node) bool { return n == simple.Node(3) }, + final: map[graph.Node]bool{simple.Node(3): true}, + }, + { + g: batageljZaversnikGraph, + from: simple.Node(0), + final: map[graph.Node]bool{nil: true}, + want: []int{0}, + }, + { + g: batageljZaversnikGraph, + from: simple.Node(3), + final: map[graph.Node]bool{nil: true}, + want: []int{1, 2, 3, 4, 5}, + }, + { + g: batageljZaversnikGraph, + from: simple.Node(13), + final: map[graph.Node]bool{nil: true}, + want: []int{6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20}, + }, +} + +func TestDepthFirst(t *testing.T) { + for i, test := range depthFirstTests { + g := simple.NewUndirectedGraph(0, math.Inf(1)) + for u, e := range test.g { + // Add nodes that are not defined by an edge. 
+ if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v)}) + } + } + w := DepthFirst{ + EdgeFilter: test.edge, + } + var got []int + final := w.Walk(g, test.from, func(n graph.Node) bool { + if test.until != nil && test.until(n) { + return true + } + got = append(got, n.ID()) + return false + }) + if !test.final[final] { + t.Errorf("unexepected final node for test %d:\ngot: %v\nwant: %v", i, final, test.final) + } + sort.Ints(got) + if test.want != nil && !reflect.DeepEqual(got, test.want) { + t.Errorf("unexepected DFS traversed nodes for test %d:\ngot: %v\nwant: %v", i, got, test.want) + } + } +} + +var walkAllTests = []struct { + g []set + edge func(graph.Edge) bool + want [][]int +}{ + { + g: batageljZaversnikGraph, + want: [][]int{ + {0}, + {1, 2, 3, 4, 5}, + {6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20}, + }, + }, + { + g: batageljZaversnikGraph, + edge: func(e graph.Edge) bool { + // Do not traverse an edge between 3 and 5. + return (e.From().ID() != 4 || e.To().ID() != 5) && (e.From().ID() != 5 || e.To().ID() != 4) + }, + want: [][]int{ + {0}, + {1, 2, 3, 4}, + {5}, + {6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20}, + }, + }, +} + +func TestWalkAll(t *testing.T) { + for i, test := range walkAllTests { + g := simple.NewUndirectedGraph(0, math.Inf(1)) + + for u, e := range test.g { + if !g.Has(simple.Node(u)) { + g.AddNode(simple.Node(u)) + } + for v := range e { + if !g.Has(simple.Node(v)) { + g.AddNode(simple.Node(v)) + } + g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v)}) + } + } + type walker interface { + WalkAll(g graph.Undirected, before, after func(), during func(graph.Node)) + } + for _, w := range []walker{ + &BreadthFirst{}, + &DepthFirst{}, + } { + var ( + c []graph.Node + cc [][]graph.Node + ) + switch w := w.(type) { + case *BreadthFirst: + w.EdgeFilter = test.edge + case *DepthFirst: + w.EdgeFilter = test.edge + default: + panic(fmt.Sprintf("bad walker type: %T", w)) + } + during := func(n graph.Node) { + c = append(c, n) + } + after := func() { + cc = append(cc, []graph.Node(nil)) + cc[len(cc)-1] = append(cc[len(cc)-1], c...) + c = c[:0] + } + w.WalkAll(g, nil, after, during) + + got := make([][]int, len(cc)) + for j, c := range cc { + ids := make([]int, len(c)) + for k, n := range c { + ids[k] = n.ID() + } + sort.Ints(ids) + got[j] = ids + } + sort.Sort(ordered.BySliceValues(got)) + if !reflect.DeepEqual(got, test.want) { + t.Errorf("unexpected connected components for test %d using %T:\ngot: %v\nwant:%v", i, w, got, test.want) + } + } + } +} + +// set is an integer set. 
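+// It mirrors the intset helper defined in the topo package's tests.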
+type set map[int]struct{} + +func linksTo(i ...int) set { + if len(i) == 0 { + return nil + } + s := make(set) + for _, v := range i { + s[v] = struct{}{} + } + return s +} + +var ( + gnpUndirected_10_tenth = gnpUndirected(10, 0.1) + gnpUndirected_100_tenth = gnpUndirected(100, 0.1) + gnpUndirected_1000_tenth = gnpUndirected(1000, 0.1) + gnpUndirected_10_half = gnpUndirected(10, 0.5) + gnpUndirected_100_half = gnpUndirected(100, 0.5) + gnpUndirected_1000_half = gnpUndirected(1000, 0.5) +) + +func gnpUndirected(n int, p float64) graph.Undirected { + g := simple.NewUndirectedGraph(0, math.Inf(1)) + gen.Gnp(g, n, p, nil) + return g +} + +func benchmarkWalkAllBreadthFirst(b *testing.B, g graph.Undirected) { + n := len(g.Nodes()) + b.ResetTimer() + var bft BreadthFirst + for i := 0; i < b.N; i++ { + bft.WalkAll(g, nil, nil, nil) + } + if bft.visited.Len() != n { + b.Fatalf("unexpected number of nodes visited: want: %d got %d", n, bft.visited.Len()) + } +} + +func BenchmarkWalkAllBreadthFirstGnp_10_tenth(b *testing.B) { + benchmarkWalkAllBreadthFirst(b, gnpUndirected_10_tenth) +} +func BenchmarkWalkAllBreadthFirstGnp_100_tenth(b *testing.B) { + benchmarkWalkAllBreadthFirst(b, gnpUndirected_100_tenth) +} +func BenchmarkWalkAllBreadthFirstGnp_1000_tenth(b *testing.B) { + benchmarkWalkAllBreadthFirst(b, gnpUndirected_1000_tenth) +} +func BenchmarkWalkAllBreadthFirstGnp_10_half(b *testing.B) { + benchmarkWalkAllBreadthFirst(b, gnpUndirected_10_half) +} +func BenchmarkWalkAllBreadthFirstGnp_100_half(b *testing.B) { + benchmarkWalkAllBreadthFirst(b, gnpUndirected_100_half) +} +func BenchmarkWalkAllBreadthFirstGnp_1000_half(b *testing.B) { + benchmarkWalkAllBreadthFirst(b, gnpUndirected_1000_half) +} + +func benchmarkWalkAllDepthFirst(b *testing.B, g graph.Undirected) { + n := len(g.Nodes()) + b.ResetTimer() + var dft DepthFirst + for i := 0; i < b.N; i++ { + dft.WalkAll(g, nil, nil, nil) + } + if dft.visited.Len() != n { + b.Fatalf("unexpected number of nodes visited: want: %d got %d", n, dft.visited.Len()) + } +} + +func BenchmarkWalkAllDepthFirstGnp_10_tenth(b *testing.B) { + benchmarkWalkAllDepthFirst(b, gnpUndirected_10_tenth) +} +func BenchmarkWalkAllDepthFirstGnp_100_tenth(b *testing.B) { + benchmarkWalkAllDepthFirst(b, gnpUndirected_100_tenth) +} +func BenchmarkWalkAllDepthFirstGnp_1000_tenth(b *testing.B) { + benchmarkWalkAllDepthFirst(b, gnpUndirected_1000_tenth) +} +func BenchmarkWalkAllDepthFirstGnp_10_half(b *testing.B) { + benchmarkWalkAllDepthFirst(b, gnpUndirected_10_half) +} +func BenchmarkWalkAllDepthFirstGnp_100_half(b *testing.B) { + benchmarkWalkAllDepthFirst(b, gnpUndirected_100_half) +} +func BenchmarkWalkAllDepthFirstGnp_1000_half(b *testing.B) { + benchmarkWalkAllDepthFirst(b, gnpUndirected_1000_half) +} diff --git a/graph/undirect.go b/graph/undirect.go new file mode 100644 index 00000000..3dd3dfb3 --- /dev/null +++ b/graph/undirect.go @@ -0,0 +1,185 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package graph + +import ( + "golang.org/x/tools/container/intsets" +) + +// Undirect converts a directed graph to an undirected graph, resolving +// edge weight conflicts. +type Undirect struct { + G Directed + + // Absent is the value used to + // represent absent edge weights + // passed to Merge if the reverse + // edge is present. + Absent float64 + + // Merge defines how discordant edge + // weights in G are resolved. 
A merge + // is performed if at least one edge + // exists between the nodes being + // considered. The edges corresponding + // to the two weights are also passed, + // in the same order. + // The order of weight parameters + // passed to Merge is not defined, so + // the function should be commutative. + // If Merge is nil, the arithmetic + // mean is used to merge weights. + Merge func(x, y float64, xe, ye Edge) float64 +} + +var ( + _ Undirected = Undirect{} + _ Weighter = Undirect{} +) + +// Has returns whether the node exists within the graph. +func (g Undirect) Has(n Node) bool { return g.G.Has(n) } + +// Nodes returns all the nodes in the graph. +func (g Undirect) Nodes() []Node { return g.G.Nodes() } + +// From returns all nodes in g that can be reached directly from u. +func (g Undirect) From(u Node) []Node { + var ( + nodes []Node + seen intsets.Sparse + ) + for _, n := range g.G.From(u) { + seen.Insert(n.ID()) + nodes = append(nodes, n) + } + for _, n := range g.G.To(u) { + id := n.ID() + if seen.Has(id) { + continue + } + seen.Insert(id) + nodes = append(nodes, n) + } + return nodes +} + +// HasEdgeBetween returns whether an edge exists between nodes x and y. +func (g Undirect) HasEdgeBetween(x, y Node) bool { return g.G.HasEdgeBetween(x, y) } + +// Edge returns the edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +// If an edge exists, the Edge returned is an EdgePair. The weight of +// the edge is determined by applying the Merge func to the weights of the +// edges between u and v. +func (g Undirect) Edge(u, v Node) Edge { return g.EdgeBetween(u, v) } + +// EdgeBetween returns the edge between nodes x and y. If an edge exists, the +// Edge returned is an EdgePair. The weight of the edge is determined by +// applying the Merge func to the weights of edges between x and y. +func (g Undirect) EdgeBetween(x, y Node) Edge { + fe := g.G.Edge(x, y) + re := g.G.Edge(y, x) + if fe == nil && re == nil { + return nil + } + + var f, r float64 + if wg, ok := g.G.(Weighter); ok { + f, ok = wg.Weight(x, y) + if !ok { + f = g.Absent + } + r, ok = wg.Weight(y, x) + if !ok { + r = g.Absent + } + } else { + f = g.Absent + if fe != nil { + f = fe.Weight() + } + r = g.Absent + if re != nil { + r = re.Weight() + } + } + + var w float64 + if g.Merge == nil { + w = (f + r) / 2 + } else { + w = g.Merge(f, r, fe, re) + } + return EdgePair{E: [2]Edge{fe, re}, W: w} +} + +// Weight returns the weight for the edge between x and y if Edge(x, y) returns a non-nil Edge. +// If x and y are the same node the internal node weight is returned. If there is no joining +// edge between the two nodes the weight value returned is zero. Weight returns true if an edge +// exists between x and y or if x and y have the same ID, false otherwise. +func (g Undirect) Weight(x, y Node) (w float64, ok bool) { + fe := g.G.Edge(x, y) + re := g.G.Edge(y, x) + + var f, r float64 + if wg, wOk := g.G.(Weighter); wOk { + var fOk, rOK bool + f, fOk = wg.Weight(x, y) + if !fOk { + f = g.Absent + } + r, rOK = wg.Weight(y, x) + if !rOK { + r = g.Absent + } + ok = fOk || rOK + } else { + f = g.Absent + if fe != nil { + f = fe.Weight() + ok = true + } + r = g.Absent + if re != nil { + r = re.Weight() + ok = true + } + } + + if g.Merge == nil { + return (f + r) / 2, ok + } + return g.Merge(f, r, fe, re), ok +} + +// EdgePair is an opposed pair of directed edges. 
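+// An EdgePair is returned by Undirect.Edge and Undirect.EdgeBetween; either
+// of its two directed edges may be nil, and W holds the weight produced by
+// merging the pair's edge weights.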
+type EdgePair struct { + E [2]Edge + W float64 +} + +// From returns the from node of the first non-nil edge, or nil. +func (e EdgePair) From() Node { + if e.E[0] != nil { + return e.E[0].From() + } else if e.E[1] != nil { + return e.E[1].From() + } + return nil +} + +// To returns the to node of the first non-nil edge, or nil. +func (e EdgePair) To() Node { + if e.E[0] != nil { + return e.E[0].To() + } else if e.E[1] != nil { + return e.E[1].To() + } + return nil +} + +// Weight returns the merged edge weights of the two edges. +func (e EdgePair) Weight() float64 { return e.W } diff --git a/graph/undirect_test.go b/graph/undirect_test.go new file mode 100644 index 00000000..d811e954 --- /dev/null +++ b/graph/undirect_test.go @@ -0,0 +1,126 @@ +// Copyright ©2015 The gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package graph_test + +import ( + "math" + "testing" + + "github.com/gonum/graph" + "github.com/gonum/graph/simple" + "github.com/gonum/matrix/mat64" +) + +var directedGraphs = []struct { + g func() graph.DirectedBuilder + edges []simple.Edge + absent float64 + merge func(x, y float64, xe, ye graph.Edge) float64 + + want mat64.Matrix +}{ + { + g: func() graph.DirectedBuilder { return simple.NewDirectedGraph(0, 0) }, + edges: []simple.Edge{ + {F: simple.Node(0), T: simple.Node(1), W: 2}, + {F: simple.Node(1), T: simple.Node(0), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + }, + want: mat64.NewSymDense(3, []float64{ + 0, (1. + 2.) / 2., 0, + (1. + 2.) / 2., 0, 1. / 2., + 0, 1. / 2., 0, + }), + }, + { + g: func() graph.DirectedBuilder { return simple.NewDirectedGraph(0, 0) }, + edges: []simple.Edge{ + {F: simple.Node(0), T: simple.Node(1), W: 2}, + {F: simple.Node(1), T: simple.Node(0), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + }, + absent: 1, + merge: func(x, y float64, _, _ graph.Edge) float64 { return math.Sqrt(x * y) }, + want: mat64.NewSymDense(3, []float64{ + 0, math.Sqrt(1 * 2), 0, + math.Sqrt(1 * 2), 0, math.Sqrt(1 * 1), + 0, math.Sqrt(1 * 1), 0, + }), + }, + { + g: func() graph.DirectedBuilder { return simple.NewDirectedGraph(0, 0) }, + edges: []simple.Edge{ + {F: simple.Node(0), T: simple.Node(1), W: 2}, + {F: simple.Node(1), T: simple.Node(0), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + }, + merge: func(x, y float64, _, _ graph.Edge) float64 { return math.Min(x, y) }, + want: mat64.NewSymDense(3, []float64{ + 0, math.Min(1, 2), 0, + math.Min(1, 2), 0, math.Min(1, 0), + 0, math.Min(1, 0), 0, + }), + }, + { + g: func() graph.DirectedBuilder { return simple.NewDirectedGraph(0, 0) }, + edges: []simple.Edge{ + {F: simple.Node(0), T: simple.Node(1), W: 2}, + {F: simple.Node(1), T: simple.Node(0), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + }, + merge: func(x, y float64, xe, ye graph.Edge) float64 { + if xe == nil { + return y + } + if ye == nil { + return x + } + return math.Min(x, y) + }, + want: mat64.NewSymDense(3, []float64{ + 0, math.Min(1, 2), 0, + math.Min(1, 2), 0, 1, + 0, 1, 0, + }), + }, + { + g: func() graph.DirectedBuilder { return simple.NewDirectedGraph(0, 0) }, + edges: []simple.Edge{ + {F: simple.Node(0), T: simple.Node(1), W: 2}, + {F: simple.Node(1), T: simple.Node(0), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + }, + merge: func(x, y float64, _, _ graph.Edge) float64 { return math.Max(x, y) }, + want: mat64.NewSymDense(3, []float64{ + 0, math.Max(1, 2), 0, + math.Max(1, 2), 0, math.Max(1, 0), + 0, 
math.Max(1, 0), 0, + }), + }, +} + +func TestUndirect(t *testing.T) { + for _, test := range directedGraphs { + g := test.g() + for _, e := range test.edges { + g.SetEdge(e) + } + + src := graph.Undirect{G: g, Absent: test.absent, Merge: test.merge} + dst := simple.NewUndirectedMatrixFrom(src.Nodes(), 0, 0, 0) + for _, u := range src.Nodes() { + for _, v := range src.From(u) { + dst.SetEdge(src.Edge(u, v)) + } + } + + if !mat64.Equal(dst.Matrix(), test.want) { + t.Errorf("unexpected result:\ngot:\n%.4v\nwant:\n%.4v", + mat64.Formatted(dst.Matrix()), + mat64.Formatted(test.want), + ) + } + } +}