Files
gonum/graph/formats/rdf/debug.go
Dan Kortschak 782095d131 graph/formats/rdf: implement canonicalisation hashing
Test cases test001-in.nq to test062-in.nq are from [1] licensed under the
W3C BSD 3-clause license. Test cases hogan*.nq are from the original paper
used with permission.

[1] https://json-ld.github.io/normalization/tests/index.html
2021-05-05 14:52:27 +09:30

77 lines
1.5 KiB
Go

// Copyright ©2020 The Gonum Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build debug
// +build debug
package rdf
import (
"fmt"
"os"
"sort"
"strings"
"text/tabwriter"
)
// debugger reports whether debug logging output should be produced.
type debugger bool
// debug enables the debug logging helpers in this file; the file is
// only compiled when the debug build tag is set.
const debug debugger = true
// log writes the arguments to stderr in fmt.Println style, indented by
// depth tab stops. It is a no-op when debugging is disabled.
func (d debugger) log(depth int, args ...interface{}) {
	if d {
		fmt.Fprint(os.Stderr, strings.Repeat("\t", depth))
		fmt.Fprintln(os.Stderr, args...)
	}
}
// logf writes the formatted message to stderr, indented by depth tab
// stops. It is a no-op when debugging is disabled. No newline is added
// beyond what format specifies.
func (d debugger) logf(depth int, format string, args ...interface{}) {
	if d {
		fmt.Fprint(os.Stderr, strings.Repeat("\t", depth))
		fmt.Fprintf(os.Stderr, format, args...)
	}
}
// logHashes writes the node-to-hash table to stderr, indented by depth
// tab stops, when debugging is enabled. Entries are printed in lexical
// key order in a tab-aligned column layout, followed by a blank line;
// "none" is printed for an empty table. size is the hash length in
// bytes, used to zero-pad the hex rendering to the full hash width.
func (d debugger) logHashes(depth int, hashes map[string][]byte, size int) {
	if !d {
		return
	}
	prefix := strings.Repeat("\t", depth)
	if len(hashes) == 0 {
		fmt.Fprintln(os.Stderr, prefix+"none")
		fmt.Fprintln(os.Stderr)
		return
	}
	// Sort the keys so the output is deterministic across runs;
	// map iteration order is randomized.
	keys := make([]string, 0, len(hashes))
	for k := range hashes {
		keys = append(keys, k)
	}
	sort.Strings(keys)
	w := tabwriter.NewWriter(os.Stderr, 0, 4, 8, ' ', 0)
	for _, k := range keys {
		// 2*size hex digits cover every byte of the hash.
		fmt.Fprintf(w, prefix+"%s\t%0*x\n", k, 2*size, hashes[k])
	}
	w.Flush()
	fmt.Fprintln(os.Stderr)
}
// logParts writes the partition's nodes and their hashes to stderr,
// indented by depth tab stops, when debugging is enabled. Rows are
// tab-aligned and followed by a blank line; "none" is printed for an
// empty partition.
func (d debugger) logParts(depth int, parts byLengthHash) {
	if !d {
		return
	}
	indent := strings.Repeat("\t", depth)
	if parts.Len() == 0 {
		fmt.Fprintln(os.Stderr, indent+"none")
		fmt.Fprintln(os.Stderr)
		return
	}
	tw := tabwriter.NewWriter(os.Stderr, 0, 4, 8, ' ', 0)
	for i, node := range parts.nodes {
		fmt.Fprintf(tw, indent+"%v\t%x\n", node, parts.hashes[i])
	}
	tw.Flush()
	fmt.Fprintln(os.Stderr)
}