import tables, intsets
type
  DiGraphObj = object
    id: int
    label: string
    inodes: Table[int, string]
    rnodes: Table[string, int]
    iedges: Table[int, IntSet]
  DiGraph* = ref DiGraphObj

proc newDiGraph(label: string = ""): DiGraph =
  new(result)
  result.label = label
  result.inodes = initTable[int, string]()
  result.rnodes = initTable[string, int]()
  result.iedges = initTable[int, IntSet]()

proc has_node*(g: DiGraph, n: string): bool =
  return n in g.rnodes

proc add_node*(g: DiGraph, n: string) =
  g.inodes[g.id] = n
  g.rnodes[n] = g.id
  g.id += 1

proc add_edge*(g: var DiGraph, a, b: string) =
  if not g.has_node(a):
    g.add_node(a)
  if not g.has_node(b):
    g.add_node(b)
  let
    key_a = g.rnodes[a]
    key_b = g.rnodes[b]
  if key_a notin g.iedges:
    g.iedges[key_a] = initIntSet()
  g.iedges[key_a].incl(key_b)
var G = newDiGraph()
G.add_edge("foo", "bar")
But when I try to add generics I get strange errors:
graph.nim(38, 2) template/generic instantiation from here
graph.nim(26, 11) template/generic instantiation from here
lib/nim/system.nim(1098, 67) template/generic instantiation from here
lib/nim/pure/collections/tables.nim(154, 22) template/generic instantiation from here
lib/nim/pure/collections/tables.nim(150, 18) template/generic instantiation from here
lib/nim/pure/collections/tableimpl.nim(37, 48) Error: type mismatch: got (int, string)
but expected one of:
system.==(x: array[I, T], y: array[I, T])
system.==(x: char, y: char)
system.==(x: string, y: string)
system.==(x: float, y: float)
system.==(x: int8, y: int8)
system.==(x: int16, y: int16)
system.==(x: int64, y: int64)
system.==(x: pointer, y: pointer)
system.==(x: ptr T, y: ptr T)
system.==(x: T, y: T)
system.==(x: int32, y: int32)
system.==(x: float32, y: float32)
system.==(x: T, y: T)
system.==(x: int, y: int)
system.==(x: seq[T], y: seq[T])
system.==(x: ref T, y: ref T)
system.==(x: T, y: T)
system.==(x: Enum, y: Enum)
system.==(x: cstring, y: cstring)
system.==(x: set[T], y: set[T])
system.==(x: bool, y: bool)
tables.==(s: Table[==.A, ==.B], t: Table[==.A, ==.B])
tables.==(s: TableRef[==.A, ==.B], t: TableRef[==.A, ==.B])
Here is my code, based on Part 2 of the Nim tutorial:
import tables, intsets
type
  DiGraphObj[T] = object
    id: int
    label: string
    inodes: Table[int, T]
    rnodes: Table[T, int]
    iedges: Table[int, IntSet]
  DiGraph*[T] = ref DiGraphObj[T]

proc newDiGraph[T](label: string = ""): DiGraph[T] =
  new(result)
  result.label = label
  result.inodes = initTable[int, T]()
  result.rnodes = initTable[T, int]()
  result.iedges = initTable[int, IntSet]()

proc has_node*[T](g: DiGraph[T], n: T): bool =
  return n in g.rnodes

proc add_node*[T](g: DiGraph[T], n: T) =
  g.inodes[g.id] = n
  g.rnodes[n] = g.id
  g.id += 1

proc add_edge*[T](g: var DiGraph[T], a, b: T) =
  if not g.has_node(a):
    g.add_node(a)
  if not g.has_node(b):
    g.add_node(b)
  let
    key_a = g.rnodes[a]
    key_b = g.rnodes[b]
  if key_a notin g.iedges:
    g.iedges[key_a] = initIntSet()
  g.iedges[key_a].incl(key_b)
var G = newDiGraph[string]()
G.add_edge("foo", "bar")
Nim version: 0.12.1
This is almost certainly a compiler bug, caused by having two tables with different key types that both reference the same generic parameter in the same object.
A minimal example to reproduce it is:
import tables

type
  G[T] = object
    inodes: Table[int, T]
    rnodes: Table[T, int]

var g: G[string]
echo g.rnodes["foo"]
A temporary workaround would be to make inodes a seq[T]: since node ids are assigned sequentially from 0, the seq can simply be filled in order as nodes are added (it would also be significantly faster than a table).
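For example, an untested sketch of that workaround applied to the generic code above (only add_node really changes; the id counter goes away because the seq index serves as the id):

import tables, intsets

type
  DiGraphObj[T] = object
    label: string
    inodes: seq[T]             # id -> value; the seq index replaces the old id counter
    rnodes: Table[T, int]      # value -> id; now the only table keyed on T
    iedges: Table[int, IntSet]
  DiGraph*[T] = ref DiGraphObj[T]

proc newDiGraph[T](label: string = ""): DiGraph[T] =
  new(result)
  result.label = label
  result.inodes = @[]
  result.rnodes = initTable[T, int]()
  result.iedges = initTable[int, IntSet]()

proc has_node*[T](g: DiGraph[T], n: T): bool =
  return n in g.rnodes

proc add_node*[T](g: DiGraph[T], n: T) =
  g.rnodes[n] = g.inodes.len   # the next free index is the new id
  g.inodes.add(n)

proc add_edge*[T](g: DiGraph[T], a, b: T) =
  if not g.has_node(a): g.add_node(a)
  if not g.has_node(b): g.add_node(b)
  let
    key_a = g.rnodes[a]
    key_b = g.rnodes[b]
  if key_a notin g.iedges:
    g.iedges[key_a] = initIntSet()
  g.iedges[key_a].incl(key_b)

var g = newDiGraph[string]()
g.add_edge("foo", "bar")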
Wow. Compiler bug... Thanks!
Replaced it with this structure and it seems to work (now only one table is keyed on T, so the problematic instantiation never comes up):
type
  GraphEdge[T] = ref GraphEdgeObj[T]
  GraphEdgeObj[T] = object
    val: T
    adj: IntSet
  DiGraphObj[T] = object
    edg: seq[GraphEdge[T]]
    nod: Table[T, int]
  DiGraph*[T] = ref DiGraphObj[T]
And the full implementation:

import tables

type
  GraphEdge[T] = ref GraphEdgeObj[T]
  GraphEdgeObj[T] = object
    val: T
    adj: Table[int, bool]
    nodes_in: int
    nodes_out: int
    init: bool
  DiGraphObj[T] = object
    id: int
    label: string
    edg: seq[GraphEdge[T]]
    nod: Table[T, int]
  DiGraph*[T] = ref DiGraphObj[T]

proc newDiGraph[T](label: string = ""): DiGraph[T] =
  new(result)
  result.label = label
  result.nod = initTable[T, int]()
  result.edg = @[]

{.push inline.}
proc has_node*[T](g: DiGraph[T], n: T): bool =
  return n in g.nod

proc has_edge*[T](g: DiGraph[T], a, b: T): bool =
  return g.has_node(a) and g.has_node(b) and
    g.edg[g.nod[a]].init and g.nod[b] in g.edg[g.nod[a]].adj
{.pop.}

proc add_node*[T](g: DiGraph[T], n: T) =
  if g.has_node(n): return
  g.nod[n] = g.edg.len
  g.edg.add(GraphEdge[T](val: n))

proc add_edge*[T](g: DiGraph[T], a, b: T) =
  if not g.has_node(a): g.add_node(a)
  if not g.has_node(b): g.add_node(b)
  let
    key_a = g.nod[a]
    key_b = g.nod[b]
  if not g.edg[key_a].init:
    g.edg[key_a].adj = initTable[int, bool](2)
    g.edg[key_a].init = true        # mark adj as initialized, or it gets wiped on the next call
  if key_b notin g.edg[key_a].adj:  # don't double-count a repeated edge
    g.edg[key_a].adj[key_b] = true
    g.edg[key_a].nodes_out += 1
    g.edg[key_b].nodes_in += 1

proc rm_node*[T](g: DiGraph[T], n: T) =
  if not g.has_node(n): return
  let id = g.nod[n]
  g.nod.del(n)
  if g.edg[id].init:
    for k in g.edg[id].adj.keys():
      g.edg[k].nodes_in -= 1
  g.edg[id] = nil
  for v in g.edg:
    if v == nil: continue
    if v.init and id in v.adj:      # only decrement if the edge actually existed
      v.adj.del(id)
      v.nodes_out -= 1

proc rm_edge*[T](g: DiGraph[T], a, b: T) =
  if not has_edge(g, a, b): return
  g.edg[g.nod[a]].adj.del(g.nod[b])
  g.edg[g.nod[a]].nodes_out -= 1    # keep the degree counters in sync
  g.edg[g.nod[b]].nodes_in -= 1

iterator nodes*[T](g: DiGraph[T]): T =
  for k in g.nod.keys():
    yield k

iterator edges*[T](g: DiGraph[T]): tuple[a, b: T] =
  for v in g.edg:
    if v == nil or not v.init: continue
    for i in v.adj.keys():
      if g.edg[i] == nil: continue
      yield (v.val, g.edg[i].val)

iterator out_degree_iter*[T](g: DiGraph[T]): tuple[n: T, d: int] =
  for k, v in g.nod.pairs():
    yield (k, g.edg[v].nodes_out)

iterator in_degree_iter*[T](g: DiGraph[T]): tuple[n: T, d: int] =
  for k, v in g.nod.pairs():
    yield (k, g.edg[v].nodes_in)

proc relabel_nodes*[T](g: DiGraph[T], a: Table[T, T]) =
  for k, v in a.pairs():
    if k in g.nod:
      g.edg[g.nod[k]].val = v       # keep the stored value in sync with the new label
      g.nod[v] = g.nod[k]
      g.nod.del(k)
The API closely mirrors Python's NetworkX library.
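For reference, a small usage sketch of the API above (appended to the same module, since newDiGraph is not exported; expected output noted in comments):

var dg = newDiGraph[string]("demo")
dg.add_edge("foo", "bar")
dg.add_edge("foo", "baz")

echo dg.has_edge("foo", "bar")      # true
for a, b in dg.edges():
  echo a, " -> ", b                 # foo -> bar and foo -> baz, in table order

dg.rm_edge("foo", "bar")
echo dg.has_edge("foo", "bar")      # false

dg.relabel_nodes({"foo": "qux"}.toTable)
echo dg.has_node("qux")             # true
for n, d in dg.out_degree_iter():
  echo n, ": ", d                   # qux has out-degree 1 (the remaining edge to baz)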