Commit 870d079c authored by Matthew Dempsky

cmd/compile/internal/gc: replace Node.Ullman with Node.HasCall

Since switching to SSA, the only remaining use for the Ullman field
was in tracking whether or not an expression contained a function
call. Give it a new name and encode it in our fancy new bitset field.

Passes toolstash-check.

Change-Id: I95b7f9cb053856320c0d66efe14996667e6011c2
Reviewed-on: https://go-review.googlesource.com/37721
Run-TryBot: Matthew Dempsky <mdempsky@google.com>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Josh Bleecher Snyder <josharian@gmail.com>
parent 9fd359a2
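
For readers skimming the diff below: the change swaps a uint8 counter for a single bit in a per-Node flag word. A minimal, self-contained sketch of that bitset-flag pattern, with names mirroring the diff (the node type here is a simplified stand-in for the compiler's Node, not the real one):

package main

import "fmt"

// bitset32 matches the helper this commit adds: a uint32 in which
// each bit records one boolean property.
type bitset32 uint32

func (f *bitset32) set(mask uint32, b bool) {
	if b {
		*(*uint32)(f) |= mask // turn the bit on
	} else {
		*(*uint32)(f) &^= mask // turn the bit off
	}
}

// nodeHasCall mirrors the flag constant added in syntax.go.
const nodeHasCall = 1 << 0 // expression contains a function call

// node is a simplified stand-in for gc.Node.
type node struct {
	flags bitset32
}

func (n *node) HasCall() bool     { return n.flags&nodeHasCall != 0 }
func (n *node) SetHasCall(b bool) { n.flags.set(nodeHasCall, b) }

func main() {
	var n node
	fmt.Println(n.HasCall()) // false
	n.SetHasCall(true)
	fmt.Println(n.HasCall()) // true
}
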
@@ -23,3 +23,13 @@ func (f *bitset16) set(mask uint16, b bool) {
 		*(*uint16)(f) &^= mask
 	}
 }
+
+type bitset32 uint32
+
+func (f *bitset32) set(mask uint32, b bool) {
+	if b {
+		*(*uint32)(f) |= mask
+	} else {
+		*(*uint32)(f) &^= mask
+	}
+}
@@ -336,8 +336,6 @@ func transformclosure(xfunc *Node) {
 		if v.Name.Byval() {
 			// If v is captured by value, we merely downgrade it to PPARAM.
 			v.Class = PPARAM
-			v.Ullman = 1
-
 			fld.Nname = v
 		} else {
 			// If v of type T is captured by reference,
@@ -393,7 +391,6 @@ func transformclosure(xfunc *Node) {
 			if v.Name.Byval() && v.Type.Width <= int64(2*Widthptr) {
 				// If it is a small variable captured by value, downgrade it to PAUTO.
 				v.Class = PAUTO
-				v.Ullman = 1
 				xfunc.Func.Dcl = append(xfunc.Func.Dcl, v)
 				body = append(body, nod(OAS, v, cv))
 			} else {
@@ -628,7 +625,6 @@ func makepartialcall(fn *Node, t0 *Type, meth *Sym) *Node {
 	ptr.Sym = lookup("rcvr")
 	ptr.Class = PAUTO
 	ptr.SetAddable(true)
-	ptr.Ullman = 1
 	ptr.SetUsed(true)
 	ptr.Name.Curfn = xfunc
 	ptr.Xoffset = 0
......
@@ -292,7 +292,6 @@ func newname(s *Sym) *Node {
 	n := nod(ONAME, nil, nil)
 	n.Sym = s
 	n.SetAddable(true)
-	n.Ullman = 1
 	n.Xoffset = 0
 	return n
 }
@@ -305,7 +304,6 @@ func newnoname(s *Sym) *Node {
 	n := nod(ONONAME, nil, nil)
 	n.Sym = s
 	n.SetAddable(true)
-	n.Ullman = 1
 	n.Xoffset = 0
 	return n
 }
@@ -376,7 +374,6 @@ func oldname(s *Sym) *Node {
 		c.SetIsddd(n.Isddd())
 		c.Name.Defn = n
 		c.SetAddable(false)
-		c.Ullman = 2
 		c.Name.Funcdepth = funcdepth
 
 		// Link into list of active closure variables.
......
@@ -269,10 +269,6 @@ func (n *Node) Format(s fmt.State, verb rune) {
 func (n *Node) jconv(s fmt.State, flag FmtFlag) {
 	c := flag & FmtShort
 
-	if c == 0 && n.Ullman != 0 {
-		fmt.Fprintf(s, " u(%d)", n.Ullman)
-	}
-
 	if c == 0 && n.Addable() {
 		fmt.Fprintf(s, " a(%v)", n.Addable())
 	}
@@ -361,6 +357,10 @@ func (n *Node) jconv(s fmt.State, flag FmtFlag) {
 		fmt.Fprint(s, " nonnil")
 	}
 
+	if c == 0 && n.HasCall() {
+		fmt.Fprintf(s, " hascall")
+	}
+
 	if c == 0 && n.Used() {
 		fmt.Fprintf(s, " used(%v)", n.Used())
 	}
......
@@ -173,7 +173,6 @@ func moveToHeap(n *Node) {
 
 	// Modify n in place so that uses of n now mean indirection of the heapaddr.
 	n.Class = PAUTOHEAP
-	n.Ullman = 2
 	n.Xoffset = 0
 	n.Name.Param.Heapaddr = heapaddr
 	n.Esc = EscHeap
@@ -208,7 +207,6 @@ func tempname(nn *Node, t *Type) {
 	n.Type = t
 	n.Class = PAUTO
 	n.SetAddable(true)
-	n.Ullman = 1
 	n.Esc = EscNever
 	n.Name.Curfn = Curfn
 	n.Name.SetAutoTemp(true)
......
@@ -630,5 +630,5 @@ func appendinit(np **Node, init Nodes) {
 	}
 
 	n.Ninit.AppendNodes(&init)
-	n.Ullman = UINF
+	n.SetHasCall(true)
 }
@@ -988,7 +988,6 @@ func typename(t *Type) *Node {
 	n := nod(OADDR, s.Def, nil)
 	n.Type = ptrto(s.Def.Type)
 	n.SetAddable(true)
-	n.Ullman = 2
 	n.Typecheck = 1
 	return n
 }
@@ -1011,7 +1010,6 @@ func itabname(t, itype *Type) *Node {
 	n := nod(OADDR, s.Def, nil)
 	n.Type = ptrto(s.Def.Type)
 	n.SetAddable(true)
-	n.Ullman = 2
 	n.Typecheck = 1
 	return n
 }
......
@@ -4885,7 +4885,6 @@ func (e *ssaExport) namedAuto(name string, typ ssa.Type) ssa.GCNode {
 	n.Type = t
 	n.Class = PAUTO
 	n.SetAddable(true)
-	n.Ullman = 1
 	n.Esc = EscNever
 	n.Xoffset = 0
 	n.Name.Curfn = Curfn
......
@@ -426,7 +426,6 @@ func nodintconst(v int64) *Node {
 	c.SetVal(Val{new(Mpint)})
 	c.Val().U.(*Mpint).SetInt64(v)
 	c.Type = Types[TIDEAL]
-	ullmancalc(c)
 	return c
 }
@@ -436,7 +435,6 @@ func nodfltconst(v *Mpflt) *Node {
 	c.SetVal(Val{newMpflt()})
 	c.Val().U.(*Mpflt).Set(v)
 	c.Type = Types[TIDEAL]
-	ullmancalc(c)
 	return c
 }
@@ -444,7 +442,6 @@ func nodconst(n *Node, t *Type, v int64) {
 	*n = Node{}
 	n.Op = OLITERAL
 	n.SetAddable(true)
-	ullmancalc(n)
 	n.SetVal(Val{new(Mpint)})
 	n.Val().U.(*Mpint).SetInt64(v)
 	n.Type = t
@@ -1145,73 +1142,55 @@ func printframenode(n *Node) {
 	}
 }
 
-// calculate sethi/ullman number
-// roughly how many registers needed to
-// compile a node. used to compile the
-// hardest side first to minimize registers.
-func ullmancalc(n *Node) {
+// updateHasCall checks whether expression n contains any function
+// calls and sets the n.HasCall flag if so.
+func updateHasCall(n *Node) {
 	if n == nil {
 		return
 	}
 
-	var ul int
-	var ur int
+	b := false
 	if n.Ninit.Len() != 0 {
-		ul = UINF
+		// TODO(mdempsky): This seems overly conservative.
+		b = true
 		goto out
 	}
 
 	switch n.Op {
 	case OLITERAL, ONAME:
-		ul = 1
-		if n.Class == PAUTOHEAP {
-			ul++
-		}
-		goto out
-
 	case OAS:
-		if !needwritebarrier(n.Left) {
-			break
+		if needwritebarrier(n.Left) {
+			b = true
+			goto out
 		}
-		fallthrough
-
 	case OCALL, OCALLFUNC, OCALLMETH, OCALLINTER:
-		ul = UINF
+		b = true
 		goto out
 
-	// hard with instrumented code
 	case OANDAND, OOROR:
+		// hard with instrumented code
		if instrumenting {
-			ul = UINF
+			b = true
 			goto out
 		}
 
 	case OINDEX, OSLICE, OSLICEARR, OSLICE3, OSLICE3ARR, OSLICESTR,
 		OIND, ODOTPTR, ODOTTYPE, ODIV, OMOD:
 		// These ops might panic, make sure they are done
 		// before we start marshaling args for a call. See issue 16760.
-		ul = UINF
+		b = true
 		goto out
 	}
 
-	ul = 1
-	if n.Left != nil {
-		ul = int(n.Left.Ullman)
+	if n.Left != nil && n.Left.HasCall() {
+		b = true
+		goto out
 	}
-	ur = 1
-	if n.Right != nil {
-		ur = int(n.Right.Ullman)
-	}
-	if ul == ur {
-		ul += 1
-	}
-	if ur > ul {
-		ul = ur
+	if n.Right != nil && n.Right.HasCall() {
+		b = true
+		goto out
 	}
 
 out:
-	if ul > 200 {
-		ul = 200 // clamp to uchar with room to grow
-	}
-	n.Ullman = uint8(ul)
+	n.SetHasCall(b)
 }
 
 func badtype(op Op, tl *Type, tr *Type) {
@@ -2032,7 +2011,7 @@ func addinit(n *Node, init []*Node) *Node {
 	}
 
 	n.Ninit.Prepend(init...)
-	n.Ullman = UINF
+	n.SetHasCall(true)
 	return n
 }
......
@@ -49,12 +49,11 @@ type Node struct {
 	Pos src.XPos
 
-	flags bitset16
+	flags bitset32
 
 	Esc uint16 // EscXXX
 
 	Op       Op
-	Ullman   uint8 // sethi/ullman number
 	Etype    EType // op for OASOP, etype for OTYPE, exclam for export, 6g saved reg, ChanDir for OTCHAN, for OINDEXMAP 1=LHS,0=RHS
 	Class    Class // PPARAM, PAUTO, PEXTERN, etc
 	Embedded uint8 // ODCLFIELD embedded type
@@ -91,6 +90,7 @@ const (
 	nodeBounded // bounds check unnecessary
 	nodeAddable // addressable
 	nodeUsed    // for variable/label declared and not used error
+	nodeHasCall // expression contains a function call
 )
 
 func (n *Node) HasBreak() bool { return n.flags&nodeHasBreak != 0 }
@@ -109,6 +109,7 @@ func (n *Node) Noescape() bool { return n.flags&nodeNoescape != 0 }
 func (n *Node) Bounded() bool { return n.flags&nodeBounded != 0 }
 func (n *Node) Addable() bool { return n.flags&nodeAddable != 0 }
 func (n *Node) Used() bool { return n.flags&nodeUsed != 0 }
+func (n *Node) HasCall() bool { return n.flags&nodeHasCall != 0 }
 
 func (n *Node) SetHasBreak(b bool) { n.flags.set(nodeHasBreak, b) }
 func (n *Node) SetIsClosureVar(b bool) { n.flags.set(nodeIsClosureVar, b) }
@@ -126,6 +127,7 @@ func (n *Node) SetNoescape(b bool) { n.flags.set(nodeNoescape, b) }
 func (n *Node) SetBounded(b bool) { n.flags.set(nodeBounded, b) }
 func (n *Node) SetAddable(b bool) { n.flags.set(nodeAddable, b) }
 func (n *Node) SetUsed(b bool) { n.flags.set(nodeUsed, b) }
+func (n *Node) SetHasCall(b bool) { n.flags.set(nodeHasCall, b) }
 
 // Val returns the Val for the node.
 func (n *Node) Val() Val {
......
@@ -685,7 +685,7 @@ opswitch:
 		lr := ascompatte(n, n.Isddd(), t.Params(), n.List.Slice(), 0, init)
 		ll = append(ll, lr...)
 		n.Left.Left = nil
-		ullmancalc(n.Left)
+		updateHasCall(n.Left)
 		n.List.Set(reorder1(ll))
 
 	case OAS:
@@ -1617,7 +1617,7 @@ opswitch:
 		n = typecheck(n, Erv)
 	}
 
-	ullmancalc(n)
+	updateHasCall(n)
 
 	if Debug['w'] != 0 && n != nil {
 		Dump("walk", n)
@@ -1698,7 +1698,7 @@ func ascompatee(op Op, nl, nr []*Node, init *Nodes) []*Node {
 // evaluating the lv or a function call
 // in the conversion of the types
 func fncall(l *Node, rt *Type) bool {
-	if l.Ullman >= UINF || l.Op == OINDEXMAP {
+	if l.HasCall() || l.Op == OINDEXMAP {
 		return true
 	}
 	if needwritebarrier(l) {
@@ -1743,8 +1743,8 @@ func ascompatet(op Op, nl Nodes, nr *Type) []*Node {
 		a := nod(OAS, l, nodarg(r, 0))
 		a = convas(a, &nn)
-		ullmancalc(a)
-		if a.Ullman >= UINF {
+		updateHasCall(a)
+		if a.HasCall() {
 			Dump("ascompatet ucount", a)
 			ullmanOverflow = true
 		}
@@ -2104,7 +2104,7 @@ func convas(n *Node, init *Nodes) *Node {
 	}
 
 out:
-	ullmancalc(n)
+	updateHasCall(n)
 	return n
 }
@@ -2120,8 +2120,8 @@ func reorder1(all []*Node) []*Node {
 	for _, n := range all {
 		t++
-		ullmancalc(n)
-		if n.Ullman >= UINF {
+		updateHasCall(n)
+		if n.HasCall() {
 			c++
 		}
 	}
@@ -2136,7 +2136,7 @@ func reorder1(all []*Node) []*Node {
 	d := 0
 	var a *Node
 	for _, n := range all {
-		if n.Ullman < UINF {
+		if !n.HasCall() {
 			r = append(r, n)
 			continue
 		}
@@ -2436,10 +2436,10 @@ func vmatch1(l *Node, r *Node) bool {
 		case PPARAM, PAUTO:
 			break
 
-		// assignment to non-stack variable
-		// must be delayed if right has function calls.
 		default:
-			if r.Ullman >= UINF {
+			// assignment to non-stack variable must be
+			// delayed if right has function calls.
+			if r.HasCall() {
 				return true
 			}
 		}
......
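
To see why a single boolean is an adequate replacement for comparing Ullman numbers against UINF, here is a hedged sketch of the bottom-up flag propagation that updateHasCall relies on (simplified stand-in types, not the compiler's; the real function also forces the flag for ops that might panic and for instrumented && / ||):

package main

import "fmt"

// node is a simplified stand-in; in the compiler the flag lives in a
// bitset32 on gc.Node (see the sketch above).
type node struct {
	op          string
	left, right *node
	hasCall     bool
}

// updateHasCall mirrors the core logic of this commit: a node contains
// a call if it is itself a call, or if either child's (already
// computed) flag is set. Only one level needs to be inspected because
// walk.go recomputes the flag whenever a node is built or rewritten.
func updateHasCall(n *node) {
	if n == nil {
		return
	}
	b := n.op == "call"
	if n.left != nil && n.left.hasCall {
		b = true
	}
	if n.right != nil && n.right.hasCall {
		b = true
	}
	n.hasCall = b
}

func main() {
	// Build f() + x bottom-up, updating flags as each node is made.
	call := &node{op: "call"}
	updateHasCall(call)
	x := &node{op: "name"}
	updateHasCall(x)
	sum := &node{op: "add", left: call, right: x}
	updateHasCall(sum)
	fmt.Println(sum.hasCall) // true: the addition's left operand is a call
}

Call sites that previously tested n.Ullman >= UINF, such as fncall and reorder1 in the walk.go hunks above, reduce to the n.HasCall() test.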