Commit dc7b54be authored by Russ Cox

[dev.cc] cmd/internal/obj, cmd/internal/gc, new6g: reconvert

Reconvert using rsc.io/c2go rev 27b3f59.

Changes to converter:
 - fatal does not return, so no fallthrough after fatal in switch
 - many more function results and variables identified as bool
 - simplification of negated boolean expressions

Change-Id: I3bc67da5e46cb7ee613e230cf7e9533036cc870b
Reviewed-on: https://go-review.googlesource.com/5171
Reviewed-by: Josh Bleecher Snyder <josharian@gmail.com>
parent 786825c5
......@@ -138,7 +138,7 @@ func dowidth(t *Type) {
if t.Width == -2 {
lno = int(lineno)
lineno = int32(t.Lineno)
if !(t.Broke != 0) {
if t.Broke == 0 {
t.Broke = 1
Yyerror("invalid recursive type %v", Tconv(t, 0))
}
......@@ -253,14 +253,14 @@ func dowidth(t *Type) {
checkwidth(t.Down)
case TFORW: // should have been filled in
if !(t.Broke != 0) {
if t.Broke == 0 {
Yyerror("invalid recursive type %v", Tconv(t, 0))
}
w = 1 // anything will do
// dummy type; should be replaced before use.
case TANY:
if !(Debug['A'] != 0) {
if Debug['A'] == 0 {
Fatal("dowidth any")
}
w = 1 // anything will do
......@@ -294,7 +294,7 @@ func dowidth(t *Type) {
checkwidth(t.Type)
t.Align = uint8(Widthptr)
} else if t.Bound == -100 {
if !(t.Broke != 0) {
if t.Broke == 0 {
Yyerror("use of [...] array outside of array literal")
t.Broke = 1
}
......@@ -394,7 +394,7 @@ func checkwidth(t *Type) {
Fatal("checkwidth %v", Tconv(t, 0))
}
if !(defercalc != 0) {
if defercalc == 0 {
dowidth(t)
return
}
......@@ -427,7 +427,7 @@ func defercheckwidth() {
func resumecheckwidth() {
var l *TypeList
if !(defercalc != 0) {
if defercalc == 0 {
Fatal("resumecheckwidth")
}
for l = tlq; l != nil; l = tlq {
......
......@@ -66,15 +66,15 @@ bnot(Bits a)
return c;
}
*/
func bany(a *Bits) int {
func bany(a *Bits) bool {
var i int
for i = 0; i < BITS; i++ {
if a.b[i] != 0 {
return 1
return true
}
}
return 0
return false
}
/*
......@@ -112,8 +112,8 @@ func blsh(n uint) Bits {
return c
}
func btest(a *Bits, n uint) int {
return bool2int(a.b[n/64]&(1<<(n%64)) != 0)
func btest(a *Bits, n uint) bool {
return a.b[n/64]&(1<<(n%64)) != 0
}
func biset(a *Bits, n uint) {
......@@ -144,7 +144,7 @@ func Qconv(bits Bits, flag int) string {
first = 1
for bany(&bits) != 0 {
for bany(&bits) {
i = bnum(bits)
if first != 0 {
first = 0
......
......@@ -120,15 +120,15 @@ func bvnext(bv *Bvec, i int32) int {
return int(i)
}
func bvisempty(bv *Bvec) int {
func bvisempty(bv *Bvec) bool {
var i int32
for i = 0; i < bv.n; i += WORDBITS {
if bv.b[i>>WORDSHIFT] != 0 {
return 0
return false
}
}
return 1
return true
}
func bvnot(bv *Bvec) {
......
......@@ -91,7 +91,7 @@ func typecheckclosure(func_ *Node, top int) {
for l = func_.Cvars; l != nil; l = l.Next {
n = l.N.Closure
if !(n.Captured != 0) {
if n.Captured == 0 {
n.Captured = 1
if n.Decldepth == 0 {
Fatal("typecheckclosure: var %v does not have decldepth assigned", Nconv(n, obj.FmtShort))
......@@ -218,7 +218,7 @@ func capturevars(xfunc *Node) {
v.Outerexpr = nil
// out parameters will be assigned to implicitly upon return.
if outer.Class != PPARAMOUT && !(v.Closure.Addrtaken != 0) && !(v.Closure.Assigned != 0) && v.Type.Width <= 128 {
if outer.Class != PPARAMOUT && v.Closure.Addrtaken == 0 && v.Closure.Assigned == 0 && v.Type.Width <= 128 {
v.Byval = 1
} else {
v.Closure.Addrtaken = 1
......@@ -351,7 +351,7 @@ func transformclosure(xfunc *Node) {
cv = Nod(OCLOSUREVAR, nil, nil)
cv.Type = v.Type
if !(v.Byval != 0) {
if v.Byval == 0 {
cv.Type = Ptrto(v.Type)
}
offset = Rnd(offset, int64(cv.Type.Align))
......@@ -389,7 +389,7 @@ func transformclosure(xfunc *Node) {
typechecklist(body, Etop)
walkstmtlist(body)
xfunc.Enter = body
xfunc.Needctxt = uint8(bool2int(nvar > 0))
xfunc.Needctxt = nvar > 0
}
lineno = int32(lno)
......@@ -430,7 +430,7 @@ func walkclosure(func_ *Node, init **NodeList) *Node {
continue
}
typ1 = typenod(v.Type)
if !(v.Byval != 0) {
if v.Byval == 0 {
typ1 = Nod(OIND, typ1, nil)
}
typ.List = list(typ.List, Nod(ODCLFIELD, newname(v.Sym), typ1))
......@@ -594,7 +594,7 @@ func makepartialcall(fn *Node, t0 *Type, meth *Node) *Node {
// Declare and initialize variable holding receiver.
body = nil
xfunc.Needctxt = 1
xfunc.Needctxt = true
cv = Nod(OCLOSUREVAR, nil, nil)
cv.Xoffset = int64(Widthptr)
cv.Type = rcvrtype
......@@ -609,7 +609,7 @@ func makepartialcall(fn *Node, t0 *Type, meth *Node) *Node {
ptr.Used = 1
ptr.Curfn = xfunc
xfunc.Dcl = list(xfunc.Dcl, ptr)
if Isptr[rcvrtype.Etype] != 0 || Isinter(rcvrtype) != 0 {
if Isptr[rcvrtype.Etype] != 0 || Isinter(rcvrtype) {
ptr.Ntype = typenod(rcvrtype)
body = list(body, Nod(OAS, ptr, cv))
} else {
......@@ -652,7 +652,7 @@ func walkpartialcall(n *Node, init **NodeList) *Node {
//
// Like walkclosure above.
if Isinter(n.Left.Type) != 0 {
if Isinter(n.Left.Type) {
// Trigger panic for method on nil interface now.
// Otherwise it happens in the wrapper and is confusing.
n.Left = cheapexpr(n.Left, init)
......
This diff is collapsed.
......@@ -10,12 +10,12 @@ func CASE(a int, b int) int {
return a<<16 | b
}
func overlap_cplx(f *Node, t *Node) int {
func overlap_cplx(f *Node, t *Node) bool {
// check whether f and t could be overlapping stack references.
// not exact, because it's hard to check for the stack register
// in portable code. close enough: worst case we will allocate
// an extra temporary and the registerizer will clean it up.
return bool2int(f.Op == OINDREG && t.Op == OINDREG && f.Xoffset+f.Type.Width >= t.Xoffset && t.Xoffset+t.Type.Width >= f.Xoffset)
return f.Op == OINDREG && t.Op == OINDREG && f.Xoffset+f.Type.Width >= t.Xoffset && t.Xoffset+t.Type.Width >= f.Xoffset
}
func Complexbool(op int, nl *Node, nr *Node, true_ bool, likely int, to *obj.Prog) {
......@@ -31,20 +31,20 @@ func Complexbool(op int, nl *Node, nr *Node, true_ bool, likely int, to *obj.Pro
// make both sides addable in ullman order
if nr != nil {
if nl.Ullman > nr.Ullman && !(nl.Addable != 0) {
if nl.Ullman > nr.Ullman && nl.Addable == 0 {
Tempname(&tnl, nl.Type)
Thearch.Cgen(nl, &tnl)
nl = &tnl
}
if !(nr.Addable != 0) {
if nr.Addable == 0 {
Tempname(&tnr, nr.Type)
Thearch.Cgen(nr, &tnr)
nr = &tnr
}
}
if !(nl.Addable != 0) {
if nl.Addable == 0 {
Tempname(&tnl, nl.Type)
Thearch.Cgen(nl, &tnl)
nl = &tnl
......@@ -87,7 +87,7 @@ func subnode(nr *Node, ni *Node, nc *Node) {
var tc int
var t *Type
if !(nc.Addable != 0) {
if nc.Addable == 0 {
Fatal("subnode not addable")
}
......@@ -243,7 +243,7 @@ func nodfconst(n *Node, t *Type, fval *Mpflt) {
n.Val.Ctype = CTFLT
n.Type = t
if !(Isfloat[t.Etype] != 0) {
if Isfloat[t.Etype] == 0 {
Fatal("nodfconst: bad type %v", Tconv(t, 0))
}
}
......@@ -251,7 +251,7 @@ func nodfconst(n *Node, t *Type, fval *Mpflt) {
/*
* cplx.c
*/
func Complexop(n *Node, res *Node) int {
func Complexop(n *Node, res *Node) bool {
if n != nil && n.Type != nil {
if Iscomplex[n.Type.Etype] != 0 {
goto maybe
......@@ -292,11 +292,11 @@ maybe:
//dump("\ncomplex-no", n);
no:
return 0
return false
//dump("\ncomplex-yes", n);
yes:
return 1
return true
}
func Complexmove(f *Node, t *Node) {
......@@ -313,7 +313,7 @@ func Complexmove(f *Node, t *Node) {
Dump("complexmove-t", t)
}
if !(t.Addable != 0) {
if t.Addable == 0 {
Fatal("complexmove: to not addable")
}
......@@ -322,7 +322,6 @@ func Complexmove(f *Node, t *Node) {
switch uint32(ft)<<16 | uint32(tt) {
default:
Fatal("complexmove: unknown conversion: %v -> %v\n", Tconv(f.Type, 0), Tconv(t.Type, 0))
fallthrough
// complex to complex move/convert.
// make f addable.
......@@ -331,7 +330,7 @@ func Complexmove(f *Node, t *Node) {
TCOMPLEX64<<16 | TCOMPLEX128,
TCOMPLEX128<<16 | TCOMPLEX64,
TCOMPLEX128<<16 | TCOMPLEX128:
if !(f.Addable != 0) || overlap_cplx(f, t) != 0 {
if f.Addable == 0 || overlap_cplx(f, t) {
Tempname(&tmp, f.Type)
Complexmove(f, &tmp)
f = &tmp
......@@ -380,7 +379,7 @@ func Complexgen(n *Node, res *Node) {
case OREAL,
OIMAG:
nl = n.Left
if !(nl.Addable != 0) {
if nl.Addable == 0 {
Tempname(&tmp, nl.Type)
Complexgen(nl, &tmp)
nl = &tmp
......@@ -403,7 +402,7 @@ func Complexgen(n *Node, res *Node) {
tr = Simsimtype(n.Type)
tr = cplxsubtype(tr)
if tl != tr {
if !(n.Addable != 0) {
if n.Addable == 0 {
Tempname(&n1, n.Type)
Complexmove(n, &n1)
n = &n1
......@@ -413,7 +412,7 @@ func Complexgen(n *Node, res *Node) {
return
}
if !(res.Addable != 0) {
if res.Addable == 0 {
Thearch.Igen(res, &n1, nil)
Thearch.Cgen(n, &n1)
Thearch.Regfree(&n1)
......@@ -429,7 +428,6 @@ func Complexgen(n *Node, res *Node) {
default:
Dump("complexgen: unknown op", n)
Fatal("complexgen: unknown op %v", Oconv(int(n.Op), 0))
fallthrough
case ODOT,
ODOTPTR,
......@@ -464,20 +462,20 @@ func Complexgen(n *Node, res *Node) {
// make both sides addable in ullman order
if nr != nil {
if nl.Ullman > nr.Ullman && !(nl.Addable != 0) {
if nl.Ullman > nr.Ullman && nl.Addable == 0 {
Tempname(&tnl, nl.Type)
Thearch.Cgen(nl, &tnl)
nl = &tnl
}
if !(nr.Addable != 0) {
if nr.Addable == 0 {
Tempname(&tnr, nr.Type)
Thearch.Cgen(nr, &tnr)
nr = &tnr
}
}
if !(nl.Addable != 0) {
if nl.Addable == 0 {
Tempname(&tnl, nl.Type)
Thearch.Cgen(nl, &tnl)
nl = &tnl
......
......@@ -10,17 +10,17 @@ import (
"strings"
)
func dflag() int {
if !(Debug['d'] != 0) {
return 0
func dflag() bool {
if Debug['d'] == 0 {
return false
}
if Debug['y'] != 0 {
return 1
return true
}
if incannedimport != 0 {
return 0
return false
}
return 1
return true
}
/*
......@@ -49,7 +49,7 @@ func pushdcl(s *Sym) *Sym {
d = push()
dcopy(d, s)
if dflag() != 0 {
if dflag() {
fmt.Printf("\t%v push %v %p\n", Ctxt.Line(int(lineno)), Sconv(s, 0), s.Def)
}
return d
......@@ -71,7 +71,7 @@ func popdcl() {
lno = int(s.Lastlineno)
dcopy(s, d)
d.Lastlineno = int32(lno)
if dflag() != 0 {
if dflag() {
fmt.Printf("\t%v pop %v %p\n", Ctxt.Line(int(lineno)), Sconv(s, 0), s.Def)
}
}
......@@ -195,7 +195,7 @@ func declare(n *Node, ctxt int) {
s = n.Sym
// kludgy: typecheckok means we're past parsing. Eg genwrapper may declare out of package names later.
if importpkg == nil && !(typecheckok != 0) && s.Pkg != localpkg {
if importpkg == nil && typecheckok == 0 && s.Pkg != localpkg {
Yyerror("cannot declare name %v", Sconv(s, 0))
}
......@@ -206,7 +206,7 @@ func declare(n *Node, ctxt int) {
gen = 0
if ctxt == PEXTERN {
externdcl = list(externdcl, n)
if dflag() != 0 {
if dflag() {
fmt.Printf("\t%v global decl %v %p\n", Ctxt.Line(int(lineno)), Sconv(s, 0), n)
}
} else {
......@@ -264,14 +264,14 @@ func addvar(n *Node, t *Type, ctxt int) {
* new_name_list (type | [type] = expr_list)
*/
func variter(vl *NodeList, t *Node, el *NodeList) *NodeList {
var doexpr int
var doexpr bool
var v *Node
var e *Node
var as2 *Node
var init *NodeList
init = nil
doexpr = bool2int(el != nil)
doexpr = el != nil
if count(el) == 1 && count(vl) > 1 {
e = el.N
......@@ -293,7 +293,7 @@ func variter(vl *NodeList, t *Node, el *NodeList) *NodeList {
}
for ; vl != nil; vl = vl.Next {
if doexpr != 0 {
if doexpr {
if el == nil {
Yyerror("missing expression in var declaration")
break
......@@ -479,17 +479,17 @@ func oldname(s *Sym) *Node {
/*
* := declarations
*/
func colasname(n *Node) int {
func colasname(n *Node) bool {
switch n.Op {
case ONAME,
ONONAME,
OPACK,
OTYPE,
OLITERAL:
return bool2int(n.Sym != nil)
return n.Sym != nil
}
return 0
return false
}
func colasdefn(left *NodeList, defn *Node) {
......@@ -511,7 +511,7 @@ func colasdefn(left *NodeList, defn *Node) {
if isblank(n) {
continue
}
if !(colasname(n) != 0) {
if !colasname(n) {
yyerrorl(int(defn.Lineno), "non-name %v on left side of :=", Nconv(n, 0))
nerr++
continue
......@@ -735,7 +735,7 @@ func funcargs2(t *Type) {
if t.Thistuple != 0 {
for ft = getthisx(t).Type; ft != nil; ft = ft.Down {
if !(ft.Nname != nil) || !(ft.Nname.Sym != nil) {
if ft.Nname == nil || ft.Nname.Sym == nil {
continue
}
n = ft.Nname // no need for newname(ft->nname->sym)
......@@ -746,7 +746,7 @@ func funcargs2(t *Type) {
if t.Intuple != 0 {
for ft = getinargx(t).Type; ft != nil; ft = ft.Down {
if !(ft.Nname != nil) || !(ft.Nname.Sym != nil) {
if ft.Nname == nil || ft.Nname.Sym == nil {
continue
}
n = ft.Nname
......@@ -757,7 +757,7 @@ func funcargs2(t *Type) {
if t.Outtuple != 0 {
for ft = getoutargx(t).Type; ft != nil; ft = ft.Down {
if !(ft.Nname != nil) || !(ft.Nname.Sym != nil) {
if ft.Nname == nil || ft.Nname.Sym == nil {
continue
}
n = ft.Nname
......@@ -925,7 +925,7 @@ func tostruct(l *NodeList) *Type {
tp = &f.Down
}
for f = t.Type; f != nil && !(t.Broke != 0); f = f.Down {
for f = t.Type; f != nil && t.Broke == 0; f = f.Down {
if f.Broke != 0 {
t.Broke = 1
}
......@@ -934,7 +934,7 @@ func tostruct(l *NodeList) *Type {
uniqgen++
checkdupfields(t.Type, "field")
if !(t.Broke != 0) {
if t.Broke == 0 {
checkwidth(t)
}
......@@ -962,7 +962,7 @@ func tofunargs(l *NodeList) *Type {
tp = &f.Down
}
for f = t.Type; f != nil && !(t.Broke != 0); f = f.Down {
for f = t.Type; f != nil && t.Broke == 0; f = f.Down {
if f.Broke != 0 {
t.Broke = 1
}
......@@ -1072,7 +1072,7 @@ func tointerface(l *NodeList) *Type {
}
}
for f = t.Type; f != nil && !(t.Broke != 0); f = f.Down {
for f = t.Type; f != nil && t.Broke == 0; f = f.Down {
if f.Broke != 0 {
t.Broke = 1
}
......@@ -1199,7 +1199,7 @@ func checkarglist(all *NodeList, input int) *NodeList {
}
n = Nod(ODCLFIELD, n, t)
if n.Right != nil && n.Right.Op == ODDD {
if !(input != 0) {
if input == 0 {
Yyerror("cannot use ... in output argument list")
} else if l.Next != nil {
Yyerror("can only use ... as final argument in list")
......@@ -1232,23 +1232,23 @@ func fakethis() *Node {
* *struct{} as the receiver.
* (See fakethis above.)
*/
func isifacemethod(f *Type) int {
func isifacemethod(f *Type) bool {
var rcvr *Type
var t *Type
rcvr = getthisx(f).Type
if rcvr.Sym != nil {
return 0
return false
}
t = rcvr.Type
if !(Isptr[t.Etype] != 0) {
return 0
if Isptr[t.Etype] == 0 {
return false
}
t = t.Type
if t.Sym != nil || t.Etype != TSTRUCT || t.Type != nil {
return 0
return false
}
return 1
return true
}
/*
......@@ -1480,7 +1480,7 @@ func addmethod(sf *Sym, t *Type, local bool, nointerface bool) {
}
}
if local && !(pa.Local != 0) {
if local && pa.Local == 0 {
// defining method on non-local type.
Yyerror("cannot define new methods on non-local type %v", Tconv(pa, 0))
......@@ -1506,7 +1506,7 @@ func addmethod(sf *Sym, t *Type, local bool, nointerface bool) {
}
f = structfield(n)
f.Nointerface = uint8(bool2int(nointerface))
f.Nointerface = nointerface
// during import unexported method names should be in the type's package
if importpkg != nil && f.Sym != nil && !exportname(f.Sym.Name) && f.Sym.Pkg != structpkg {
......
......@@ -69,7 +69,7 @@ func escapes(all *NodeList) {
func visit(n *Node) uint32 {
var min uint32
var recursive uint32
var recursive bool
var l *NodeList
var block *NodeList
......@@ -95,7 +95,7 @@ func visit(n *Node) uint32 {
// If visitcodelist found its way back to n->walkgen, then this
// block is a set of mutually recursive functions.
// Otherwise it's just a lone function that does not recurse.
recursive = uint32(bool2int(min == n.Walkgen))
recursive = min == n.Walkgen
// Remove connected component from stack.
// Mark walkgen so that future visits return a large number
......@@ -110,7 +110,7 @@ func visit(n *Node) uint32 {
l.Next = nil
// Run escape analysis on this set of functions.
analyze(block, int(recursive))
analyze(block, recursive)
}
return min
......@@ -199,7 +199,7 @@ type EscState struct {
dstcount int
edgecount int
noesc *NodeList
recursive int
recursive bool
}
var tags [16]*Strlit
......@@ -247,7 +247,7 @@ func parsetag(note *Strlit) int {
return EscReturn | em<<EscBits
}
func analyze(all *NodeList, recursive int) {
func analyze(all *NodeList, recursive bool) {
var l *NodeList
var es EscState
var e *EscState
......@@ -351,7 +351,7 @@ func escfunc(e *EscState, func_ *Node) {
}
// in a mutually recursive group we lose track of the return values
if e.recursive != 0 {
if e.recursive {
for ll = Curfn.Dcl; ll != nil; ll = ll.Next {
if ll.N.Op == ONAME && ll.N.Class == PPARAMOUT {
escflows(e, &e.theSink, ll.N)
......@@ -387,7 +387,7 @@ func escloopdepth(e *EscState, n *Node) {
switch n.Op {
case OLABEL:
if !(n.Left != nil) || !(n.Left.Sym != nil) {
if n.Left == nil || n.Left.Sym == nil {
Fatal("esc:label without label: %v", Nconv(n, obj.FmtSign))
}
......@@ -398,7 +398,7 @@ func escloopdepth(e *EscState, n *Node) {
n.Left.Sym.Label = &nonlooping
case OGOTO:
if !(n.Left != nil) || !(n.Left.Sym != nil) {
if n.Left == nil || n.Left.Sym == nil {
Fatal("esc:goto without label: %v", Nconv(n, obj.FmtSign))
}
......@@ -509,7 +509,7 @@ func esc(e *EscState, n *Node, up *Node) {
// Everything but fixed array is a dereference.
case ORANGE:
if Isfixedarray(n.Type) != 0 && n.List != nil && n.List.Next != nil {
if Isfixedarray(n.Type) && n.List != nil && n.List.Next != nil {
escassign(e, n.List.Next.N, n.Right)
}
......@@ -639,7 +639,7 @@ func esc(e *EscState, n *Node, up *Node) {
escassign(e, &e.theSink, n.Left)
case OAPPEND:
if !(n.Isddd != 0) {
if n.Isddd == 0 {
for ll = n.List.Next; ll != nil; ll = ll.Next {
escassign(e, &e.theSink, ll.N) // lose track of assign to dereference
}
......@@ -651,7 +651,7 @@ func esc(e *EscState, n *Node, up *Node) {
escassign(e, n, n.Left)
case OARRAYLIT:
if Isslice(n.Type) != 0 {
if Isslice(n.Type) {
n.Esc = EscNone // until proven otherwise
e.noesc = list(e.noesc, n)
n.Escloopdepth = e.loopdepth
......@@ -708,7 +708,7 @@ func esc(e *EscState, n *Node, up *Node) {
continue
}
a = v.Closure
if !(v.Byval != 0) {
if v.Byval == 0 {
a = Nod(OADDR, a, nil)
a.Lineno = v.Lineno
a.Escloopdepth = e.loopdepth
......@@ -805,7 +805,6 @@ func escassign(e *EscState, dst *Node, src *Node) {
default:
Dump("dst", dst)
Fatal("escassign: unexpected dst")
fallthrough
case OARRAYLIT,
OCLOSURE,
......@@ -829,7 +828,7 @@ func escassign(e *EscState, dst *Node, src *Node) {
return
case OINDEX:
if Isfixedarray(dst.Left.Type) != 0 {
if Isfixedarray(dst.Left.Type) {
escassign(e, dst.Left, src)
return
}
......@@ -914,7 +913,7 @@ func escassign(e *EscState, dst *Node, src *Node) {
// Index of array preserves input value.
case OINDEX:
if Isfixedarray(src.Left.Type) != 0 {
if Isfixedarray(src.Left.Type) {
escassign(e, dst, src.Left)
}
......@@ -999,7 +998,6 @@ func esccall(e *EscState, n *Node, up *Node) {
switch n.Op {
default:
Fatal("esccall")
fallthrough
case OCALLFUNC:
fn = n.Left
......@@ -1044,7 +1042,7 @@ func esccall(e *EscState, n *Node, up *Node) {
for lr = fn.Ntype.List; ll != nil && lr != nil; (func() { ll = ll.Next; lr = lr.Next })() {
src = ll.N
if lr.N.Isddd != 0 && !(n.Isddd != 0) {
if lr.N.Isddd != 0 && n.Isddd == 0 {
// Introduce ODDDARG node to represent ... allocation.
src = Nod(ODDDARG, nil, nil)
......@@ -1110,7 +1108,7 @@ func esccall(e *EscState, n *Node, up *Node) {
for t = getinargx(fntype).Type; ll != nil; ll = ll.Next {
src = ll.N
if t.Isddd != 0 && !(n.Isddd != 0) {
if t.Isddd != 0 && n.Isddd == 0 {
// Introduce ODDDARG node to represent ... allocation.
src = Nod(ODDDARG, nil, nil)
......@@ -1243,7 +1241,7 @@ const (
func escwalk(e *EscState, level int, dst *Node, src *Node) {
var ll *NodeList
var leaks int
var leaks bool
var newlevel int
if src.Walkgen == walkgen && src.Esclevel <= int32(level) {
......@@ -1292,11 +1290,11 @@ func escwalk(e *EscState, level int, dst *Node, src *Node) {
// The second clause is for values pointed at by an object passed to a call
// that returns something reached via indirect from the object.
// We don't know which result it is or how many indirects, so we treat it as leaking.
leaks = bool2int(level <= 0 && dst.Escloopdepth < src.Escloopdepth || level < 0 && dst == &e.funcParam && haspointers(src.Type))
leaks = level <= 0 && dst.Escloopdepth < src.Escloopdepth || level < 0 && dst == &e.funcParam && haspointers(src.Type)
switch src.Op {
case ONAME:
if src.Class == PPARAM && (leaks != 0 || dst.Escloopdepth < 0) && src.Esc != EscHeap {
if src.Class == PPARAM && (leaks || dst.Escloopdepth < 0) && src.Esc != EscHeap {
src.Esc = EscScope
if Debug['m'] != 0 {
Warnl(int(src.Lineno), "leaking param: %v", Nconv(src, obj.FmtShort))
......@@ -1306,7 +1304,7 @@ func escwalk(e *EscState, level int, dst *Node, src *Node) {
// Treat a PPARAMREF closure variable as equivalent to the
// original variable.
if src.Class == PPARAMREF {
if leaks != 0 && Debug['m'] != 0 {
if leaks && Debug['m'] != 0 {
Warnl(int(src.Lineno), "leaking closure reference %v", Nconv(src, obj.FmtShort))
}
escwalk(e, level, dst, src.Closure)
......@@ -1314,7 +1312,7 @@ func escwalk(e *EscState, level int, dst *Node, src *Node) {
case OPTRLIT,
OADDR:
if leaks != 0 {
if leaks {
src.Esc = EscHeap
addrescapes(src.Left)
if Debug['m'] != 0 {
......@@ -1329,7 +1327,7 @@ func escwalk(e *EscState, level int, dst *Node, src *Node) {
escwalk(e, newlevel, dst, src.Left)
case OARRAYLIT:
if Isfixedarray(src.Type) != 0 {
if Isfixedarray(src.Type) {
break
}
fallthrough
......@@ -1349,7 +1347,7 @@ func escwalk(e *EscState, level int, dst *Node, src *Node) {
OCLOSURE,
OCALLPART,
ORUNESTR:
if leaks != 0 {
if leaks {
src.Esc = EscHeap
if Debug['m'] != 0 {
Warnl(int(src.Lineno), "%v escapes to heap", Nconv(src, obj.FmtShort))
......@@ -1365,7 +1363,7 @@ func escwalk(e *EscState, level int, dst *Node, src *Node) {
escwalk(e, level, dst, src.Left)
case OINDEX:
if Isfixedarray(src.Left.Type) != 0 {
if Isfixedarray(src.Left.Type) {
escwalk(e, level, dst, src.Left)
break
}
......
......@@ -42,19 +42,19 @@ func exportname(s string) bool {
return unicode.IsUpper(r)
}
// initname reports whether s is the reserved function name "init".
func initname(s string) bool {
	return s == "init"
}
// exportedsym reports whether a symbol will be visible
// to files that import our package.
func exportedsym(sym *Sym) int {
func exportedsym(sym *Sym) bool {
// Builtins are visible everywhere.
if sym.Pkg == builtinpkg || sym.Origpkg == builtinpkg {
return 1
return true
}
return bool2int(sym.Pkg == localpkg && exportname(sym.Name))
return sym.Pkg == localpkg && exportname(sym.Name)
}
func autoexport(n *Node, ctxt int) {
......@@ -69,10 +69,10 @@ func autoexport(n *Node, ctxt int) {
}
// -A is for cmd/gc/mkbuiltin script, so export everything
if Debug['A'] != 0 || exportname(n.Sym.Name) || initname(n.Sym.Name) != 0 {
if Debug['A'] != 0 || exportname(n.Sym.Name) || initname(n.Sym.Name) {
exportsym(n)
}
if asmhdr != "" && n.Sym.Pkg == localpkg && !(n.Sym.Flags&SymAsm != 0) {
if asmhdr != "" && n.Sym.Pkg == localpkg && n.Sym.Flags&SymAsm == 0 {
n.Sym.Flags |= SymAsm
asmlist = list(asmlist, n)
}
......@@ -86,7 +86,7 @@ func dumppkg(p *Pkg) {
}
p.Exported = 1
suffix = ""
if !(p.Direct != 0) {
if p.Direct == 0 {
suffix = " // indirect"
}
fmt.Fprintf(bout, "\timport %s \"%v\"%s\n", p.Name, Zconv(p.Path, 0), suffix)
......@@ -102,7 +102,7 @@ func reexportdeplist(ll *NodeList) {
func reexportdep(n *Node) {
var t *Type
if !(n != nil) {
if n == nil {
return
}
......@@ -118,14 +118,14 @@ func reexportdep(n *Node) {
}
// nodes for method calls.
if !(n.Type != nil) || n.Type.Thistuple > 0 {
if n.Type == nil || n.Type.Thistuple > 0 {
break
}
fallthrough
// fallthrough
case PEXTERN:
if n.Sym != nil && !(exportedsym(n.Sym) != 0) {
if n.Sym != nil && !exportedsym(n.Sym) {
if Debug['E'] != 0 {
fmt.Printf("reexport name %v\n", Sconv(n.Sym, 0))
}
......@@ -141,7 +141,7 @@ func reexportdep(n *Node) {
if Isptr[t.Etype] != 0 {
t = t.Type
}
if t != nil && t.Sym != nil && t.Sym.Def != nil && !(exportedsym(t.Sym) != 0) {
if t != nil && t.Sym != nil && t.Sym.Def != nil && !exportedsym(t.Sym) {
if Debug['E'] != 0 {
fmt.Printf("reexport type %v from declaration\n", Sconv(t.Sym, 0))
}
......@@ -155,7 +155,7 @@ func reexportdep(n *Node) {
if Isptr[t.Etype] != 0 {
t = t.Type
}
if t != nil && t.Sym != nil && t.Sym.Def != nil && !(exportedsym(t.Sym) != 0) {
if t != nil && t.Sym != nil && t.Sym.Def != nil && !exportedsym(t.Sym) {
if Debug['E'] != 0 {
fmt.Printf("reexport literal type %v\n", Sconv(t.Sym, 0))
}
......@@ -166,7 +166,7 @@ func reexportdep(n *Node) {
// fallthrough
case OTYPE:
if n.Sym != nil && !(exportedsym(n.Sym) != 0) {
if n.Sym != nil && !exportedsym(n.Sym) {
if Debug['E'] != 0 {
fmt.Printf("reexport literal/type %v\n", Sconv(n.Sym, 0))
}
......@@ -192,10 +192,10 @@ func reexportdep(n *Node) {
OMAKECHAN:
t = n.Type
if !(t.Sym != nil) && t.Type != nil {
if t.Sym == nil && t.Type != nil {
t = t.Type
}
if t != nil && t.Sym != nil && t.Sym.Def != nil && !(exportedsym(t.Sym) != 0) {
if t != nil && t.Sym != nil && t.Sym.Def != nil && !exportedsym(t.Sym) {
if Debug['E'] != 0 {
fmt.Printf("reexport type for expression %v\n", Sconv(t.Sym, 0))
}
......@@ -227,7 +227,7 @@ func dumpexportconst(s *Sym) {
t = n.Type // may or may not be specified
dumpexporttype(t)
if t != nil && !(isideal(t) != 0) {
if t != nil && !isideal(t) {
fmt.Fprintf(bout, "\tconst %v %v = %v\n", Sconv(s, obj.FmtSharp), Tconv(t, obj.FmtSharp), Vconv(&n.Val, obj.FmtSharp))
} else {
fmt.Fprintf(bout, "\tconst %v = %v\n", Sconv(s, obj.FmtSharp), Vconv(&n.Val, obj.FmtSharp))
......@@ -329,7 +329,7 @@ func dumpexporttype(t *Type) {
fmt.Fprintf(bout, "\ttype %v %v\n", Sconv(t.Sym, obj.FmtSharp), Tconv(t, obj.FmtSharp|obj.FmtLong))
for i = 0; i < n; i++ {
f = m[i]
if f.Nointerface != 0 {
if f.Nointerface {
fmt.Fprintf(bout, "\t//go:nointerface\n")
}
if f.Type.Nname != nil && f.Type.Nname.Inl != nil { // nname was set by caninl
......@@ -428,7 +428,7 @@ func importsym(s *Sym, op int) *Sym {
// mark the symbol so it is not reexported
if s.Def == nil {
if exportname(s.Name) || initname(s.Name) != 0 {
if exportname(s.Name) || initname(s.Name) {
s.Flags |= SymExport
} else {
s.Flags |= SymPackage // package scope
......@@ -474,7 +474,7 @@ func importimport(s *Sym, z *Strlit) {
Yyerror("conflicting names %s and %s for package \"%v\"", p.Name, s.Name, Zconv(p.Path, 0))
}
if !(incannedimport != 0) && myimportpath != "" && z.S == myimportpath {
if incannedimport == 0 && myimportpath != "" && z.S == myimportpath {
Yyerror("import \"%v\": package depends on \"%v\" (import cycle)", Zconv(importpkg.Path, 0), Zconv(z, 0))
errorexit()
}
......
......@@ -207,15 +207,15 @@ func Jconv(n *Node, flag int) string {
c = flag & obj.FmtShort
if !(c != 0) && n.Ullman != 0 {
if c == 0 && n.Ullman != 0 {
fp += fmt.Sprintf(" u(%d)", n.Ullman)
}
if !(c != 0) && n.Addable != 0 {
if c == 0 && n.Addable != 0 {
fp += fmt.Sprintf(" a(%d)", n.Addable)
}
if !(c != 0) && n.Vargen != 0 {
if c == 0 && n.Vargen != 0 {
fp += fmt.Sprintf(" g(%d)", n.Vargen)
}
......@@ -223,7 +223,7 @@ func Jconv(n *Node, flag int) string {
fp += fmt.Sprintf(" l(%d)", n.Lineno)
}
if !(c != 0) && n.Xoffset != BADWIDTH {
if c == 0 && n.Xoffset != BADWIDTH {
fp += fmt.Sprintf(" x(%d%+d)", n.Xoffset, n.Stkdelta)
}
......@@ -261,7 +261,7 @@ func Jconv(n *Node, flag int) string {
fp += fmt.Sprintf(" esc(no)")
case EscNever:
if !(c != 0) {
if c == 0 {
fp += fmt.Sprintf(" esc(N)")
}
......@@ -273,11 +273,11 @@ func Jconv(n *Node, flag int) string {
fp += fmt.Sprintf(" ld(%d)", n.Escloopdepth)
}
if !(c != 0) && n.Typecheck != 0 {
if c == 0 && n.Typecheck != 0 {
fp += fmt.Sprintf(" tc(%d)", n.Typecheck)
}
if !(c != 0) && n.Dodata != 0 {
if c == 0 && n.Dodata != 0 {
fp += fmt.Sprintf(" dd(%d)", n.Dodata)
}
......@@ -301,7 +301,7 @@ func Jconv(n *Node, flag int) string {
fp += fmt.Sprintf(" assigned")
}
if !(c != 0) && n.Used != 0 {
if c == 0 && n.Used != 0 {
fp += fmt.Sprintf(" used(%d)", n.Used)
}
return fp
......@@ -497,7 +497,7 @@ func symfmt(s *Sym, flag int) string {
var p string
if s.Pkg != nil && !(flag&obj.FmtShort != 0 /*untyped*/) {
if s.Pkg != nil && flag&obj.FmtShort == 0 /*untyped*/ {
switch fmtmode {
case FErr: // This is for the user
if s.Pkg == localpkg {
......@@ -608,7 +608,7 @@ func typefmt(t *Type, flag int) string {
}
// Unless the 'l' flag was specified, if the type has a name, just print that name.
if !(flag&obj.FmtLong != 0 /*untyped*/) && t.Sym != nil && t.Etype != TFIELD && t != Types[t.Etype] {
if flag&obj.FmtLong == 0 /*untyped*/ && t.Sym != nil && t.Etype != TFIELD && t != Types[t.Etype] {
switch fmtmode {
case FTypeId:
if flag&obj.FmtShort != 0 /*untyped*/ {
......@@ -802,7 +802,7 @@ func typefmt(t *Type, flag int) string {
return fp
case TFIELD:
if !(flag&obj.FmtShort != 0 /*untyped*/) {
if flag&obj.FmtShort == 0 /*untyped*/ {
s = t.Sym
// Take the name from the original, lest we substituted it with ~r%d or ~b%d.
......@@ -822,7 +822,7 @@ func typefmt(t *Type, flag int) string {
}
}
if s != nil && !(t.Embedded != 0) {
if s != nil && t.Embedded == 0 {
if t.Funarg != 0 {
fp += fmt.Sprintf("%v ", Nconv(t.Nname, 0))
} else if flag&obj.FmtLong != 0 /*untyped*/ {
......@@ -850,7 +850,7 @@ func typefmt(t *Type, flag int) string {
fp += fmt.Sprintf("%v", Tconv(t.Type, 0))
}
if !(flag&obj.FmtShort != 0 /*untyped*/) && t.Note != nil {
if flag&obj.FmtShort == 0 /*untyped*/ && t.Note != nil {
fp += fmt.Sprintf(" \"%v\"", Zconv(t.Note, 0))
}
return fp
......@@ -882,23 +882,23 @@ func typefmt(t *Type, flag int) string {
}
// Statements which may be rendered with a simplestmt as init.
func stmtwithinit(op int) int {
func stmtwithinit(op int) bool {
switch op {
case OIF,
OFOR,
OSWITCH:
return 1
return true
}
return 0
return false
}
func stmtfmt(n *Node) string {
var f string
var complexinit int
var simpleinit int
var extrablock int
var complexinit bool
var simpleinit bool
var extrablock bool
// some statements allow for an init, but at most one,
// but we may have an arbitrary number added, eg by typecheck
......@@ -906,19 +906,19 @@ func stmtfmt(n *Node) string {
// block starting with the init statements.
// if we can just say "for" n->ninit; ... then do so
simpleinit = bool2int(n.Ninit != nil && !(n.Ninit.Next != nil) && !(n.Ninit.N.Ninit != nil) && stmtwithinit(int(n.Op)) != 0)
simpleinit = n.Ninit != nil && n.Ninit.Next == nil && n.Ninit.N.Ninit == nil && stmtwithinit(int(n.Op))
// otherwise, print the inits as separate statements
complexinit = bool2int(n.Ninit != nil && !(simpleinit != 0) && (fmtmode != FErr))
complexinit = n.Ninit != nil && !simpleinit && (fmtmode != FErr)
// but if it was for if/for/switch, put in an extra surrounding block to limit the scope
extrablock = bool2int(complexinit != 0 && stmtwithinit(int(n.Op)) != 0)
extrablock = complexinit && stmtwithinit(int(n.Op))
if extrablock != 0 {
if extrablock {
f += "{"
}
if complexinit != 0 {
if complexinit {
f += fmt.Sprintf(" %v; ", Hconv(n.Ninit, 0))
}
......@@ -951,7 +951,7 @@ func stmtfmt(n *Node) string {
break
}
if n.Colas != 0 && !(complexinit != 0) {
if n.Colas != 0 && !complexinit {
f += fmt.Sprintf("%v := %v", Nconv(n.Left, 0), Nconv(n.Right, 0))
} else {
f += fmt.Sprintf("%v = %v", Nconv(n.Left, 0), Nconv(n.Right, 0))
......@@ -970,7 +970,7 @@ func stmtfmt(n *Node) string {
f += fmt.Sprintf("%v %v= %v", Nconv(n.Left, 0), Oconv(int(n.Etype), obj.FmtSharp), Nconv(n.Right, 0))
case OAS2:
if n.Colas != 0 && !(complexinit != 0) {
if n.Colas != 0 && !complexinit {
f += fmt.Sprintf("%v := %v", Hconv(n.List, obj.FmtComma), Hconv(n.Rlist, obj.FmtComma))
break
}
......@@ -996,7 +996,7 @@ func stmtfmt(n *Node) string {
f += fmt.Sprintf("defer %v", Nconv(n.Left, 0))
case OIF:
if simpleinit != 0 {
if simpleinit {
f += fmt.Sprintf("if %v; %v { %v }", Nconv(n.Ninit.N, 0), Nconv(n.Ntest, 0), Hconv(n.Nbody, 0))
} else {
f += fmt.Sprintf("if %v { %v }", Nconv(n.Ntest, 0), Hconv(n.Nbody, 0))
......@@ -1012,7 +1012,7 @@ func stmtfmt(n *Node) string {
}
f += "for"
if simpleinit != 0 {
if simpleinit {
f += fmt.Sprintf(" %v;", Nconv(n.Ninit.N, 0))
} else if n.Nincr != nil {
f += " ;"
......@@ -1024,7 +1024,7 @@ func stmtfmt(n *Node) string {
if n.Nincr != nil {
f += fmt.Sprintf("; %v", Nconv(n.Nincr, 0))
} else if simpleinit != 0 {
} else if simpleinit {
f += ";"
}
......@@ -1051,7 +1051,7 @@ func stmtfmt(n *Node) string {
}
f += fmt.Sprintf("%v", Oconv(int(n.Op), obj.FmtSharp))
if simpleinit != 0 {
if simpleinit {
f += fmt.Sprintf(" %v;", Nconv(n.Ninit.N, 0))
}
if n.Ntest != nil {
......@@ -1087,7 +1087,7 @@ func stmtfmt(n *Node) string {
}
ret:
if extrablock != 0 {
if extrablock {
f += "}"
}
......@@ -1211,7 +1211,7 @@ func exprfmt(n *Node, prec int) string {
var f string
var nprec int
var ptrlit int
var ptrlit bool
var l *NodeList
for n != nil && n.Implicit != 0 && (n.Op == OIND || n.Op == OADDR) {
......@@ -1368,10 +1368,10 @@ func exprfmt(n *Node, prec int) string {
return f
case OCOMPLIT:
ptrlit = bool2int(n.Right != nil && n.Right.Implicit != 0 && n.Right.Type != nil && Isptr[n.Right.Type.Etype] != 0)
ptrlit = n.Right != nil && n.Right.Implicit != 0 && n.Right.Type != nil && Isptr[n.Right.Type.Etype] != 0
if fmtmode == FErr {
if n.Right != nil && n.Right.Type != nil && !(n.Implicit != 0) {
if ptrlit != 0 {
if n.Right != nil && n.Right.Type != nil && n.Implicit == 0 {
if ptrlit {
f += fmt.Sprintf("&%v literal", Tconv(n.Right.Type.Type, 0))
return f
} else {
......@@ -1384,7 +1384,7 @@ func exprfmt(n *Node, prec int) string {
return f
}
if fmtmode == FExp && ptrlit != 0 {
if fmtmode == FExp && ptrlit {
// typecheck has overwritten OIND by OTYPE with pointer type.
f += fmt.Sprintf("(&%v{ %v })", Tconv(n.Right.Type.Type, 0), Hconv(n.List, obj.FmtComma))
return f
......@@ -1418,7 +1418,7 @@ func exprfmt(n *Node, prec int) string {
}
}
if !(n.Implicit != 0) {
if n.Implicit == 0 {
f += "})"
return f
}
......@@ -1454,11 +1454,11 @@ func exprfmt(n *Node, prec int) string {
}
}
if !(n.Left != nil) && n.Right != nil {
if n.Left == nil && n.Right != nil {
f += fmt.Sprintf(":%v", Nconv(n.Right, 0))
return f
}
if n.Left != nil && !(n.Right != nil) {
if n.Left != nil && n.Right == nil {
f += fmt.Sprintf("%v:", Nconv(n.Left, 0))
return f
}
......@@ -1686,15 +1686,15 @@ func indent(s string) string {
func nodedump(n *Node, flag int) string {
var fp string
var recur int
var recur bool
if n == nil {
return fp
}
recur = bool2int(!(flag&obj.FmtShort != 0 /*untyped*/))
recur = flag&obj.FmtShort == 0 /*untyped*/
if recur != 0 {
if recur {
fp = indent(fp)
if dumpdepth > 10 {
fp += "..."
......@@ -1727,7 +1727,7 @@ func nodedump(n *Node, flag int) string {
} else {
fp += fmt.Sprintf("%v%v", Oconv(int(n.Op), 0), Jconv(n, 0))
}
if recur != 0 && n.Type == nil && n.Ntype != nil {
if recur && n.Type == nil && n.Ntype != nil {
fp = indent(fp)
fp += fmt.Sprintf("%v-ntype%v", Oconv(int(n.Op), 0), Nconv(n.Ntype, 0))
}
......@@ -1737,7 +1737,7 @@ func nodedump(n *Node, flag int) string {
case OTYPE:
fp += fmt.Sprintf("%v %v%v type=%v", Oconv(int(n.Op), 0), Sconv(n.Sym, 0), Jconv(n, 0), Tconv(n.Type, 0))
if recur != 0 && n.Type == nil && n.Ntype != nil {
if recur && n.Type == nil && n.Ntype != nil {
fp = indent(fp)
fp += fmt.Sprintf("%v-ntype%v", Oconv(int(n.Op), 0), Nconv(n.Ntype, 0))
}
......@@ -1751,7 +1751,7 @@ func nodedump(n *Node, flag int) string {
fp += fmt.Sprintf(" %v", Tconv(n.Type, 0))
}
if recur != 0 {
if recur {
if n.Left != nil {
fp += fmt.Sprintf("%v", Nconv(n.Left, 0))
}
......
......@@ -109,7 +109,7 @@ func addrescapes(n *Node) {
// is always a heap pointer anyway.
case ODOT,
OINDEX:
if !(Isslice(n.Left.Type) != 0) {
if !Isslice(n.Left.Type) {
addrescapes(n.Left)
}
}
......@@ -253,7 +253,6 @@ func cgen_proc(n *Node, proc int) {
switch n.Left.Op {
default:
Fatal("cgen_proc: unknown call %v", Oconv(int(n.Left.Op), 0))
fallthrough
case OCALLMETH:
Cgen_callmeth(n.Left, proc)
......@@ -280,7 +279,7 @@ func cgen_dcl(n *Node) {
Fatal("cgen_dcl")
}
if !(n.Class&PHEAP != 0) {
if n.Class&PHEAP == 0 {
return
}
if compiling_runtime != 0 {
......@@ -304,7 +303,7 @@ func cgen_discard(nr *Node) {
switch nr.Op {
case ONAME:
if !(nr.Class&PHEAP != 0) && nr.Class != PEXTERN && nr.Class != PFUNC && nr.Class != PPARAMREF {
if nr.Class&PHEAP == 0 && nr.Class != PEXTERN && nr.Class != PFUNC && nr.Class != PPARAMREF {
gused(nr)
}
......@@ -480,7 +479,7 @@ func Cgen_slice(n *Node, res *Node) {
tmpcap = tmplen
}
if isnil(n.Left) != 0 {
if isnil(n.Left) {
Tempname(&src, n.Left.Type)
Thearch.Cgen(n.Left, &src)
} else {
......@@ -491,7 +490,7 @@ func Cgen_slice(n *Node, res *Node) {
}
if n.Op == OSLICEARR || n.Op == OSLICE3ARR {
if !(Isptr[n.Left.Type.Etype] != 0) {
if Isptr[n.Left.Type.Etype] == 0 {
Fatal("slicearr is supposed to work on pointer: %v\n", Nconv(n, obj.FmtSign))
}
Thearch.Cgen(&src, base)
......@@ -668,13 +667,12 @@ func gen(n *Node) {
var p2 *obj.Prog
var p3 *obj.Prog
var lab *Label
var wasregalloc int32
//dump("gen", n);
lno = setlineno(n)
wasregalloc = int32(Thearch.Anyregalloc())
wasregalloc := Thearch.Anyregalloc()
if n == nil {
goto ret
......@@ -879,7 +877,7 @@ func gen(n *Node) {
cgen_dcl(n.Left)
case OAS:
if gen_as_init(n) != 0 {
if gen_as_init(n) {
break
}
Cgen_as(n.Left, n.Right)
......@@ -911,7 +909,7 @@ func gen(n *Node) {
}
ret:
if int32(Thearch.Anyregalloc()) != wasregalloc {
if Thearch.Anyregalloc() != wasregalloc {
Dump("node", n)
Fatal("registers left allocated")
}
......@@ -936,7 +934,7 @@ func Cgen_as(nl *Node, nr *Node) {
return
}
if nr == nil || iszero(nr) != 0 {
if nr == nil || iszero(nr) {
// heaps should already be clear
if nr == nil && (nl.Class&PHEAP != 0) {
return
......@@ -946,7 +944,7 @@ func Cgen_as(nl *Node, nr *Node) {
if tl == nil {
return
}
if Isfat(tl) != 0 {
if Isfat(tl) {
if nl.Op == ONAME {
Gvardef(nl)
}
......@@ -1002,7 +1000,7 @@ func checklabels() {
continue
}
if lab.Use == nil && !(lab.Used != 0) {
if lab.Use == nil && lab.Used == 0 {
yyerrorl(int(lab.Def.Lineno), "label %v defined and not used", Sconv(lab.Sym, 0))
continue
}
......
......@@ -7,7 +7,6 @@ package gc
import (
"bytes"
"cmd/internal/obj"
"encoding/binary"
)
// Copyright 2009 The Go Authors. All rights reserved.
......@@ -162,7 +161,7 @@ type Node struct {
Addable uint8
Trecur uint8
Etype uint8
Bounded uint8
Bounded bool
Class uint8
Method uint8
Embedded uint8
......@@ -191,7 +190,7 @@ type Node struct {
Likely int8
Hasbreak uint8
Needzero uint8
Needctxt uint8
Needctxt bool
Esc uint
Funcdepth int
Type *Type
......@@ -245,7 +244,7 @@ type NodeList struct {
type Type struct {
Etype uint8
Nointerface uint8
Nointerface bool
Noalg uint8
Chan uint8
Trecur uint8
......@@ -918,6 +917,10 @@ var nblank *Node
var Use_sse int
var hunk string
var nhunk int32
var thunk int32
var Funcdepth int
......@@ -1119,7 +1122,6 @@ const (
)
type Arch struct {
ByteOrder binary.ByteOrder
Thechar int
Thestring string
Thelinkarch *obj.LinkArch
......@@ -1127,7 +1129,7 @@ type Arch struct {
REGSP int
REGCTXT int
MAXWIDTH int64
Anyregalloc func() int
Anyregalloc func() bool
Betypeinit func()
Bgen func(*Node, bool, int, *obj.Prog)
Cgen func(*Node, *Node)
......@@ -1148,10 +1150,10 @@ type Arch struct {
Proginfo func(*ProgInfo, *obj.Prog)
Regalloc func(*Node, *Type, *Node)
Regfree func(*Node)
Regtyp func(*obj.Addr) int
Sameaddr func(*obj.Addr, *obj.Addr) int
Smallindir func(*obj.Addr, *obj.Addr) int
Stackaddr func(*obj.Addr) int
Regtyp func(*obj.Addr) bool
Sameaddr func(*obj.Addr, *obj.Addr) bool
Smallindir func(*obj.Addr, *obj.Addr) bool
Stackaddr func(*obj.Addr) bool
Excludedregs func() uint64
RtoB func(int) uint64
FtoB func(int) uint64
......
......@@ -41,7 +41,7 @@ var dpc *obj.Prog
/*
* Is this node a memory operand?
*/
func Ismem(n *Node) int {
func Ismem(n *Node) bool {
switch n.Op {
case OITAB,
OSPTR,
......@@ -51,29 +51,29 @@ func Ismem(n *Node) int {
ONAME,
OPARAM,
OCLOSUREVAR:
return 1
return true
case OADDR:
return bool2int(Thearch.Thechar == '6' || Thearch.Thechar == '9') // because 6g uses PC-relative addressing; TODO(rsc): not sure why 9g too
return Thearch.Thechar == '6' || Thearch.Thechar == '9' // because 6g uses PC-relative addressing; TODO(rsc): not sure why 9g too
}
return 0
return false
}
func Samereg(a *Node, b *Node) int {
func Samereg(a *Node, b *Node) bool {
if a == nil || b == nil {
return 0
return false
}
if a.Op != OREGISTER {
return 0
return false
}
if b.Op != OREGISTER {
return 0
return false
}
if a.Val.U.Reg != b.Val.U.Reg {
return 0
return false
}
return 1
return true
}
/*
......@@ -174,15 +174,15 @@ func fixautoused(p *obj.Prog) {
for lp = &p; ; {
p = *lp
if !(p != nil) {
if p == nil {
break
}
if p.As == obj.ATYPE && p.From.Node != nil && p.From.Name == obj.NAME_AUTO && !(((p.From.Node).(*Node)).Used != 0) {
if p.As == obj.ATYPE && p.From.Node != nil && p.From.Name == obj.NAME_AUTO && ((p.From.Node).(*Node)).Used == 0 {
*lp = p.Link
continue
}
if (p.As == obj.AVARDEF || p.As == obj.AVARKILL) && p.To.Node != nil && !(((p.To.Node).(*Node)).Used != 0) {
if (p.As == obj.AVARDEF || p.As == obj.AVARKILL) && p.To.Node != nil && ((p.To.Node).(*Node)).Used == 0 {
// Cannot remove VARDEF instruction, because - unlike TYPE handled above -
// VARDEFs are interspersed with other code, and a jump might be using the
// VARDEF as a target. Replace with a no-op instead. A later pass will remove
......@@ -256,18 +256,18 @@ func gused(n *Node) {
Thearch.Gins(obj.ANOP, n, nil) // used
}
func Isfat(t *Type) int {
func Isfat(t *Type) bool {
if t != nil {
switch t.Etype {
case TSTRUCT,
TARRAY,
TSTRING,
TINTER: // maybe remove later
return 1
return true
}
}
return 0
return false
}
func markautoused(p *obj.Prog) {
......@@ -289,7 +289,7 @@ func markautoused(p *obj.Prog) {
func Naddr(n *Node, a *obj.Addr, canemitcode int) {
var s *Sym
*a = obj.Zprog.From
*a = obj.Addr{}
if n == nil {
return
}
......@@ -343,7 +343,7 @@ func Naddr(n *Node, a *obj.Addr, canemitcode int) {
a.Node = n.Left.Orig
case OCLOSUREVAR:
if !(Curfn.Needctxt != 0) {
if !Curfn.Needctxt {
Fatal("closurevar without needctxt")
}
a.Type = obj.TYPE_MEM
......@@ -383,7 +383,6 @@ func Naddr(n *Node, a *obj.Addr, canemitcode int) {
switch n.Class {
default:
Fatal("naddr: ONAME class %v %d\n", Sconv(n.Sym, 0), n.Class)
fallthrough
case PEXTERN:
a.Name = obj.NAME_EXTERN
......
......@@ -53,7 +53,7 @@ func renameinit() *Sym {
* return (11)
* }
*/
func anyinit(n *NodeList) int {
func anyinit(n *NodeList) bool {
var h uint32
var s *Sym
var l *NodeList
......@@ -68,20 +68,20 @@ func anyinit(n *NodeList) int {
break
case OAS:
if isblank(l.N.Left) && candiscard(l.N.Right) != 0 {
if isblank(l.N.Left) && candiscard(l.N.Right) {
break
}
fallthrough
// fall through
default:
return 1
return true
}
}
// is this main
if localpkg.Name == "main" {
return 1
return true
}
// is there an explicit init function
......@@ -89,7 +89,7 @@ func anyinit(n *NodeList) int {
s = Lookup(namebuf)
if s.Def != nil {
return 1
return true
}
// are there any imported init functions
......@@ -101,12 +101,12 @@ func anyinit(n *NodeList) int {
if s.Def == nil {
continue
}
return 1
return true
}
}
// then none
return 0
return false
}
func fninit(n *NodeList) {
......@@ -126,7 +126,7 @@ func fninit(n *NodeList) {
}
n = initfix(n)
if !(anyinit(n) != 0) {
if !anyinit(n) {
return
}
......
......@@ -56,7 +56,7 @@ func fnpkg(fn *Node) *Pkg {
if Isptr[rcvr.Etype] != 0 {
rcvr = rcvr.Type
}
if !(rcvr.Sym != nil) {
if rcvr.Sym == nil {
Fatal("receiver with no sym: [%v] %v (%v)", Sconv(fn.Sym, 0), Nconv(fn, obj.FmtLong), Tconv(rcvr, 0))
}
return rcvr.Sym.Pkg
......@@ -114,7 +114,7 @@ func caninl(fn *Node) {
if fn.Op != ODCLFUNC {
Fatal("caninl %v", Nconv(fn, 0))
}
if !(fn.Nname != nil) {
if fn.Nname == nil {
Fatal("caninl no nname %v", Nconv(fn, obj.FmtSign))
}
......@@ -137,7 +137,7 @@ func caninl(fn *Node) {
}
budget = 40 // allowed hairyness
if ishairylist(fn.Nbody, &budget) != 0 {
if ishairylist(fn.Nbody, &budget) {
return
}
......@@ -162,18 +162,18 @@ func caninl(fn *Node) {
}
// Look for anything we want to punt on.
func ishairylist(ll *NodeList, budget *int) int {
func ishairylist(ll *NodeList, budget *int) bool {
for ; ll != nil; ll = ll.Next {
if ishairy(ll.N, budget) != 0 {
return 1
if ishairy(ll.N, budget) {
return true
}
}
return 0
return false
}
func ishairy(n *Node, budget *int) int {
if !(n != nil) {
return 0
func ishairy(n *Node, budget *int) bool {
if n == nil {
return false
}
// Things that are too hairy, irrespective of the budget
......@@ -185,7 +185,7 @@ func ishairy(n *Node, budget *int) int {
OPANIC,
ORECOVER:
if Debug['l'] < 4 {
return 1
return true
}
case OCLOSURE,
......@@ -199,12 +199,12 @@ func ishairy(n *Node, budget *int) int {
ODCLTYPE, // can't print yet
ODCLCONST, // can't print yet
ORETJMP:
return 1
return true
}
(*budget)--
return bool2int(*budget < 0 || ishairy(n.Left, budget) != 0 || ishairy(n.Right, budget) != 0 || ishairylist(n.List, budget) != 0 || ishairylist(n.Rlist, budget) != 0 || ishairylist(n.Ninit, budget) != 0 || ishairy(n.Ntest, budget) != 0 || ishairy(n.Nincr, budget) != 0 || ishairylist(n.Nbody, budget) != 0 || ishairylist(n.Nelse, budget) != 0)
return *budget < 0 || ishairy(n.Left, budget) || ishairy(n.Right, budget) || ishairylist(n.List, budget) || ishairylist(n.Rlist, budget) || ishairylist(n.Ninit, budget) || ishairy(n.Ntest, budget) || ishairy(n.Nincr, budget) || ishairylist(n.Nbody, budget) || ishairylist(n.Nelse, budget)
}
// Inlcopy and inlcopylist recursively copy the body of a function.
......@@ -506,7 +506,7 @@ func mkinlcall(np **Node, fn *Node, isddd int) {
func tinlvar(t *Type) *Node {
if t.Nname != nil && !isblank(t.Nname) {
if !(t.Nname.Inlvar != nil) {
if t.Nname.Inlvar == nil {
Fatal("missing inlvar for %v\n", Nconv(t.Nname, 0))
}
return t.Nname.Inlvar
......@@ -524,7 +524,7 @@ var inlgen int
// parameters.
func mkinlcall1(np **Node, fn *Node, isddd int) {
var i int
var chkargcount int
var chkargcount bool
var n *Node
var call *Node
var saveinlfn *Node
......@@ -535,7 +535,7 @@ func mkinlcall1(np **Node, fn *Node, isddd int) {
var ninit *NodeList
var body *NodeList
var t *Type
var variadic int
var variadic bool
var varargcount int
var multiret int
var vararg *Node
......@@ -623,10 +623,10 @@ func mkinlcall1(np **Node, fn *Node, isddd int) {
// method call with a receiver.
t = getthisx(fn.Type).Type
if t != nil && t.Nname != nil && !isblank(t.Nname) && !(t.Nname.Inlvar != nil) {
if t != nil && t.Nname != nil && !isblank(t.Nname) && t.Nname.Inlvar == nil {
Fatal("missing inlvar for %v\n", Nconv(t.Nname, 0))
}
if !(n.Left.Left != nil) {
if n.Left.Left == nil {
Fatal("method call without receiver: %v", Nconv(n, obj.FmtSign))
}
if t == nil {
......@@ -640,26 +640,26 @@ func mkinlcall1(np **Node, fn *Node, isddd int) {
}
// check if inlined function is variadic.
variadic = 0
variadic = false
varargtype = nil
varargcount = 0
for t = fn.Type.Type.Down.Down.Type; t != nil; t = t.Down {
if t.Isddd != 0 {
variadic = 1
variadic = true
varargtype = t.Type
}
}
// but if argument is dotted too forget about variadicity.
if variadic != 0 && isddd != 0 {
variadic = 0
if variadic && isddd != 0 {
variadic = false
}
// check if argument is actually a returned tuple from call.
multiret = 0
if n.List != nil && !(n.List.Next != nil) {
if n.List != nil && n.List.Next == nil {
switch n.List.N.Op {
case OCALL,
OCALLFUNC,
......@@ -671,7 +671,7 @@ func mkinlcall1(np **Node, fn *Node, isddd int) {
}
}
if variadic != 0 {
if variadic {
varargcount = count(n.List) + multiret
if n.Left.Op != ODOTMETH {
varargcount -= fn.Type.Thistuple
......@@ -688,14 +688,14 @@ func mkinlcall1(np **Node, fn *Node, isddd int) {
// TODO: if len(nlist) == 1 but multiple args, check that n->list->n is a call?
if fn.Type.Thistuple != 0 && n.Left.Op != ODOTMETH {
// non-method call to method
if !(n.List != nil) {
if n.List == nil {
Fatal("non-method call to method without first arg: %v", Nconv(n, obj.FmtSign))
}
// append receiver inlvar to LHS.
t = getthisx(fn.Type).Type
if t != nil && t.Nname != nil && !isblank(t.Nname) && !(t.Nname.Inlvar != nil) {
if t != nil && t.Nname != nil && !isblank(t.Nname) && t.Nname.Inlvar == nil {
Fatal("missing inlvar for %v\n", Nconv(t.Nname, 0))
}
if t == nil {
......@@ -706,14 +706,14 @@ func mkinlcall1(np **Node, fn *Node, isddd int) {
}
// append ordinary arguments to LHS.
chkargcount = bool2int(n.List != nil && n.List.Next != nil)
chkargcount = n.List != nil && n.List.Next != nil
vararg = nil // the slice argument to a variadic call
varargs = nil // the list of LHS names to put in vararg.
if !(chkargcount != 0) {
if !chkargcount {
// 0 or 1 expression on RHS.
for t = getinargx(fn.Type).Type; t != nil; t = t.Down {
if variadic != 0 && t.Isddd != 0 {
if variadic && t.Isddd != 0 {
vararg = tinlvar(t)
for i = 0; i < varargcount && ll != nil; i++ {
m = argvar(varargtype, i)
......@@ -729,10 +729,10 @@ func mkinlcall1(np **Node, fn *Node, isddd int) {
} else {
// match arguments except final variadic (unless the call is dotted itself)
for t = getinargx(fn.Type).Type; t != nil; {
if !(ll != nil) {
if ll == nil {
break
}
if variadic != 0 && t.Isddd != 0 {
if variadic && t.Isddd != 0 {
break
}
as.List = list(as.List, tinlvar(t))
......@@ -741,7 +741,7 @@ func mkinlcall1(np **Node, fn *Node, isddd int) {
}
// match varargcount arguments with variadic parameters.
if variadic != 0 && t != nil && t.Isddd != 0 {
if variadic && t != nil && t.Isddd != 0 {
vararg = tinlvar(t)
for i = 0; i < varargcount && ll != nil; i++ {
m = argvar(varargtype, i)
......@@ -766,9 +766,9 @@ func mkinlcall1(np **Node, fn *Node, isddd int) {
}
// turn the variadic args into a slice.
if variadic != 0 {
if variadic {
as = Nod(OAS, vararg, nil)
if !(varargcount != 0) {
if varargcount == 0 {
as.Right = nodnil()
as.Right.Type = varargtype
} else {
......@@ -1019,7 +1019,7 @@ func setlnolist(ll *NodeList, lno int) {
}
func setlno(n *Node, lno int) {
if !(n != nil) {
if n == nil {
return
}
......
......@@ -504,20 +504,20 @@ func arsize(b *obj.Biobuf, name string) int {
return i
}
func skiptopkgdef(b *obj.Biobuf) int {
func skiptopkgdef(b *obj.Biobuf) bool {
var p string
var sz int
/* archive header */
p = obj.Brdline(b, '\n')
if p == "" {
return 0
return false
}
if obj.Blinelen(b) != 8 {
return 0
return false
}
if p != "!<arch>\n" {
return 0
return false
}
/* symbol table may be first; skip it */
......@@ -533,9 +533,9 @@ func skiptopkgdef(b *obj.Biobuf) int {
sz = arsize(b, "__.PKGDEF")
if sz <= 0 {
return 0
return false
}
return 1
return true
}
func addidir(dir string) {
......@@ -560,7 +560,7 @@ func islocalname(name *Strlit) bool {
strings.HasPrefix(name.S, "../") || name.S == ".."
}
func findpkg(name *Strlit) int {
func findpkg(name *Strlit) bool {
var p *Idir
var q string
var suffix string
......@@ -568,7 +568,7 @@ func findpkg(name *Strlit) int {
if islocalname(name) {
if safemode != 0 || nolocalimports != 0 {
return 0
return false
}
// try .a before .6. important for building libraries:
......@@ -577,13 +577,13 @@ func findpkg(name *Strlit) int {
namebuf = fmt.Sprintf("%v.a", Zconv(name, 0))
if obj.Access(namebuf, 0) >= 0 {
return 1
return true
}
namebuf = fmt.Sprintf("%v.%c", Zconv(name, 0), Thearch.Thechar)
if obj.Access(namebuf, 0) >= 0 {
return 1
return true
}
return 0
return false
}
// local imports should be canonicalized already.
......@@ -592,17 +592,17 @@ func findpkg(name *Strlit) int {
_ = q
if path.Clean(name.S) != name.S {
Yyerror("non-canonical import path %v (should be %s)", Zconv(name, 0), q)
return 0
return false
}
for p = idirs; p != nil; p = p.link {
namebuf = fmt.Sprintf("%s/%v.a", p.dir, Zconv(name, 0))
if obj.Access(namebuf, 0) >= 0 {
return 1
return true
}
namebuf = fmt.Sprintf("%s/%v.%c", p.dir, Zconv(name, 0), Thearch.Thechar)
if obj.Access(namebuf, 0) >= 0 {
return 1
return true
}
}
......@@ -619,15 +619,15 @@ func findpkg(name *Strlit) int {
namebuf = fmt.Sprintf("%s/pkg/%s_%s%s%s/%v.a", goroot, goos, goarch, suffixsep, suffix, Zconv(name, 0))
if obj.Access(namebuf, 0) >= 0 {
return 1
return true
}
namebuf = fmt.Sprintf("%s/pkg/%s_%s%s%s/%v.%c", goroot, goos, goarch, suffixsep, suffix, Zconv(name, 0), Thearch.Thechar)
if obj.Access(namebuf, 0) >= 0 {
return 1
return true
}
}
return 0
return false
}
func fakeimport() {
......@@ -714,7 +714,7 @@ func importfile(f *Val, line int) {
}
}
if !(findpkg(path_) != 0) {
if !findpkg(path_) {
Yyerror("can't find import: \"%v\"", Zconv(f.U.Sval, 0))
errorexit()
}
......@@ -748,7 +748,7 @@ func importfile(f *Val, line int) {
n = len(namebuf)
if n > 2 && namebuf[n-2] == '.' && namebuf[n-1] == 'a' {
if !(skiptopkgdef(imp) != 0) {
if !skiptopkgdef(imp) {
Yyerror("import %s: not a package file", file)
errorexit()
}
......@@ -946,7 +946,7 @@ l0:
for {
if escchar('"', &escflag, &v) != 0 {
if escchar('"', &escflag, &v) {
break
}
if v < utf8.RuneSelf || escflag != 0 {
......@@ -988,12 +988,12 @@ l0:
/* '.' */
case '\'':
if escchar('\'', &escflag, &v) != 0 {
if escchar('\'', &escflag, &v) {
Yyerror("empty character literal or unescaped ' in character literal")
v = '\''
}
if !(escchar('\'', &escflag, &v) != 0) {
if !escchar('\'', &escflag, &v) {
Yyerror("missing '")
ungetc(int(v))
}
......@@ -1629,7 +1629,7 @@ go_:
}
if verb == "go:linkname" {
if !(imported_unsafe != 0) {
if imported_unsafe == 0 {
Yyerror("//go:linkname only allowed in Go files that import \"unsafe\"")
}
f := strings.Fields(cmd)
......@@ -1658,7 +1658,7 @@ go_:
}
if verb == "go:nowritebarrier" {
if !(compiling_runtime != 0) {
if compiling_runtime == 0 {
Yyerror("//go:nowritebarrier only allowed in runtime")
}
nowritebarrier = true
......@@ -1961,7 +1961,7 @@ func getr() int32 {
}
}
func escchar(e int, escflg *int, val *int64) int {
func escchar(e int, escflg *int, val *int64) bool {
var i int
var u int
var c int
......@@ -1973,21 +1973,21 @@ func escchar(e int, escflg *int, val *int64) int {
switch c {
case EOF:
Yyerror("eof in string")
return 1
return true
case '\n':
Yyerror("newline in string")
return 1
return true
case '\\':
break
default:
if c == e {
return 1
return true
}
*val = int64(c)
return 0
return false
}
u = 0
......@@ -2043,7 +2043,7 @@ func escchar(e int, escflg *int, val *int64) int {
}
*val = int64(c)
return 0
return false
hex:
l = 0
......@@ -2075,7 +2075,7 @@ hex:
}
*val = l
return 0
return false
oct:
l = int64(c) - '0'
......@@ -2095,7 +2095,7 @@ oct:
}
*val = l
return 0
return false
}
var syms = []struct {
......@@ -2530,12 +2530,12 @@ func lexinit() {
idealbool = typ(TBOOL)
s = Pkglookup("true", builtinpkg)
s.Def = Nodbool(1)
s.Def = Nodbool(true)
s.Def.Sym = Lookup("true")
s.Def.Type = idealbool
s = Pkglookup("false", builtinpkg)
s.Def = Nodbool(0)
s.Def = Nodbool(false)
s.Def.Sym = Lookup("false")
s.Def.Type = idealbool
......@@ -2704,14 +2704,14 @@ func lexfini() {
s = Lookup("true")
if s.Def == nil {
s.Def = Nodbool(1)
s.Def = Nodbool(true)
s.Def.Sym = s
s.Origpkg = builtinpkg
}
s = Lookup("false")
if s.Def == nil {
s.Def = Nodbool(0)
s.Def = Nodbool(false)
s.Def.Sym = s
s.Origpkg = builtinpkg
}
......@@ -3163,7 +3163,7 @@ func mkpackage(pkgname string) {
// leave s->block set to cause redeclaration
// errors if a conflicting top-level name is
// introduced by a different file.
if !(s.Def.Used != 0) && !(nsyntaxerrors != 0) {
if s.Def.Used == 0 && nsyntaxerrors == 0 {
pkgnotused(int(s.Def.Lineno), s.Def.Pkg.Path, s.Name)
}
s.Def = nil
......@@ -3173,7 +3173,7 @@ func mkpackage(pkgname string) {
if s.Def.Sym != s {
// throw away top-level name left over
// from previous import . "x"
if s.Def.Pack != nil && !(s.Def.Pack.Used != 0) && !(nsyntaxerrors != 0) {
if s.Def.Pack != nil && s.Def.Pack.Used == 0 && nsyntaxerrors == 0 {
pkgnotused(int(s.Def.Pack.Lineno), s.Def.Pack.Pkg.Path, "")
s.Def.Pack.Used = 1
}
......
......@@ -44,7 +44,7 @@ func mplsh(a *Mpint, quiet int) {
}
a.Ovf = uint8(c)
if a.Ovf != 0 && !(quiet != 0) {
if a.Ovf != 0 && quiet == 0 {
Yyerror("constant shift overflow")
}
}
......@@ -59,7 +59,7 @@ func mplshw(a *Mpint, quiet int) {
i = Mpprec - 1
if a.A[i] != 0 {
a.Ovf = 1
if !(quiet != 0) {
if quiet == 0 {
Yyerror("constant shift overflow")
}
}
......@@ -223,7 +223,7 @@ func mpaddfixfix(a *Mpint, b *Mpint, quiet int) {
}
a.Ovf = uint8(c)
if a.Ovf != 0 && !(quiet != 0) {
if a.Ovf != 0 && quiet == 0 {
Yyerror("constant addition overflow")
}
......@@ -663,15 +663,15 @@ func mpdivmodfixfix(q *Mpint, r *Mpint, n *Mpint, d *Mpint) {
q.Neg = uint8(ns ^ ds)
}
func mpiszero(a *Mpint) int {
func mpiszero(a *Mpint) bool {
var i int
for i = Mpprec - 1; i >= 0; i-- {
if a.A[i] != 0 {
return 0
return false
}
}
return 1
return true
}
func mpdivfract(a *Mpint, b *Mpint) {
......@@ -694,7 +694,7 @@ func mpdivfract(a *Mpint, b *Mpint) {
for j = 0; j < Mpscale; j++ {
x <<= 1
if mpcmp(&d, &n) <= 0 {
if !(mpiszero(&d) != 0) {
if !mpiszero(&d) {
x |= 1
}
mpsubfixfix(&n, &d)
......
This diff is collapsed.
......@@ -272,7 +272,7 @@ func allocauto(ptxt *obj.Prog) {
ll = Curfn.Dcl
n = ll.N
if n.Class == PAUTO && n.Op == ONAME && !(n.Used != 0) {
if n.Class == PAUTO && n.Op == ONAME && n.Used == 0 {
// No locals used at all
Curfn.Dcl = nil
......@@ -282,7 +282,7 @@ func allocauto(ptxt *obj.Prog) {
for ll = Curfn.Dcl; ll.Next != nil; ll = ll.Next {
n = ll.Next.N
if n.Class == PAUTO && n.Op == ONAME && !(n.Used != 0) {
if n.Class == PAUTO && n.Op == ONAME && n.Used == 0 {
ll.Next = nil
Curfn.Dcl.End = ll
break
......@@ -360,12 +360,12 @@ func Cgen_checknil(n *Node) {
}
// Ideally we wouldn't see any integer types here, but we do.
if n.Type == nil || (!(Isptr[n.Type.Etype] != 0) && !(Isint[n.Type.Etype] != 0) && n.Type.Etype != TUNSAFEPTR) {
if n.Type == nil || (Isptr[n.Type.Etype] == 0 && Isint[n.Type.Etype] == 0 && n.Type.Etype != TUNSAFEPTR) {
Dump("checknil", n)
Fatal("bad checknil")
}
if ((Thearch.Thechar == '5' || Thearch.Thechar == '9') && n.Op != OREGISTER) || !(n.Addable != 0) || n.Op == OLITERAL {
if ((Thearch.Thechar == '5' || Thearch.Thechar == '9') && n.Op != OREGISTER) || n.Addable == 0 || n.Op == OLITERAL {
Thearch.Regalloc(&reg, Types[Tptr], n)
Thearch.Cgen(n, &reg)
Thearch.Gins(obj.ACHECKNIL, &reg, nil)
......@@ -478,7 +478,7 @@ func compile(fn *Node) {
if fn.Wrapper != 0 {
ptxt.From3.Offset |= obj.WRAPPER
}
if fn.Needctxt != 0 {
if fn.Needctxt {
ptxt.From3.Offset |= obj.NEEDCTXT
}
if fn.Nosplit {
......@@ -557,7 +557,7 @@ func compile(fn *Node) {
Pc.Lineno = lineno
fixjmp(ptxt)
if !(Debug['N'] != 0) || Debug['R'] != 0 || Debug['P'] != 0 {
if Debug['N'] == 0 || Debug['R'] != 0 || Debug['P'] != 0 {
regopt(ptxt)
nilopt(ptxt)
}
......
......@@ -183,18 +183,13 @@ func printblock(bb *BasicBlock) {
// are two criteria for termination. If the end of basic block is reached a
// value of zero is returned. If the callback returns a non-zero value, the
// iteration is stopped and the value of the callback is returned.
func blockany(bb *BasicBlock, callback func(*obj.Prog) int) int {
var p *obj.Prog
var result int
for p = bb.last; p != nil; p = p.Opt.(*obj.Prog) {
result = callback(p)
if result != 0 {
return result
func blockany(bb *BasicBlock, f func(*obj.Prog) bool) bool {
for p := bb.last; p != nil; p = p.Opt.(*obj.Prog) {
if f(p) {
return true
}
}
return 0
return false
}
// Collects and returns and array of Node*s for functions arguments and local
......@@ -303,7 +298,7 @@ func iscall(prog *obj.Prog, name *obj.LSym) bool {
var isselectcommcasecall_names [5]*obj.LSym
func isselectcommcasecall(prog *obj.Prog) int {
func isselectcommcasecall(prog *obj.Prog) bool {
var i int32
if isselectcommcasecall_names[0] == nil {
......@@ -315,41 +310,41 @@ func isselectcommcasecall(prog *obj.Prog) int {
for i = 0; isselectcommcasecall_names[i] != nil; i++ {
if iscall(prog, isselectcommcasecall_names[i]) {
return 1
return true
}
}
return 0
return false
}
// Returns true for call instructions that target runtime·newselect.
var isnewselect_sym *obj.LSym
func isnewselect(prog *obj.Prog) int {
func isnewselect(prog *obj.Prog) bool {
if isnewselect_sym == nil {
isnewselect_sym = Linksym(Pkglookup("newselect", Runtimepkg))
}
return bool2int(iscall(prog, isnewselect_sym))
return iscall(prog, isnewselect_sym)
}
// Returns true for call instructions that target runtime·selectgo.
var isselectgocall_sym *obj.LSym
func isselectgocall(prog *obj.Prog) int {
func isselectgocall(prog *obj.Prog) bool {
if isselectgocall_sym == nil {
isselectgocall_sym = Linksym(Pkglookup("selectgo", Runtimepkg))
}
return bool2int(iscall(prog, isselectgocall_sym))
return iscall(prog, isselectgocall_sym)
}
var isdeferreturn_sym *obj.LSym
func isdeferreturn(prog *obj.Prog) int {
func isdeferreturn(prog *obj.Prog) bool {
if isdeferreturn_sym == nil {
isdeferreturn_sym = Linksym(Pkglookup("deferreturn", Runtimepkg))
}
return bool2int(iscall(prog, isdeferreturn_sym))
return iscall(prog, isdeferreturn_sym)
}
// Walk backwards from a runtime·selectgo call up to its immediately dominating
......@@ -366,7 +361,7 @@ func addselectgosucc(selectgo *BasicBlock) {
Fatal("selectgo does not have a newselect")
}
pred = pred.pred[0]
if blockany(pred, isselectcommcasecall) != 0 {
if blockany(pred, isselectcommcasecall) {
// A select comm case block should have exactly one
// successor.
if len(pred.succ) != 1 {
......@@ -386,7 +381,7 @@ func addselectgosucc(selectgo *BasicBlock) {
addedge(selectgo, succ)
}
if blockany(pred, isnewselect) != 0 {
if blockany(pred, isnewselect) {
// Reached the matching newselect.
break
}
......@@ -451,7 +446,7 @@ func newcfg(firstp *obj.Prog) []*BasicBlock {
p.Link.Opt = newblock(p.Link)
cfg = append(cfg, p.Link.Opt.(*BasicBlock))
}
} else if isselectcommcasecall(p) != 0 || isselectgocall(p) != 0 {
} else if isselectcommcasecall(p) || isselectgocall(p) {
// Accommodate implicit selectgo control flow.
if p.Link.Opt == nil {
p.Link.Opt = newblock(p.Link)
......@@ -478,7 +473,7 @@ func newcfg(firstp *obj.Prog) []*BasicBlock {
}
// Collect basic blocks with selectgo calls.
if isselectgocall(p) != 0 {
if isselectgocall(p) {
selectgo = append(selectgo, bb)
}
}
......@@ -627,7 +622,7 @@ func progeffects(prog *obj.Prog, vars []*Node, uevar *Bvec, varkill *Bvec, avari
// non-tail-call return instructions; see note above
// the for loop for details.
case PPARAMOUT:
if !(node.Addrtaken != 0) && prog.To.Type == obj.TYPE_NONE {
if node.Addrtaken == 0 && prog.To.Type == obj.TYPE_NONE {
bvset(uevar, i)
}
}
......@@ -674,7 +669,7 @@ func progeffects(prog *obj.Prog, vars []*Node, uevar *Bvec, varkill *Bvec, avari
bvset(uevar, pos)
}
if info.Flags&LeftWrite != 0 {
if from.Node != nil && !(Isfat(((from.Node).(*Node)).Type) != 0) {
if from.Node != nil && !Isfat(((from.Node).(*Node)).Type) {
bvset(varkill, pos)
}
}
......@@ -718,7 +713,7 @@ Next:
bvset(uevar, pos)
}
if info.Flags&RightWrite != 0 {
if to.Node != nil && (!(Isfat(((to.Node).(*Node)).Type) != 0) || prog.As == obj.AVARDEF) {
if to.Node != nil && (!Isfat(((to.Node).(*Node)).Type) || prog.As == obj.AVARDEF) {
bvset(varkill, pos)
}
}
......@@ -1050,7 +1045,7 @@ func twobitwalktype1(t *Type, xoffset *int64, bv *Bvec) {
if t.Bound < -1 {
Fatal("twobitwalktype1: invalid bound, %v", Tconv(t, 0))
}
if Isslice(t) != 0 {
if Isslice(t) {
// struct { byte *array; uintgo len; uintgo cap; }
if *xoffset&int64(Widthptr-1) != 0 {
Fatal("twobitwalktype1: invalid TARRAY alignment, %v", Tconv(t, 0))
......@@ -1101,7 +1096,7 @@ func twobitlivepointermap(lv *Liveness, liveout *Bvec, vars []*Node, args *Bvec,
for i = 0; ; i++ {
i = int32(bvnext(liveout, i))
if !(i >= 0) {
if i < 0 {
break
}
node = vars[i]
......@@ -1163,8 +1158,8 @@ func newpcdataprog(prog *obj.Prog, index int32) *obj.Prog {
// Returns true for instructions that are safe points that must be annotated
// with liveness information.
func issafepoint(prog *obj.Prog) int {
return bool2int(prog.As == obj.ATEXT || prog.As == obj.ACALL)
func issafepoint(prog *obj.Prog) bool {
return prog.As == obj.ATEXT || prog.As == obj.ACALL
}
// Initializes the sets for solving the live variables. Visits all the
......@@ -1332,7 +1327,7 @@ func livenesssolve(lv *Liveness) {
// This function is slow but it is only used for generating debug prints.
// Check whether n is marked live in args/locals.
func islive(n *Node, args *Bvec, locals *Bvec) int {
func islive(n *Node, args *Bvec, locals *Bvec) bool {
var i int
switch n.Class {
......@@ -1340,19 +1335,19 @@ func islive(n *Node, args *Bvec, locals *Bvec) int {
PPARAMOUT:
for i = 0; int64(i) < n.Type.Width/int64(Widthptr)*obj.BitsPerPointer; i++ {
if bvget(args, int32(n.Xoffset/int64(Widthptr)*obj.BitsPerPointer+int64(i))) != 0 {
return 1
return true
}
}
case PAUTO:
for i = 0; int64(i) < n.Type.Width/int64(Widthptr)*obj.BitsPerPointer; i++ {
if bvget(locals, int32((n.Xoffset+stkptrsize)/int64(Widthptr)*obj.BitsPerPointer+int64(i))) != 0 {
return 1
return true
}
}
}
return 0
return false
}
// Visits all instructions in a basic block and computes a bit vector of live
......@@ -1427,21 +1422,21 @@ func livenessepilogue(lv *Liveness) {
bvor(any, any, avarinit)
bvor(all, all, avarinit)
if issafepoint(p) != 0 {
if issafepoint(p) {
// Annotate ambiguously live variables so that they can
// be zeroed at function entry.
// livein and liveout are dead here and used as temporaries.
bvresetall(livein)
bvandnot(liveout, any, all)
if !(bvisempty(liveout) != 0) {
if !bvisempty(liveout) {
for pos = 0; pos < liveout.n; pos++ {
if !(bvget(liveout, pos) != 0) {
if bvget(liveout, pos) == 0 {
continue
}
bvset(all, pos) // silence future warnings in this block
n = lv.vars[pos]
if !(n.Needzero != 0) {
if n.Needzero == 0 {
n.Needzero = 1
if debuglive >= 1 {
Warnl(int(p.Lineno), "%v: %v is ambiguously live", Nconv(Curfn.Nname, 0), Nconv(n, obj.FmtLong))
......@@ -1517,7 +1512,7 @@ func livenessepilogue(lv *Liveness) {
bvcopy(liveout, livein)
bvandnot(livein, liveout, varkill)
bvor(livein, livein, uevar)
if debuglive >= 3 && issafepoint(p) != 0 {
if debuglive >= 3 && issafepoint(p) {
fmt.Printf("%v\n", p)
printvars("uevar", uevar, lv.vars)
printvars("varkill", varkill, lv.vars)
......@@ -1525,7 +1520,7 @@ func livenessepilogue(lv *Liveness) {
printvars("liveout", liveout, lv.vars)
}
if issafepoint(p) != 0 {
if issafepoint(p) {
// Found an interesting instruction, record the
// corresponding liveness information.
......@@ -1534,7 +1529,7 @@ func livenessepilogue(lv *Liveness) {
// input parameters.
if p.As == obj.ATEXT {
for j = 0; j < liveout.n; j++ {
if !(bvget(liveout, j) != 0) {
if bvget(liveout, j) == 0 {
continue
}
n = lv.vars[j]
......@@ -1574,7 +1569,7 @@ func livenessepilogue(lv *Liveness) {
numlive = 0
for j = 0; j < int32(len(lv.vars)); j++ {
n = lv.vars[j]
if islive(n, args, locals) != 0 {
if islive(n, args, locals) {
fmt_ += fmt.Sprintf(" %v", Nconv(n, 0))
numlive++
}
......@@ -1592,7 +1587,7 @@ func livenessepilogue(lv *Liveness) {
// Only CALL instructions need a PCDATA annotation.
// The TEXT instruction annotation is implicit.
if p.As == obj.ACALL {
if isdeferreturn(p) != 0 {
if isdeferreturn(p) {
// runtime.deferreturn modifies its return address to return
// back to the CALL, not to the subsequent instruction.
// Because the return comes back one instruction early,
......@@ -1760,11 +1755,11 @@ func printbitset(printed int, name string, vars []*Node, bits *Bvec) int {
started = 0
for i = 0; i < len(vars); i++ {
if !(bvget(bits, int32(i)) != 0) {
if bvget(bits, int32(i)) == 0 {
continue
}
if !(started != 0) {
if !(printed != 0) {
if started == 0 {
if printed == 0 {
fmt.Printf("\t")
} else {
fmt.Printf(" ")
......@@ -1856,14 +1851,14 @@ func livenessprintdebug(lv *Liveness) {
if printed != 0 {
fmt.Printf("\n")
}
if issafepoint(p) != 0 {
if issafepoint(p) {
args = lv.argslivepointers[pcdata]
locals = lv.livepointers[pcdata]
fmt.Printf("\tlive=")
printed = 0
for j = 0; j < len(lv.vars); j++ {
n = lv.vars[j]
if islive(n, args, locals) != 0 {
if islive(n, args, locals) {
tmp9 := printed
printed++
if tmp9 != 0 {
......
......@@ -179,7 +179,7 @@ void proginfo(ProgInfo*, Prog*);
var noreturn_symlist [10]*Sym
func Noreturn(p *obj.Prog) int {
func Noreturn(p *obj.Prog) bool {
var s *Sym
var i int
......@@ -195,18 +195,18 @@ func Noreturn(p *obj.Prog) int {
}
if p.To.Node == nil {
return 0
return false
}
s = ((p.To.Node).(*Node)).Sym
if s == nil {
return 0
return false
}
for i = 0; noreturn_symlist[i] != nil; i++ {
if s == noreturn_symlist[i] {
return 1
return true
}
}
return 0
return false
}
// JMP chasing and removal.
......@@ -325,7 +325,7 @@ func fixjmp(firstp *obj.Prog) {
// pass 4: elide JMP to next instruction.
// only safe if there are no jumps to JMPs anymore.
if !(jmploop != 0) {
if jmploop == 0 {
last = nil
for p = firstp; p != nil; p = p.Link {
if p.As == obj.AJMP && p.To.Type == obj.TYPE_BRANCH && p.To.U.Branch == p.Link {
......@@ -434,7 +434,7 @@ func Flowstart(firstp *obj.Prog, newData func() interface{}) *Graph {
for f = start; f != nil; f = f.Link {
p = f.Prog
Thearch.Proginfo(&info, p)
if !(info.Flags&Break != 0) {
if info.Flags&Break == 0 {
f1 = f.Link
f.S1 = f1
f1.P1 = f
......@@ -492,11 +492,11 @@ func postorder(r *Flow, rpo2r []*Flow, n int32) int32 {
r.Rpo = 1
r1 = r.S1
if r1 != nil && !(r1.Rpo != 0) {
if r1 != nil && r1.Rpo == 0 {
n = postorder(r1, rpo2r, n)
}
r1 = r.S2
if r1 != nil && !(r1.Rpo != 0) {
if r1 != nil && r1.Rpo == 0 {
n = postorder(r1, rpo2r, n)
}
rpo2r[n] = r
......@@ -529,26 +529,26 @@ func rpolca(idom []int32, rpo1 int32, rpo2 int32) int32 {
return rpo1
}
func doms(idom []int32, r int32, s int32) int {
func doms(idom []int32, r int32, s int32) bool {
for s > r {
s = idom[s]
}
return bool2int(s == r)
return s == r
}
func loophead(idom []int32, r *Flow) int {
func loophead(idom []int32, r *Flow) bool {
var src int32
src = r.Rpo
if r.P1 != nil && doms(idom, src, r.P1.Rpo) != 0 {
return 1
if r.P1 != nil && doms(idom, src, r.P1.Rpo) {
return true
}
for r = r.P2; r != nil; r = r.P2link {
if doms(idom, src, r.Rpo) != 0 {
return 1
if doms(idom, src, r.Rpo) {
return true
}
}
return 0
return false
}
func loopmark(rpo2r **Flow, head int32, r *Flow) {
......@@ -620,7 +620,7 @@ func flowrpo(g *Graph) {
for i = 0; i < nr; i++ {
r1 = rpo2r[i]
r1.Loop++
if r1.P2 != nil && loophead(idom, r1) != 0 {
if r1.P2 != nil && loophead(idom, r1) {
loopmark(&rpo2r[0], i, r1)
}
}
......@@ -718,8 +718,8 @@ func (x startcmp) Less(i, j int) bool {
}
// Is n available for merging?
func canmerge(n *Node) int {
return bool2int(n.Class == PAUTO && strings.HasPrefix(n.Sym.Name, "autotmp"))
func canmerge(n *Node) bool {
return n.Class == PAUTO && strings.HasPrefix(n.Sym.Name, "autotmp")
}
func mergetemp(firstp *obj.Prog) {
......@@ -757,7 +757,7 @@ func mergetemp(firstp *obj.Prog) {
// Build list of all mergeable variables.
nvar = 0
for l = Curfn.Dcl; l != nil; l = l.Next {
if canmerge(l.N) != 0 {
if canmerge(l.N) {
nvar++
}
}
......@@ -766,7 +766,7 @@ func mergetemp(firstp *obj.Prog) {
nvar = 0
for l = Curfn.Dcl; l != nil; l = l.Next {
n = l.N
if canmerge(n) != 0 {
if canmerge(n) {
v = &var_[nvar]
nvar++
n.Opt = v
......@@ -826,9 +826,9 @@ func mergetemp(firstp *obj.Prog) {
if f != nil && f.Data.(*Flow) == nil {
p = f.Prog
Thearch.Proginfo(&info, p)
if p.To.Node == v.node && (info.Flags&RightWrite != 0) && !(info.Flags&RightRead != 0) {
if p.To.Node == v.node && (info.Flags&RightWrite != 0) && info.Flags&RightRead == 0 {
p.As = obj.ANOP
p.To = obj.Zprog.To
p.To = obj.Addr{}
v.removed = 1
if debugmerge > 0 && Debug['v'] != 0 {
fmt.Printf("drop write-only %v\n", Sconv(v.node.Sym, 0))
......@@ -851,7 +851,7 @@ func mergetemp(firstp *obj.Prog) {
const (
SizeAny = SizeB | SizeW | SizeL | SizeQ | SizeF | SizeD
)
if p.From.Node == v.node && p1.To.Node == v.node && (info.Flags&Move != 0) && !((info.Flags|info1.Flags)&(LeftAddr|RightAddr) != 0) && info.Flags&SizeAny == info1.Flags&SizeAny {
if p.From.Node == v.node && p1.To.Node == v.node && (info.Flags&Move != 0) && (info.Flags|info1.Flags)&(LeftAddr|RightAddr) == 0 && info.Flags&SizeAny == info1.Flags&SizeAny {
p1.From = p.From
Thearch.Excise(f)
v.removed = 1
......@@ -1010,7 +1010,7 @@ func mergetemp(firstp *obj.Prog) {
// Delete merged nodes from declaration list.
for lp = &Curfn.Dcl; ; {
l = *lp
if !(l != nil) {
if l == nil {
break
}
......@@ -1126,11 +1126,11 @@ func nilopt(firstp *obj.Prog) {
nkill = 0
for f = g.Start; f != nil; f = f.Link {
p = f.Prog
if p.As != obj.ACHECKNIL || !(Thearch.Regtyp(&p.From) != 0) {
if p.As != obj.ACHECKNIL || !Thearch.Regtyp(&p.From) {
continue
}
ncheck++
if Thearch.Stackaddr(&p.From) != 0 {
if Thearch.Stackaddr(&p.From) {
if Debug_checknil != 0 && p.Lineno > 1 {
Warnl(int(p.Lineno), "removed nil check of SP address")
}
......@@ -1177,13 +1177,13 @@ func nilwalkback(fcheck *Flow) {
for f = fcheck; f != nil; f = Uniqp(f) {
p = f.Prog
Thearch.Proginfo(&info, p)
if (info.Flags&RightWrite != 0) && Thearch.Sameaddr(&p.To, &fcheck.Prog.From) != 0 {
if (info.Flags&RightWrite != 0) && Thearch.Sameaddr(&p.To, &fcheck.Prog.From) {
// Found initialization of value we're checking for nil.
// without first finding the check, so this one is unchecked.
return
}
if f != fcheck && p.As == obj.ACHECKNIL && Thearch.Sameaddr(&p.From, &fcheck.Prog.From) != 0 {
if f != fcheck && p.As == obj.ACHECKNIL && Thearch.Sameaddr(&p.From, &fcheck.Prog.From) {
fcheck.Data = &killed
return
}
......@@ -1249,12 +1249,12 @@ func nilwalkfwd(fcheck *Flow) {
p = f.Prog
Thearch.Proginfo(&info, p)
if (info.Flags&LeftRead != 0) && Thearch.Smallindir(&p.From, &fcheck.Prog.From) != 0 {
if (info.Flags&LeftRead != 0) && Thearch.Smallindir(&p.From, &fcheck.Prog.From) {
fcheck.Data = &killed
return
}
if (info.Flags&(RightRead|RightWrite) != 0) && Thearch.Smallindir(&p.To, &fcheck.Prog.From) != 0 {
if (info.Flags&(RightRead|RightWrite) != 0) && Thearch.Smallindir(&p.To, &fcheck.Prog.From) {
fcheck.Data = &killed
return
}
......@@ -1265,12 +1265,12 @@ func nilwalkfwd(fcheck *Flow) {
}
// Stop if value is lost.
if (info.Flags&RightWrite != 0) && Thearch.Sameaddr(&p.To, &fcheck.Prog.From) != 0 {
if (info.Flags&RightWrite != 0) && Thearch.Sameaddr(&p.To, &fcheck.Prog.From) {
return
}
// Stop if memory write.
if (info.Flags&RightWrite != 0) && !(Thearch.Regtyp(&p.To) != 0) {
if (info.Flags&RightWrite != 0) && !Thearch.Regtyp(&p.To) {
return
}
......
......@@ -30,27 +30,27 @@ var omit_pkgs = []string{"runtime", "runtime/race"}
// Memory accesses in the packages are either uninteresting or will cause false positives.
var noinst_pkgs = []string{"sync", "sync/atomic"}
func ispkgin(pkgs []string) int {
func ispkgin(pkgs []string) bool {
var i int
if myimportpath != "" {
for i = 0; i < len(pkgs); i++ {
if myimportpath == pkgs[i] {
return 1
return true
}
}
}
return 0
return false
}
func isforkfunc(fn *Node) int {
func isforkfunc(fn *Node) bool {
// Special case for syscall.forkAndExecInChild.
// In the child, this function must not acquire any locks, because
// they might have been locked at the time of the fork. This means
// no rescheduling, no malloc calls, and no new stack segments.
// Race instrumentation does all of the above.
return bool2int(myimportpath != "" && myimportpath == "syscall" && fn.Nname.Sym.Name == "forkAndExecInChild")
return myimportpath != "" && myimportpath == "syscall" && fn.Nname.Sym.Name == "forkAndExecInChild"
}
func racewalk(fn *Node) {
......@@ -58,11 +58,11 @@ func racewalk(fn *Node) {
var nodpc *Node
var s string
if ispkgin(omit_pkgs) != 0 || isforkfunc(fn) != 0 {
if ispkgin(omit_pkgs) || isforkfunc(fn) {
return
}
if !(ispkgin(noinst_pkgs) != 0) {
if !ispkgin(noinst_pkgs) {
racewalklist(fn.Nbody, nil)
// nothing interesting for race detector in fn->enter
......@@ -147,7 +147,6 @@ func racewalknode(np **Node, init **NodeList, wr int, skip int) {
switch n.Op {
default:
Fatal("racewalk: unknown node type %v", Oconv(int(n.Op), 0))
fallthrough
case OAS,
OAS2FUNC:
......@@ -263,7 +262,7 @@ func racewalknode(np **Node, init **NodeList, wr int, skip int) {
OLEN,
OCAP:
racewalknode(&n.Left, init, 0, 0)
if Istype(n.Left.Type, TMAP) != 0 {
if Istype(n.Left.Type, TMAP) {
n1 = Nod(OCONVNOP, n.Left, nil)
n1.Type = Ptrto(Types[TUINT8])
n1 = Nod(OIND, n1, nil)
......@@ -326,9 +325,9 @@ func racewalknode(np **Node, init **NodeList, wr int, skip int) {
goto ret
case OINDEX:
if !(Isfixedarray(n.Left.Type) != 0) {
if !Isfixedarray(n.Left.Type) {
racewalknode(&n.Left, init, 0, 0)
} else if !(islvalue(n.Left) != 0) {
} else if !islvalue(n.Left) {
// index of unaddressable array, like Map[k][i].
racewalknode(&n.Left, init, wr, 0)
......@@ -468,34 +467,34 @@ ret:
*np = n
}
func isartificial(n *Node) int {
func isartificial(n *Node) bool {
// compiler-emitted artificial things that we do not want to instrument,
// cant' possibly participate in a data race.
if n.Op == ONAME && n.Sym != nil && n.Sym.Name != "" {
if n.Sym.Name == "_" {
return 1
return true
}
// autotmp's are always local
if strings.HasPrefix(n.Sym.Name, "autotmp_") {
return 1
return true
}
// statictmp's are read-only
if strings.HasPrefix(n.Sym.Name, "statictmp_") {
return 1
return true
}
// go.itab is accessed only by the compiler and runtime (assume safe)
if n.Sym.Pkg != nil && n.Sym.Pkg.Name != "" && n.Sym.Pkg.Name == "go.itab" {
return 1
return true
}
}
return 0
return false
}
func callinstr(np **Node, init **NodeList, wr int, skip int) int {
func callinstr(np **Node, init **NodeList, wr int, skip int) bool {
var name string
var f *Node
var b *Node
......@@ -510,18 +509,18 @@ func callinstr(np **Node, init **NodeList, wr int, skip int) int {
// n, n->op, n->type ? n->type->etype : -1, n->class);
if skip != 0 || n.Type == nil || n.Type.Etype >= TIDEAL {
return 0
return false
}
t = n.Type
if isartificial(n) != 0 {
return 0
if isartificial(n) {
return false
}
b = outervalue(n)
// it skips e.g. stores to ... parameter array
if isartificial(b) != 0 {
return 0
if isartificial(b) {
return false
}
class = int(b.Class)
......@@ -539,7 +538,7 @@ func callinstr(np **Node, init **NodeList, wr int, skip int) int {
n = treecopy(n)
makeaddable(n)
if t.Etype == TSTRUCT || Isfixedarray(t) != 0 {
if t.Etype == TSTRUCT || Isfixedarray(t) {
name = "racereadrange"
if wr != 0 {
name = "racewriterange"
......@@ -554,10 +553,10 @@ func callinstr(np **Node, init **NodeList, wr int, skip int) int {
}
*init = list(*init, f)
return 1
return true
}
return 0
return false
}
// makeaddable returns a node whose memory location is the
......@@ -572,7 +571,7 @@ func makeaddable(n *Node) {
// an addressable value.
switch n.Op {
case OINDEX:
if Isfixedarray(n.Left.Type) != 0 {
if Isfixedarray(n.Left.Type) {
makeaddable(n.Left)
}
......@@ -596,7 +595,7 @@ func uintptraddr(n *Node) *Node {
var r *Node
r = Nod(OADDR, n, nil)
r.Bounded = 1
r.Bounded = true
r = conv(r, Types[TUNSAFEPTR])
r = conv(r, Types[TUINTPTR])
return r
......
......@@ -43,7 +43,7 @@ func typecheckrange(n *Node) {
}
}
if Isptr[t.Etype] != 0 && Isfixedarray(t.Type) != 0 {
if Isptr[t.Etype] != 0 && Isfixedarray(t.Type) {
t = t.Type
}
n.Type = t
......@@ -63,7 +63,7 @@ func typecheckrange(n *Node) {
t2 = t.Type
case TCHAN:
if !(t.Chan&Crecv != 0) {
if t.Chan&Crecv == 0 {
Yyerror("invalid operation: range %v (receive from send-only type %v)", Nconv(n.Right, 0), Tconv(n.Right.Type, 0))
goto out
}
......@@ -184,7 +184,6 @@ func walkrange(n *Node) {
switch t.Etype {
default:
Fatal("walkrange")
fallthrough
// Lower n into runtime·memclr if possible, for
// fast zeroing of slices and arrays (issue 5373).
......@@ -196,8 +195,8 @@ func walkrange(n *Node) {
//
// in which the evaluation of a is side-effect-free.
case TARRAY:
if !(Debug['N'] != 0) {
if !(flag_race != 0) {
if Debug['N'] == 0 {
if flag_race == 0 {
if v1 != nil {
if v2 == nil {
if n.Nbody != nil {
......@@ -206,10 +205,10 @@ func walkrange(n *Node) {
tmp = n.Nbody.N // first statement of body
if tmp.Op == OAS {
if tmp.Left.Op == OINDEX {
if samesafeexpr(tmp.Left.Left, a) != 0 {
if samesafeexpr(tmp.Left.Right, v1) != 0 {
if samesafeexpr(tmp.Left.Left, a) {
if samesafeexpr(tmp.Left.Right, v1) {
if t.Type.Width > 0 {
if iszero(tmp.Right) != 0 {
if iszero(tmp.Right) {
// Convert to
// if len(a) != 0 {
// hp = &a[0]
......@@ -227,7 +226,7 @@ func walkrange(n *Node) {
hp = temp(Ptrto(Types[TUINT8]))
tmp = Nod(OINDEX, a, Nodintconst(0))
tmp.Bounded = 1
tmp.Bounded = true
tmp = Nod(OADDR, tmp, nil)
tmp = Nod(OCONVNOP, tmp, nil)
tmp.Type = Ptrto(Types[TUINT8])
......@@ -282,7 +281,7 @@ func walkrange(n *Node) {
if v2 != nil {
hp = temp(Ptrto(n.Type.Type))
tmp = Nod(OINDEX, ha, Nodintconst(0))
tmp.Bounded = 1
tmp.Bounded = true
init = list(init, Nod(OAS, hp, Nod(OADDR, tmp, nil)))
}
......@@ -369,7 +368,7 @@ func walkrange(n *Node) {
}
hb = temp(Types[TBOOL])
n.Ntest = Nod(ONE, hb, Nodbool(0))
n.Ntest = Nod(ONE, hb, Nodbool(false))
a = Nod(OAS2RECV, nil, nil)
a.Typecheck = 1
a.List = list(list1(hv1), hb)
......
......@@ -355,7 +355,7 @@ func methods(t *Type) *Sig {
// type stored in interface word
it = t
if !(isdirectiface(it) != 0) {
if !isdirectiface(it) {
it = Ptrto(t)
}
......@@ -370,10 +370,10 @@ func methods(t *Type) *Sig {
if f.Type.Etype != TFUNC || f.Type.Thistuple == 0 {
Fatal("non-method on %v method %v %v\n", Tconv(mt, 0), Sconv(f.Sym, 0), Tconv(f, 0))
}
if !(getthisx(f.Type).Type != nil) {
if getthisx(f.Type).Type == nil {
Fatal("receiver with no type on %v method %v %v\n", Tconv(mt, 0), Sconv(f.Sym, 0), Tconv(f, 0))
}
if f.Nointerface != 0 {
if f.Nointerface {
continue
}
......@@ -391,7 +391,7 @@ func methods(t *Type) *Sig {
if Isptr[this.Etype] != 0 && this.Type == t {
continue
}
if Isptr[this.Etype] != 0 && !(Isptr[t.Etype] != 0) && f.Embedded != 2 && !(isifacemethod(f.Type) != 0) {
if Isptr[this.Etype] != 0 && Isptr[t.Etype] == 0 && f.Embedded != 2 && !isifacemethod(f.Type) {
continue
}
......@@ -412,7 +412,7 @@ func methods(t *Type) *Sig {
a.type_ = methodfunc(f.Type, t)
a.mtype = methodfunc(f.Type, nil)
if !(a.isym.Flags&SymSiggen != 0) {
if a.isym.Flags&SymSiggen == 0 {
a.isym.Flags |= SymSiggen
if !Eqtype(this, it) || this.Width < Types[Tptr].Width {
compiling_wrappers = 1
......@@ -421,7 +421,7 @@ func methods(t *Type) *Sig {
}
}
if !(a.tsym.Flags&SymSiggen != 0) {
if a.tsym.Flags&SymSiggen == 0 {
a.tsym.Flags |= SymSiggen
if !Eqtype(this, t) {
compiling_wrappers = 1
......@@ -489,7 +489,7 @@ func imethods(t *Type) *Sig {
// code can refer to it.
isym = methodsym(method, t, 0)
if !(isym.Flags&SymSiggen != 0) {
if isym.Flags&SymSiggen == 0 {
isym.Flags |= SymSiggen
genwrapper(t, f, isym, 0)
}
......@@ -649,7 +649,7 @@ var kinds = []int{
func haspointers(t *Type) bool {
var t1 *Type
var ret int
var ret bool
if t.Haspointers != 0 {
return t.Haspointers-1 != 0
......@@ -672,26 +672,26 @@ func haspointers(t *Type) bool {
TCOMPLEX64,
TCOMPLEX128,
TBOOL:
ret = 0
ret = false
case TARRAY:
if t.Bound < 0 { // slice
ret = 1
ret = true
break
}
if t.Bound == 0 { // empty array
ret = 0
ret = false
break
}
ret = bool2int(haspointers(t.Type))
ret = haspointers(t.Type)
case TSTRUCT:
ret = 0
ret = false
for t1 = t.Type; t1 != nil; t1 = t1.Down {
if haspointers(t1.Type) {
ret = 1
ret = true
break
}
}
......@@ -706,11 +706,11 @@ func haspointers(t *Type) bool {
TFUNC:
fallthrough
default:
ret = 1
ret = true
}
t.Haspointers = uint8(1 + ret)
return ret != 0
t.Haspointers = 1 + uint8(bool2int(ret))
return ret
}
/*
......@@ -724,7 +724,7 @@ func dcommontype(s *Sym, ot int, t *Type) int {
var i int
var alg int
var sizeofAlg int
var gcprog int
var gcprog bool
var sptr *Sym
var algsym *Sym
var zero *Sym
......@@ -751,7 +751,7 @@ func dcommontype(s *Sym, ot int, t *Type) int {
algsym = dalgsym(t)
}
if t.Sym != nil && !(Isptr[t.Etype] != 0) {
if t.Sym != nil && Isptr[t.Etype] == 0 {
sptr = dtypesym(Ptrto(t))
} else {
sptr = weaktypesym(Ptrto(t))
......@@ -811,10 +811,10 @@ func dcommontype(s *Sym, ot int, t *Type) int {
if !haspointers(t) {
i |= obj.KindNoPointers
}
if isdirectiface(t) != 0 {
if isdirectiface(t) {
i |= obj.KindDirectIface
}
if gcprog != 0 {
if gcprog {
i |= obj.KindGCProg
}
ot = duint8(s, ot, uint8(i)) // kind
......@@ -825,7 +825,7 @@ func dcommontype(s *Sym, ot int, t *Type) int {
}
// gc
if gcprog != 0 {
if gcprog {
gengcprog(t, &gcprog0, &gcprog1)
if gcprog0 != nil {
ot = dsymptr(s, ot, gcprog0, 0)
......@@ -937,7 +937,7 @@ func typenamesym(t *Type) *Sym {
var s *Sym
var n *Node
if t == nil || (Isptr[t.Etype] != 0 && t.Type == nil) || isideal(t) != 0 {
if t == nil || (Isptr[t.Etype] != 0 && t.Type == nil) || isideal(t) {
Fatal("typename %v", Tconv(t, 0))
}
s = typesym(t)
......@@ -987,7 +987,7 @@ func weaktypesym(t *Type) *Sym {
* Returns 1 if t has a reflexive equality operator.
* That is, if x==x for all x of type t.
*/
func isreflexive(t *Type) int {
func isreflexive(t *Type) bool {
var t1 *Type
switch t.Etype {
case TBOOL,
......@@ -1007,33 +1007,33 @@ func isreflexive(t *Type) int {
TUNSAFEPTR,
TSTRING,
TCHAN:
return 1
return true
case TFLOAT32,
TFLOAT64,
TCOMPLEX64,
TCOMPLEX128,
TINTER:
return 0
return false
case TARRAY:
if Isslice(t) != 0 {
if Isslice(t) {
Fatal("slice can't be a map key: %v", Tconv(t, 0))
}
return isreflexive(t.Type)
case TSTRUCT:
for t1 = t.Type; t1 != nil; t1 = t1.Down {
if !(isreflexive(t1.Type) != 0) {
return 0
if !isreflexive(t1.Type) {
return false
}
}
return 1
return true
default:
Fatal("bad type for map key: %v", Tconv(t, 0))
return 0
return false
}
}
......@@ -1062,7 +1062,7 @@ func dtypesym(t *Type) *Sym {
t = Types[t.Etype]
}
if isideal(t) != 0 {
if isideal(t) {
Fatal("dtypesym %v", Tconv(t, 0))
}
......@@ -1090,7 +1090,7 @@ func dtypesym(t *Type) *Sym {
}
// named types from other files are defined only by those files
if tbase.Sym != nil && !(tbase.Local != 0) {
if tbase.Sym != nil && tbase.Local == 0 {
return s
}
if isforw[tbase.Etype] != 0 {
......@@ -1230,7 +1230,7 @@ ok:
}
ot = duint16(s, ot, uint16(mapbucket(t).Width))
ot = duint8(s, ot, uint8(isreflexive(t.Down)))
ot = duint8(s, ot, uint8(bool2int(isreflexive(t.Down))))
case TPTR32,
TPTR64:
......@@ -1265,7 +1265,7 @@ ok:
ot = duintxx(s, ot, uint64(n), Widthint)
for t1 = t.Type; t1 != nil; t1 = t1.Down {
// ../../runtime/type.go:/structField
if t1.Sym != nil && !(t1.Embedded != 0) {
if t1.Sym != nil && t1.Embedded == 0 {
ot = dgostringptr(s, ot, t1.Sym.Name)
if exportname(t1.Sym.Name) {
ot = dgostringptr(s, ot, "")
......@@ -1447,12 +1447,12 @@ func dalgsym(t *Type) *Sym {
return s
}
func usegcprog(t *Type) int {
func usegcprog(t *Type) bool {
var size int64
var nptr int64
if !haspointers(t) {
return 0
return false
}
if t.Width == BADWIDTH {
dowidth(t)
......@@ -1473,7 +1473,7 @@ func usegcprog(t *Type) int {
// While large objects usually contain arrays; and even if it don't
// the program uses 2-bits per word while mask uses 4-bits per word,
// so the program is still smaller.
return bool2int(size > int64(2*Widthptr))
return size > int64(2*Widthptr)
}
// Generates sparse GC bitmask (4 bits per word).
......@@ -1483,7 +1483,7 @@ func gengcmask(t *Type, gcmask []byte) {
var nptr int64
var i int64
var j int64
var half int
var half bool
var bits uint8
var pos []byte
......@@ -1505,7 +1505,7 @@ func gengcmask(t *Type, gcmask []byte) {
pos = gcmask
nptr = (t.Width + int64(Widthptr) - 1) / int64(Widthptr)
half = 0
half = false
// If number of words is odd, repeat the mask.
// This makes simpler handling of arrays in runtime.
......@@ -1520,12 +1520,12 @@ func gengcmask(t *Type, gcmask []byte) {
bits = obj.BitsScalar
}
bits <<= 2
if half != 0 {
if half {
bits <<= 4
}
pos[0] |= byte(bits)
half = bool2int(!(half != 0))
if !(half != 0) {
half = !half
if !half {
pos = pos[1:]
}
}
......@@ -1699,7 +1699,7 @@ func gengcprog1(g *ProgGen, t *Type, xoffset *int64) {
*xoffset += t.Width
case TARRAY:
if Isslice(t) != 0 {
if Isslice(t) {
proggendata(g, obj.BitsPointer)
proggendata(g, obj.BitsScalar)
proggendata(g, obj.BitsScalar)
......
......@@ -74,7 +74,7 @@ func setaddrs(bit Bits) {
var v *Var
var node *Node
for bany(&bit) != 0 {
for bany(&bit) {
// convert each bit to a variable
i = bnum(bit)
......@@ -169,9 +169,9 @@ func addmove(r *Flow, bn int, rn int, f int) {
p1.From.Type = obj.TYPE_REG
p1.From.Reg = int16(rn)
p1.From.Name = obj.NAME_NONE
if !(f != 0) {
if f == 0 {
p1.From = *a
*a = obj.Zprog.From
*a = obj.Addr{}
a.Type = obj.TYPE_REG
a.Reg = int16(rn)
}
......@@ -182,18 +182,18 @@ func addmove(r *Flow, bn int, rn int, f int) {
Ostats.Nspill++
}
func overlap_reg(o1 int64, w1 int, o2 int64, w2 int) int {
func overlap_reg(o1 int64, w1 int, o2 int64, w2 int) bool {
var t1 int64
var t2 int64
t1 = o1 + int64(w1)
t2 = o2 + int64(w2)
if !(t1 > o2 && t2 > o1) {
return 0
if t1 <= o2 || t2 <= o1 {
return false
}
return 1
return true
}
func mkvar(f *Flow, a *obj.Addr) Bits {
......@@ -292,7 +292,7 @@ func mkvar(f *Flow, a *obj.Addr) Bits {
if int(v.etype) == et {
if int64(v.width) == w {
// TODO(rsc): Remove special case for arm here.
if !(flag != 0) || Thearch.Thechar != '5' {
if flag == 0 || Thearch.Thechar != '5' {
return blsh(uint(i))
}
}
......@@ -300,7 +300,7 @@ func mkvar(f *Flow, a *obj.Addr) Bits {
}
// if they overlap, disable both
if overlap_reg(v.offset, v.width, o, int(w)) != 0 {
if overlap_reg(v.offset, v.width, o, int(w)) {
// print("disable overlap %s %d %d %d %d, %E != %E\n", s->name, v->offset, v->width, o, w, v->etype, et);
v.addr = 1
......@@ -446,7 +446,7 @@ func prop(f *Flow, ref Bits, cal Bits) {
switch f1.Prog.As {
case obj.ACALL:
if Noreturn(f1.Prog) != 0 {
if Noreturn(f1.Prog) {
break
}
......@@ -499,7 +499,7 @@ func prop(f *Flow, ref Bits, cal Bits) {
// This will set the bits at most twice, keeping the overall loop linear.
v1, _ = v.node.Opt.(*Var)
if v == v1 || !(btest(&cal, uint(v1.id)) != 0) {
if v == v1 || !btest(&cal, uint(v1.id)) {
for ; v1 != nil; v1 = v1.nextinnode {
biset(&cal, uint(v1.id))
}
......@@ -633,7 +633,7 @@ func paint1(f *Flow, bn int) {
return
}
for {
if !(r.refbehind.b[z]&bb != 0) {
if r.refbehind.b[z]&bb == 0 {
break
}
f1 = f.P1
......@@ -641,7 +641,7 @@ func paint1(f *Flow, bn int) {
break
}
r1 = f1.Data.(*Reg)
if !(r1.refahead.b[z]&bb != 0) {
if r1.refahead.b[z]&bb == 0 {
break
}
if r1.act.b[z]&bb != 0 {
......@@ -679,7 +679,7 @@ func paint1(f *Flow, bn int) {
}
}
if !(r.refahead.b[z]&bb != 0) {
if r.refahead.b[z]&bb == 0 {
break
}
f1 = f.S2
......@@ -696,7 +696,7 @@ func paint1(f *Flow, bn int) {
if r.act.b[z]&bb != 0 {
break
}
if !(r.refbehind.b[z]&bb != 0) {
if r.refbehind.b[z]&bb == 0 {
break
}
}
......@@ -714,11 +714,11 @@ func paint2(f *Flow, bn int, depth int) uint64 {
bb = 1 << uint(bn%64)
vreg = regbits
r = f.Data.(*Reg)
if !(r.act.b[z]&bb != 0) {
if r.act.b[z]&bb == 0 {
return vreg
}
for {
if !(r.refbehind.b[z]&bb != 0) {
if r.refbehind.b[z]&bb == 0 {
break
}
f1 = f.P1
......@@ -726,10 +726,10 @@ func paint2(f *Flow, bn int, depth int) uint64 {
break
}
r1 = f1.Data.(*Reg)
if !(r1.refahead.b[z]&bb != 0) {
if r1.refahead.b[z]&bb == 0 {
break
}
if !(r1.act.b[z]&bb != 0) {
if r1.act.b[z]&bb == 0 {
break
}
f = f1
......@@ -753,7 +753,7 @@ func paint2(f *Flow, bn int, depth int) uint64 {
}
}
if !(r.refahead.b[z]&bb != 0) {
if r.refahead.b[z]&bb == 0 {
break
}
f1 = f.S2
......@@ -767,10 +767,10 @@ func paint2(f *Flow, bn int, depth int) uint64 {
break
}
r = f.Data.(*Reg)
if !(r.act.b[z]&bb != 0) {
if r.act.b[z]&bb == 0 {
break
}
if !(r.refbehind.b[z]&bb != 0) {
if r.refbehind.b[z]&bb == 0 {
break
}
}
......@@ -793,7 +793,7 @@ func paint3(f *Flow, bn int, rb uint64, rn int) {
return
}
for {
if !(r.refbehind.b[z]&bb != 0) {
if r.refbehind.b[z]&bb == 0 {
break
}
f1 = f.P1
......@@ -801,7 +801,7 @@ func paint3(f *Flow, bn int, rb uint64, rn int) {
break
}
r1 = f1.Data.(*Reg)
if !(r1.refahead.b[z]&bb != 0) {
if r1.refahead.b[z]&bb == 0 {
break
}
if r1.act.b[z]&bb != 0 {
......@@ -851,7 +851,7 @@ func paint3(f *Flow, bn int, rb uint64, rn int) {
}
}
if !(r.refahead.b[z]&bb != 0) {
if r.refahead.b[z]&bb == 0 {
break
}
f1 = f.S2
......@@ -868,7 +868,7 @@ func paint3(f *Flow, bn int, rb uint64, rn int) {
if r.act.b[z]&bb != 0 {
break
}
if !(r.refbehind.b[z]&bb != 0) {
if r.refbehind.b[z]&bb == 0 {
break
}
}
......@@ -896,33 +896,33 @@ func dumpone(f *Flow, isreg int) {
for z = 0; z < BITS; z++ {
bit.b[z] = r.set.b[z] | r.use1.b[z] | r.use2.b[z] | r.refbehind.b[z] | r.refahead.b[z] | r.calbehind.b[z] | r.calahead.b[z] | r.regdiff.b[z] | r.act.b[z] | 0
}
if bany(&bit) != 0 {
if bany(&bit) {
fmt.Printf("\t")
if bany(&r.set) != 0 {
if bany(&r.set) {
fmt.Printf(" s:%v", Qconv(r.set, 0))
}
if bany(&r.use1) != 0 {
if bany(&r.use1) {
fmt.Printf(" u1:%v", Qconv(r.use1, 0))
}
if bany(&r.use2) != 0 {
if bany(&r.use2) {
fmt.Printf(" u2:%v", Qconv(r.use2, 0))
}
if bany(&r.refbehind) != 0 {
if bany(&r.refbehind) {
fmt.Printf(" rb:%v ", Qconv(r.refbehind, 0))
}
if bany(&r.refahead) != 0 {
if bany(&r.refahead) {
fmt.Printf(" ra:%v ", Qconv(r.refahead, 0))
}
if bany(&r.calbehind) != 0 {
if bany(&r.calbehind) {
fmt.Printf(" cb:%v ", Qconv(r.calbehind, 0))
}
if bany(&r.calahead) != 0 {
if bany(&r.calahead) {
fmt.Printf(" ca:%v ", Qconv(r.calahead, 0))
}
if bany(&r.regdiff) != 0 {
if bany(&r.regdiff) {
fmt.Printf(" d:%v ", Qconv(r.regdiff, 0))
}
if bany(&r.act) != 0 {
if bany(&r.act) {
fmt.Printf(" a:%v ", Qconv(r.act, 0))
}
}
......@@ -1052,7 +1052,7 @@ func regopt(firstp *obj.Prog) {
r.set.b[0] |= info.Regset
bit = mkvar(f, &p.From)
if bany(&bit) != 0 {
if bany(&bit) {
if info.Flags&LeftAddr != 0 {
setaddrs(bit)
}
......@@ -1080,7 +1080,7 @@ func regopt(firstp *obj.Prog) {
}
bit = mkvar(f, &p.To)
if bany(&bit) != 0 {
if bany(&bit) {
if info.Flags&RightAddr != 0 {
setaddrs(bit)
}
......@@ -1143,7 +1143,7 @@ func regopt(firstp *obj.Prog) {
for f = firstf; f != nil; f = f.Link {
p = f.Prog
if p.As == obj.AVARDEF && Isfat(((p.To.Node).(*Node)).Type) != 0 && ((p.To.Node).(*Node)).Opt != nil {
if p.As == obj.AVARDEF && Isfat(((p.To.Node).(*Node)).Type) && ((p.To.Node).(*Node)).Opt != nil {
active++
walkvardef(p.To.Node.(*Node), f, active)
}
......@@ -1172,7 +1172,7 @@ loop11:
for f = firstf; f != nil; f = f1 {
f1 = f.Link
if f1 != nil && f1.Active != 0 && !(f.Active != 0) {
if f1 != nil && f1.Active != 0 && f.Active == 0 {
prop(f, zbits, zbits)
i = 1
}
......@@ -1244,7 +1244,7 @@ loop2:
for z = 0; z < BITS; z++ {
bit.b[z] = (r.refahead.b[z] | r.calahead.b[z]) &^ (externs.b[z] | params.b[z] | addrs.b[z] | consts.b[z])
}
if bany(&bit) != 0 && !(f.Refset != 0) {
if bany(&bit) && f.Refset == 0 {
// should never happen - all variables are preset
if Debug['w'] != 0 {
fmt.Printf("%v: used and not set: %v\n", f.Prog.Line(), Qconv(bit, 0))
......@@ -1262,7 +1262,7 @@ loop2:
for z = 0; z < BITS; z++ {
bit.b[z] = r.set.b[z] &^ (r.refahead.b[z] | r.calahead.b[z] | addrs.b[z])
}
if bany(&bit) != 0 && !(f.Refset != 0) {
if bany(&bit) && f.Refset == 0 {
if Debug['w'] != 0 {
fmt.Printf("%v: set and not used: %v\n", f.Prog.Line(), Qconv(bit, 0))
}
......@@ -1273,7 +1273,7 @@ loop2:
for z = 0; z < BITS; z++ {
bit.b[z] = LOAD(r, z) &^ (r.act.b[z] | addrs.b[z])
}
for bany(&bit) != 0 {
for bany(&bit) {
i = bnum(bit)
change = 0
paint1(f, i)
......@@ -1354,7 +1354,7 @@ brk:
* pass 7
* peep-hole on basic block
*/
if !(Debug['R'] != 0) || Debug['P'] != 0 {
if Debug['R'] == 0 || Debug['P'] != 0 {
Thearch.Peep(firstp)
}
......
......@@ -134,7 +134,6 @@ func walkselect(sel *Node) {
switch n.Op {
default:
Fatal("select %v", Oconv(int(n.Op), 0))
fallthrough
// ok already
case OSEND:
......@@ -232,7 +231,6 @@ func walkselect(sel *Node) {
switch n.Op {
default:
Fatal("select %v", Oconv(int(n.Op), 0))
fallthrough
// if selectnbsend(c, v) { body } else { default body }
case OSEND:
......@@ -299,7 +297,6 @@ func walkselect(sel *Node) {
switch n.Op {
default:
Fatal("select %v", Oconv(int(n.Op), 0))
fallthrough
// selectsend(sel *byte, hchan *chan any, elem *any) (selected bool);
case OSEND:
......
This diff is collapsed.
This diff is collapsed.
......@@ -281,7 +281,7 @@ func casebody(sw *Node, typeswvar *Node) {
var go_ *Node
var br *Node
var lno int32
var needvar int32
var needvar bool
if sw.List == nil {
return
......@@ -301,7 +301,7 @@ func casebody(sw *Node, typeswvar *Node) {
Fatal("casebody %v", Oconv(int(n.Op), 0))
}
n.Op = OCASE
needvar = int32(bool2int(count(n.List) != 1 || n.List.N.Op == OLITERAL))
needvar = count(n.List) != 1 || n.List.N.Op == OLITERAL
go_ = Nod(OGOTO, newlabel_swt(), nil)
if n.List == nil {
......@@ -332,7 +332,7 @@ func casebody(sw *Node, typeswvar *Node) {
}
stat = list(stat, Nod(OLABEL, go_.Left, nil))
if typeswvar != nil && needvar != 0 && n.Nname != nil {
if typeswvar != nil && needvar && n.Nname != nil {
var l *NodeList
l = list1(Nod(ODCL, n.Nname, nil))
......@@ -410,7 +410,7 @@ func mkcaselist(sw *Node, arg int) *Case {
continue
}
if Istype(n.Left.Type, TINTER) != 0 {
if Istype(n.Left.Type, TINTER) {
c.type_ = Ttypevar
continue
}
......@@ -552,7 +552,7 @@ func exprswitch(sw *Node) {
casebody(sw, nil)
arg = Snorm
if Isconst(sw.Ntest, CTBOOL) != 0 {
if Isconst(sw.Ntest, CTBOOL) {
arg = Strue
if sw.Ntest.Val.U.Bval == 0 {
arg = Sfalse
......@@ -572,7 +572,7 @@ func exprswitch(sw *Node) {
cas = nil
if arg == Strue || arg == Sfalse {
exprname = Nodbool(bool2int(arg == Strue))
exprname = Nodbool(arg == Strue)
} else if consttype(sw.Ntest) >= 0 {
// leave constants to enable dead code elimination (issue 9608)
exprname = sw.Ntest
......@@ -600,7 +600,7 @@ loop:
}
// deal with the variables one-at-a-time
if !(okforcmp[t.Etype] != 0) || c0.type_ != Texprconst {
if okforcmp[t.Etype] == 0 || c0.type_ != Texprconst {
a = exprbsw(c0, 1, arg)
cas = list(cas, a)
c0 = c0.link
......@@ -738,7 +738,7 @@ func typeswitch(sw *Node) {
}
walkexpr(&sw.Ntest.Right, &sw.Ninit)
if !(Istype(sw.Ntest.Right.Type, TINTER) != 0) {
if !Istype(sw.Ntest.Right.Type, TINTER) {
Yyerror("type switch must be on an interface")
return
}
......@@ -764,7 +764,7 @@ func typeswitch(sw *Node) {
typecheck(&hashname, Erv)
t = sw.Ntest.Right.Type
if isnilinter(t) != 0 {
if isnilinter(t) {
a = syslook("efacethash", 1)
} else {
a = syslook("ifacethash", 1)
......@@ -871,7 +871,7 @@ func walkswitch(sw *Node) {
* both have inserted OBREAK statements
*/
if sw.Ntest == nil {
sw.Ntest = Nodbool(1)
sw.Ntest = Nodbool(true)
typecheck(&sw.Ntest, Erv)
}
......@@ -933,11 +933,11 @@ func typecheckswitch(n *Node) {
t = Types[TBOOL]
}
if t != nil {
if !(okforeq[t.Etype] != 0) {
if okforeq[t.Etype] == 0 {
Yyerror("cannot switch on %v", Nconv(n.Ntest, obj.FmtLong))
} else if t.Etype == TARRAY && !(Isfixedarray(t) != 0) {
} else if t.Etype == TARRAY && !Isfixedarray(t) {
nilonly = "slice"
} else if t.Etype == TARRAY && Isfixedarray(t) != 0 && algtype1(t, nil) == ANOEQ {
} else if t.Etype == TARRAY && Isfixedarray(t) && algtype1(t, nil) == ANOEQ {
Yyerror("cannot switch on %v", Nconv(n.Ntest, obj.FmtLong))
} else if t.Etype == TSTRUCT && algtype1(t, &badtype) == ANOEQ {
Yyerror("cannot switch on %v (struct containing %v cannot be compared)", Nconv(n.Ntest, obj.FmtLong), Tconv(badtype, 0))
......@@ -976,27 +976,27 @@ func typecheckswitch(n *Node) {
if ll.N.Op == OTYPE {
Yyerror("type %v is not an expression", Tconv(ll.N.Type, 0))
} else if ll.N.Type != nil && !(assignop(ll.N.Type, t, nil) != 0) && !(assignop(t, ll.N.Type, nil) != 0) {
} else if ll.N.Type != nil && assignop(ll.N.Type, t, nil) == 0 && assignop(t, ll.N.Type, nil) == 0 {
if n.Ntest != nil {
Yyerror("invalid case %v in switch on %v (mismatched types %v and %v)", Nconv(ll.N, 0), Nconv(n.Ntest, 0), Tconv(ll.N.Type, 0), Tconv(t, 0))
} else {
Yyerror("invalid case %v in switch (mismatched types %v and bool)", Nconv(ll.N, 0), Tconv(ll.N.Type, 0))
}
} else if nilonly != "" && !(Isconst(ll.N, CTNIL) != 0) {
} else if nilonly != "" && !Isconst(ll.N, CTNIL) {
Yyerror("invalid case %v in switch (can only compare %s %v to nil)", Nconv(ll.N, 0), nilonly, Nconv(n.Ntest, 0))
}
case Etype: // type switch
if ll.N.Op == OLITERAL && Istype(ll.N.Type, TNIL) != 0 {
if ll.N.Op == OLITERAL && Istype(ll.N.Type, TNIL) {
} else if ll.N.Op != OTYPE && ll.N.Type != nil { // should this be ||?
Yyerror("%v is not a type", Nconv(ll.N, obj.FmtLong))
// reset to original type
ll.N = n.Ntest.Right
} else if ll.N.Type.Etype != TINTER && t.Etype == TINTER && !(implements(ll.N.Type, t, &missing, &have, &ptr) != 0) {
if have != nil && !(missing.Broke != 0) && !(have.Broke != 0) {
} else if ll.N.Type.Etype != TINTER && t.Etype == TINTER && !implements(ll.N.Type, t, &missing, &have, &ptr) {
if have != nil && missing.Broke == 0 && have.Broke == 0 {
Yyerror("impossible type switch case: %v cannot have dynamic type %v"+" (wrong type for %v method)\n\thave %v%v\n\twant %v%v", Nconv(n.Ntest.Right, obj.FmtLong), Tconv(ll.N.Type, 0), Sconv(missing.Sym, 0), Sconv(have.Sym, 0), Tconv(have.Type, obj.FmtShort), Sconv(missing.Sym, 0), Tconv(missing.Type, obj.FmtShort))
} else if !(missing.Broke != 0) {
} else if missing.Broke == 0 {
Yyerror("impossible type switch case: %v cannot have dynamic type %v"+" (missing %v method)", Nconv(n.Ntest.Right, obj.FmtLong), Tconv(ll.N.Type, 0), Sconv(missing.Sym, 0))
}
}
......@@ -1008,7 +1008,7 @@ func typecheckswitch(n *Node) {
ll = ncase.List
nvar = ncase.Nname
if nvar != nil {
if ll != nil && ll.Next == nil && ll.N.Type != nil && !(Istype(ll.N.Type, TNIL) != 0) {
if ll != nil && ll.Next == nil && ll.N.Type != nil && !Istype(ll.N.Type, TNIL) {
// single entry type switch
nvar.Ntype = typenod(ll.N.Type)
} else {
......
This diff is collapsed.
......@@ -161,18 +161,18 @@ ret:
return n
}
func isunsafebuiltin(n *Node) int {
func isunsafebuiltin(n *Node) bool {
if n == nil || n.Op != ONAME || n.Sym == nil || n.Sym.Pkg != unsafepkg {
return 0
return false
}
if n.Sym.Name == "Sizeof" {
return 1
return true
}
if n.Sym.Name == "Offsetof" {
return 1
return true
}
if n.Sym.Name == "Alignof" {
return 1
return true
}
return 0
return false
}
This diff is collapsed.
......@@ -851,6 +851,7 @@ type yyLexer interface {
}
type yyParser interface {
Parse(yyLexer) int
Lookahead() int
}
......@@ -862,6 +863,13 @@ func (p *yyParserImpl) Lookahead() int {
return p.lookahead()
}
func yyNewParser() yyParser {
p := &yyParserImpl{
lookahead: func() int { return -1 },
}
return p
}
const yyFlag = -1000
func yyTokname(c int) string {
......@@ -919,6 +927,10 @@ out:
}
func yyParse(yylex yyLexer) int {
return yyNewParser().Parse(yylex)
}
func (yyrcvr *yyParserImpl) Parse(yylex yyLexer) int {
var yyn int
var yylval yySymType
var yyVAL yySymType
......@@ -930,19 +942,12 @@ func yyParse(yylex yyLexer) int {
yystate := 0
yychar := -1
yytoken := -1 // yychar translated into internal numbering
if lx, ok := yylex.(interface {
SetParser(yyParser)
}); ok {
p := &yyParserImpl{
lookahead: func() int { return yychar },
}
lx.SetParser(p)
defer func() {
// Make sure we report no lookahead when not parsing.
yychar = -1
yytoken = -1
}()
}
yyrcvr.lookahead = func() int { return yychar }
defer func() {
// Make sure we report no lookahead when not parsing.
yychar = -1
yytoken = -1
}()
yyp := -1
goto yystack
......
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment