Commit f0d44dbe authored by Russ Cox

runtime: look up arg stackmap for makeFuncStub/methodValueCall during traceback

makeFuncStub and methodValueCall are used by reflect as
generic function implementations. Each call might have
different arguments. Extract the argument map from the
closure data instead of assuming it is the same for every call.

Because the argument map is now being extracted from the
function itself, we don't need the special cases in reflect.Call
anymore, so delete those.

Fixes an occasional crash seen when stack copying does
not update makeFuncStub's arguments correctly.

Will also help make it safe to require stack maps in the
garbage collector.

Derived from CL 142000044 by khr.

LGTM=khr
R=khr
CC=golang-codereviews
https://golang.org/cl/143890044
parent 70f92869
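
For orientation, here is a minimal standalone Go sketch (hypothetical types, not the real runtime or reflect ones) of the closure-data layout the traceback change below relies on: the first word of the data behind the func value is the entry PC of makeFuncStub/methodValueCall, and the second word points to the argument bitmap.

package main

import (
	"fmt"
	"unsafe"
)

// bitVector mirrors the shape shared by reflect and the runtime:
// n counts bits; data holds two bits per argument word.
type bitVector struct {
	n    uint32
	data []byte
}

// closureHeader stands in for makeFuncImpl/methodValue: only the first two
// words matter to the traceback code, which reads them as a [2]uintptr.
type closureHeader struct {
	code  uintptr    // entry PC of the assembly stub
	stack *bitVector // argument stack map, offset known to the runtime
}

func main() {
	bv := &bitVector{n: 4}
	h := &closureHeader{code: 0x1234, stack: bv}

	// What gentraceback does, in spirit: reinterpret the leading words of
	// the closure data and recover the bitmap pointer from the second word.
	words := (*[2]uintptr)(unsafe.Pointer(h))
	fmt.Printf("entry=%#x sameBitmap=%v\n",
		words[0], words[1] == uintptr(unsafe.Pointer(h.stack)))
}
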
......@@ -3860,3 +3860,28 @@ func TestCallMethodJump(t *testing.T) {
// Stop garbage collecting during reflect.call.
*CallGC = false
}
func TestMakeFuncStackCopy(t *testing.T) {
target := func(in []Value) []Value {
runtime.GC()
useStack(16)
return []Value{ValueOf(9)}
}
var concrete func(*int, int) int
fn := MakeFunc(ValueOf(concrete).Type(), target)
ValueOf(&concrete).Elem().Set(fn)
x := concrete(nil, 7)
if x != 9 {
t.Errorf("have %#q want 9", x)
}
}
// use about n KB of stack
func useStack(n int) {
if n == 0 {
return
}
var b [1024]byte // makes frame about 1KB
useStack(n - 1 + int(b[99]))
}
......@@ -3,12 +3,14 @@
// license that can be found in the LICENSE file.
#include "textflag.h"
#include "funcdata.h"
// makeFuncStub is the code half of the function returned by MakeFunc.
// See the comment on the declaration of makeFuncStub in makefunc.go
// for more details.
// No argsize here, gc generates argsize info at call site.
TEXT ·makeFuncStub(SB),(NOSPLIT|WRAPPER),$8
NO_LOCAL_POINTERS
MOVL DX, 0(SP)
LEAL argframe+0(FP), CX
MOVL CX, 4(SP)
......@@ -20,6 +22,7 @@ TEXT ·makeFuncStub(SB),(NOSPLIT|WRAPPER),$8
// for more details.
// No argsize here, gc generates argsize info at call site.
TEXT ·methodValueCall(SB),(NOSPLIT|WRAPPER),$8
NO_LOCAL_POINTERS
MOVL DX, 0(SP)
LEAL argframe+0(FP), CX
MOVL CX, 4(SP)
......
......@@ -3,12 +3,14 @@
// license that can be found in the LICENSE file.
#include "textflag.h"
#include "funcdata.h"
// makeFuncStub is the code half of the function returned by MakeFunc.
// See the comment on the declaration of makeFuncStub in makefunc.go
// for more details.
// No argsize here, gc generates argsize info at call site.
// No arg size here; runtime pulls arg map out of the func value.
TEXT ·makeFuncStub(SB),(NOSPLIT|WRAPPER),$16
NO_LOCAL_POINTERS
MOVQ DX, 0(SP)
LEAQ argframe+0(FP), CX
MOVQ CX, 8(SP)
......@@ -18,8 +20,9 @@ TEXT ·makeFuncStub(SB),(NOSPLIT|WRAPPER),$16
// methodValueCall is the code half of the function returned by makeMethodValue.
// See the comment on the declaration of methodValueCall in makefunc.go
// for more details.
// No argsize here, gc generates argsize info at call site.
// No arg size here; runtime pulls arg map out of the func value.
TEXT ·methodValueCall(SB),(NOSPLIT|WRAPPER),$16
NO_LOCAL_POINTERS
MOVQ DX, 0(SP)
LEAQ argframe+0(FP), CX
MOVQ CX, 8(SP)
......
......@@ -3,12 +3,14 @@
// license that can be found in the LICENSE file.
#include "textflag.h"
#include "funcdata.h"
// makeFuncStub is the code half of the function returned by MakeFunc.
// See the comment on the declaration of makeFuncStub in makefunc.go
// for more details.
// No argsize here, gc generates argsize info at call site.
TEXT ·makeFuncStub(SB),(NOSPLIT|WRAPPER),$8
NO_LOCAL_POINTERS
MOVL DX, 0(SP)
LEAL argframe+0(FP), CX
MOVL CX, 4(SP)
......@@ -20,6 +22,7 @@ TEXT ·makeFuncStub(SB),(NOSPLIT|WRAPPER),$8
// for more details.
// No argsize here, gc generates argsize info at call site.
TEXT ·methodValueCall(SB),(NOSPLIT|WRAPPER),$8
NO_LOCAL_POINTERS
MOVL DX, 0(SP)
LEAL argframe+0(FP), CX
MOVL CX, 4(SP)
......
......@@ -3,12 +3,14 @@
// license that can be found in the LICENSE file.
#include "textflag.h"
#include "funcdata.h"
// makeFuncStub is jumped to by the code generated by MakeFunc.
// See the comment on the declaration of makeFuncStub in makefunc.go
// for more details.
// No argsize here, gc generates argsize info at call site.
TEXT ·makeFuncStub(SB),(NOSPLIT|WRAPPER),$8
NO_LOCAL_POINTERS
MOVW R7, 4(R13)
MOVW $argframe+0(FP), R1
MOVW R1, 8(R13)
......@@ -20,6 +22,7 @@ TEXT ·makeFuncStub(SB),(NOSPLIT|WRAPPER),$8
// for more details.
// No argsize here, gc generates argsize info at call site.
TEXT ·methodValueCall(SB),(NOSPLIT|WRAPPER),$8
NO_LOCAL_POINTERS
MOVW R7, 4(R13)
MOVW $argframe+0(FP), R1
MOVW R1, 8(R13)
......
......@@ -14,6 +14,7 @@ import (
// returned by MakeFunc.
type makeFuncImpl struct {
code uintptr
stack *bitVector // stack bitmap for args - offset known to runtime
typ *funcType
fn func([]Value) []Value
}
......@@ -54,7 +55,10 @@ func MakeFunc(typ Type, fn func(args []Value) (results []Value)) Value {
dummy := makeFuncStub
code := **(**uintptr)(unsafe.Pointer(&dummy))
impl := &makeFuncImpl{code: code, typ: ftyp, fn: fn}
// makeFuncImpl contains a stack map for use by the runtime
_, _, _, stack := funcLayout(t, nil)
impl := &makeFuncImpl{code: code, stack: stack, typ: ftyp, fn: fn}
return Value{t, unsafe.Pointer(impl), 0, flag(Func) << flagKindShift}
}
......@@ -68,6 +72,7 @@ func makeFuncStub()
type methodValue struct {
fn uintptr
stack *bitVector // stack bitmap for args - offset known to runtime
method int
rcvr Value
}
......@@ -98,8 +103,12 @@ func makeMethodValue(op string, v Value) Value {
dummy := methodValueCall
code := **(**uintptr)(unsafe.Pointer(&dummy))
// methodValue contains a stack map for use by the runtime
_, _, _, stack := funcLayout(funcType, nil)
fv := &methodValue{
fn: code,
stack: stack,
method: int(v.flag) >> flagMethodShift,
rcvr: rcvr,
}
......
......@@ -242,7 +242,7 @@ const (
// with a unique tag like `reflect:"array"` or `reflect:"ptr"`
// so that code cannot convert from, say, *arrayType to *ptrType.
type rtype struct {
size uintptr // size in bytes
size uintptr
hash uint32 // hash of type; avoids computation in hash tables
_ uint8 // unused/padding
align uint8 // alignment of variable with this type
......@@ -1726,6 +1726,7 @@ type layoutType struct {
t *rtype
argSize uintptr // size of arguments
retOffset uintptr // offset of return values.
stack *bitVector
}
var layoutCache struct {
......@@ -1739,7 +1740,7 @@ var layoutCache struct {
// The returned type exists only for GC, so we only fill out GC relevant info.
// Currently, that's just size and the GC program. We also fill in
// the name for possible debugging use.
func funcLayout(t *rtype, rcvr *rtype) (frametype *rtype, argSize, retOffset uintptr) {
func funcLayout(t *rtype, rcvr *rtype) (frametype *rtype, argSize, retOffset uintptr, stack *bitVector) {
if t.Kind() != Func {
panic("reflect: funcLayout of non-func type")
}
......@@ -1750,19 +1751,21 @@ func funcLayout(t *rtype, rcvr *rtype) (frametype *rtype, argSize, retOffset uin
layoutCache.RLock()
if x := layoutCache.m[k]; x.t != nil {
layoutCache.RUnlock()
return x.t, x.argSize, x.retOffset
return x.t, x.argSize, x.retOffset, x.stack
}
layoutCache.RUnlock()
layoutCache.Lock()
if x := layoutCache.m[k]; x.t != nil {
layoutCache.Unlock()
return x.t, x.argSize, x.retOffset
return x.t, x.argSize, x.retOffset, x.stack
}
tt := (*funcType)(unsafe.Pointer(t))
// compute gc program for arguments
// compute gc program & stack bitmap for arguments
stack = new(bitVector)
var gc gcProg
var offset uintptr
if rcvr != nil {
// Reflect uses the "interface" calling convention for
// methods, where receivers take one word of argument
......@@ -1770,16 +1773,21 @@ func funcLayout(t *rtype, rcvr *rtype) (frametype *rtype, argSize, retOffset uin
if !isDirectIface(rcvr) {
// we pass a pointer to the receiver.
gc.append(bitsPointer)
stack.append2(bitsPointer)
} else if rcvr.pointers() {
// rcvr is a one-word pointer object. Its gc program
// is just what we need here.
gc.append(bitsPointer)
stack.append2(bitsPointer)
} else {
gc.append(bitsScalar)
stack.append2(bitsScalar)
}
offset += ptrSize
}
for _, arg := range tt.in {
gc.appendProg(arg)
addTypeBits(stack, &offset, arg)
}
argSize = gc.size
if runtime.GOARCH == "amd64p32" {
......@@ -1789,6 +1797,7 @@ func funcLayout(t *rtype, rcvr *rtype) (frametype *rtype, argSize, retOffset uin
retOffset = gc.size
for _, res := range tt.out {
gc.appendProg(res)
// stack map does not need result bits
}
gc.align(ptrSize)
......@@ -1813,12 +1822,73 @@ func funcLayout(t *rtype, rcvr *rtype) (frametype *rtype, argSize, retOffset uin
t: x,
argSize: argSize,
retOffset: retOffset,
stack: stack,
}
layoutCache.Unlock()
return x, argSize, retOffset
return x, argSize, retOffset, stack
}
// isDirectIface reports whether t is stored directly in an interface value.
func isDirectIface(t *rtype) bool {
return t.kind&kindDirectIface != 0
}
// Layout matches runtime.BitVector (well enough).
type bitVector struct {
n uint32 // number of bits
data []byte
}
// append a bit pair to the bitmap.
func (bv *bitVector) append2(bits uint8) {
// assume bv.n is a multiple of 2, since append2 is the only operation.
if bv.n%8 == 0 {
bv.data = append(bv.data, 0)
}
bv.data[bv.n/8] |= bits << (bv.n % 8)
bv.n += 2
}
func addTypeBits(bv *bitVector, offset *uintptr, t *rtype) {
*offset = align(*offset, uintptr(t.align))
if t.kind&kindNoPointers != 0 {
*offset += t.size
return
}
switch Kind(t.kind & kindMask) {
case Chan, Func, Map, Ptr, Slice, String, UnsafePointer:
// 1 pointer at start of representation
for bv.n < uint32(*offset/uintptr(ptrSize)) {
bv.append2(bitsScalar)
}
bv.append2(bitsPointer)
case Interface:
// 2 pointers
for bv.n < uint32(*offset/uintptr(ptrSize)) {
bv.append2(bitsScalar)
}
bv.append2(bitsPointer)
bv.append2(bitsPointer)
case Array:
// repeat inner type
tt := (*arrayType)(unsafe.Pointer(t))
for i := 0; i < int(tt.len); i++ {
addTypeBits(bv, offset, tt.elem)
}
case Struct:
// apply fields
tt := (*structType)(unsafe.Pointer(t))
start := *offset
for i := range tt.fields {
f := &tt.fields[i]
off := start + f.offset
addTypeBits(bv, &off, f.typ)
}
}
*offset += t.size
}
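
As a quick illustration of the two-bits-per-word encoding that append2 and addTypeBits build, here is a small self-contained sketch. The bitsScalar/bitsPointer values (1 and 2) are assumed to match the reflect constants of this era, and the tiny bitVector is illustrative rather than the package's own type.

package main

import "fmt"

const (
	bitsScalar  = 1
	bitsPointer = 2
)

type bitVector struct {
	n    uint32 // number of bits
	data []byte
}

// append2 appends one two-bit pair, growing the byte slice as needed,
// mirroring the method added to reflect above.
func (bv *bitVector) append2(bits uint8) {
	if bv.n%8 == 0 {
		bv.data = append(bv.data, 0)
	}
	bv.data[bv.n/8] |= bits << (bv.n % 8)
	bv.n += 2
}

func main() {
	// Argument words for a signature like func(*int, int):
	// one pointer word followed by one scalar word.
	var bv bitVector
	bv.append2(bitsPointer)
	bv.append2(bitsScalar)
	fmt.Printf("bits=%d data=%08b\n", bv.n, bv.data[0]) // bits=4 data=00000110
}
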
......@@ -403,11 +403,6 @@ func (v Value) CallSlice(in []Value) []Value {
var callGC bool // for testing; see TestCallMethodJump
var makeFuncStubFn = makeFuncStub
var makeFuncStubCode = **(**uintptr)(unsafe.Pointer(&makeFuncStubFn))
var methodValueCallFn = methodValueCall
var methodValueCallCode = **(**uintptr)(unsafe.Pointer(&methodValueCallFn))
func (v Value) call(op string, in []Value) []Value {
// Get function pointer, type.
t := v.typ
......@@ -486,30 +481,8 @@ func (v Value) call(op string, in []Value) []Value {
}
nout := t.NumOut()
// If target is makeFuncStub, short circuit the unpack onto stack /
// pack back into []Value for the args and return values. Just do the
// call directly.
// We need to do this here because otherwise we have a situation where
// reflect.callXX calls makeFuncStub, neither of which knows the
// layout of the args. That's bad for precise gc & stack copying.
x := (*makeFuncImpl)(fn)
if x.code == makeFuncStubCode {
return x.fn(in)
}
// If the target is methodValueCall, do its work here: add the receiver
// argument and call the real target directly.
// We need to do this here because otherwise we have a situation where
// reflect.callXX calls methodValueCall, neither of which knows the
// layout of the args. That's bad for precise gc & stack copying.
y := (*methodValue)(fn)
if y.fn == methodValueCallCode {
rcvr = y.rcvr
rcvrtype, t, fn = methodReceiver("call", rcvr, y.method)
}
// Compute frame type, allocate a chunk of memory for frame
frametype, _, retOffset := funcLayout(t, rcvrtype)
frametype, _, retOffset, _ := funcLayout(t, rcvrtype)
args := unsafe_New(frametype)
off := uintptr(0)
......@@ -725,7 +698,7 @@ func align(x, n uintptr) uintptr {
func callMethod(ctxt *methodValue, frame unsafe.Pointer) {
rcvr := ctxt.rcvr
rcvrtype, t, fn := methodReceiver("call", rcvr, ctxt.method)
frametype, argSize, retOffset := funcLayout(t, rcvrtype)
frametype, argSize, retOffset, _ := funcLayout(t, rcvrtype)
// Make a new frame that is one word bigger so we can store the receiver.
args := unsafe_New(frametype)
......
......@@ -586,7 +586,6 @@ void runtime·queuefinalizer(byte *p, FuncVal *fn, uintptr nret, Type *fint, Ptr
bool runtime·freespecial(Special *s, void *p, uintptr size, bool freed);
// Information from the compiler about the layout of stack frames.
typedef struct BitVector BitVector;
struct BitVector
{
int32 n; // # of bits
......
......@@ -673,8 +673,10 @@ scanframe(Stkframe *frame, void *unused)
// Scan arguments.
// Use pointer information if known.
stackmap = runtime·funcdata(f, FUNCDATA_ArgsPointerMaps);
if(stackmap != nil) {
if(frame->argmap != nil) {
bv = *frame->argmap;
scanblock((byte*)frame->argp, bv.n/BitsPerPointer*PtrSize, (byte*)bv.data);
} else if((stackmap = runtime·funcdata(f, FUNCDATA_ArgsPointerMaps)) != nil) {
bv = runtime·stackmapdata(stackmap, pcdata);
scanblock((byte*)frame->argp, bv.n/BitsPerPointer*PtrSize, (byte*)bv.data);
} else {
......
......@@ -666,6 +666,7 @@ struct Panic
* stack traces
*/
typedef struct Stkframe Stkframe;
typedef struct BitVector BitVector;
struct Stkframe
{
Func* fn; // function being run
......@@ -677,6 +678,7 @@ struct Stkframe
uintptr varp; // top of local variables
uintptr argp; // pointer to function arguments
uintptr arglen; // number of bytes at argp
BitVector* argmap; // force use of this argmap
};
intgo runtime·gentraceback(uintptr, uintptr, uintptr, G*, intgo, uintptr*, intgo, bool(**)(Stkframe*, void*), void*, bool);
......
......@@ -481,12 +481,16 @@ adjustframe(Stkframe *frame, void *arg)
}
// adjust inargs and outargs
if(frame->arglen != 0) {
if(frame->argmap != nil) {
bv = *frame->argmap;
} else {
stackmap = runtime·funcdata(f, FUNCDATA_ArgsPointerMaps);
if(stackmap == nil) {
runtime·printf("size %d\n", (int32)frame->arglen);
runtime·throw("no arg info");
}
bv = runtime·stackmapdata(stackmap, pcdata);
}
if(StackDebug >= 3)
runtime·printf(" args\n");
adjustpointers((byte**)frame->argp, &bv, adjinfo, nil);
......
......@@ -189,6 +189,21 @@ func gentraceback(pc0 uintptr, sp0 uintptr, lr0 uintptr, gp *g, skip int, pcbuf
}
if f.args != _ArgsSizeUnknown {
frame.arglen = uintptr(f.args)
} else if callback != nil && (gofuncname(f) == "reflect.makeFuncStub" || gofuncname(f) == "reflect.methodValueCall") {
// NOTE: Two calls to gofuncname on line above will be
// collapsed to one when we pull out all the imprecise fallback code.
arg0 := frame.sp
if usesLR {
arg0 += ptrSize
}
fn := *(**[2]uintptr)(unsafe.Pointer(arg0))
if fn[0] != f.entry {
print("runtime: confused by ", gofuncname(f), "\n")
gothrow("reflect mismatch")
}
bv := (*bitvector)(unsafe.Pointer(fn[1]))
frame.arglen = uintptr(bv.n / 2 * ptrSize)
frame.argmap = bv
} else if flr == nil {
frame.arglen = 0
} else {
......@@ -332,6 +347,7 @@ func gentraceback(pc0 uintptr, sp0 uintptr, lr0 uintptr, gp *g, skip int, pcbuf
frame.lr = 0
frame.sp = frame.fp
frame.fp = 0
frame.argmap = nil
// On link register architectures, sighandler saves the LR on stack
// before faking a call to sigpanic.
......
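
A worked check of the arglen computation above, assuming BitsPerPointer is 2 (two bitmap bits per argument word): for a stub whose argument frame is three pointer-sized words, the bitmap records six bits and the traceback derives a 24-byte argument area.

package main

import "fmt"

const (
	ptrSize        = 8 // assuming a 64-bit platform for this example
	bitsPerPointer = 2 // two bitmap bits describe each argument word
)

func main() {
	bvN := uint32(6) // bitmap bits recorded for three argument words
	arglen := uintptr(bvN) / bitsPerPointer * ptrSize
	fmt.Println(arglen) // 24 bytes of arguments at frame.argp
}
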