Source file
src/reflect/value.go
Documentation: reflect
1
2
3
4
5 package reflect
6
7 import (
8 "internal/abi"
9 "internal/itoa"
10 "internal/unsafeheader"
11 "math"
12 "runtime"
13 "unsafe"
14 )
15
// ptrSize is the size in bytes of a pointer on the target platform:
// (^uintptr(0) >> 63) is 0 on 32-bit systems and 1 on 64-bit systems,
// so the expression evaluates to 4 or 8 respectively.
const ptrSize = 4 << (^uintptr(0) >> 63)
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
// Value is the reflection interface to a Go value.
//
// Not all methods apply to all kinds of values. Restrictions,
// if any, are noted in the documentation for each method.
// Use the Kind method to find out the kind of value before
// calling kind-specific methods. Calling a method
// inappropriate to the kind of type causes a run time panic.
//
// The zero Value represents no value.
// Its IsValid method returns false, its Kind method returns Invalid,
// and all other methods panic.
type Value struct {
	// typ holds the type of the value represented by a Value.
	typ *rtype

	// ptr is pointer-valued data or, if flagIndir is set,
	// a pointer to the data.
	ptr unsafe.Pointer

	// flag holds metadata about the value. The lowest five bits
	// cache the value's Kind (repeating typ.Kind() except for
	// method values). Above those sit the flagStickyRO, flagEmbedRO,
	// flagIndir, flagAddr, and flagMethod bits; for a method value
	// the remaining high bits give the method index in the
	// receiver type's method table. See the flag constants below.
	flag

	// A method value represents a curried method invocation like
	// r.Read for some receiver r. typ+ptr+flag describe the receiver
	// r, but the flag's Kind bits say Func, and the high bits of the
	// flag give the method number in r's type's method table.
}
67
// flag packs a Value's cached Kind, read-only status, indirection and
// addressability bits, plus the method index for method values.
type flag uintptr

const (
	flagKindWidth        = 5                            // Kind occupies the low five bits
	flagKindMask    flag = 1<<flagKindWidth - 1         // mask extracting the Kind bits
	flagStickyRO    flag = 1 << 5                       // obtained via unexported, not embedded, field
	flagEmbedRO     flag = 1 << 6                       // obtained via unexported embedded field
	flagIndir       flag = 1 << 7                       // ptr holds a pointer to the data
	flagAddr        flag = 1 << 8                       // value is addressable (implies flagIndir)
	flagMethod      flag = 1 << 9                       // value is a method value
	flagMethodShift      = 10                           // method index is stored in flag >> flagMethodShift
	flagRO          flag = flagStickyRO | flagEmbedRO   // value is read-only in some way
)
81
82 func (f flag) kind() Kind {
83 return Kind(f & flagKindMask)
84 }
85
86 func (f flag) ro() flag {
87 if f&flagRO != 0 {
88 return flagStickyRO
89 }
90 return 0
91 }
92
93
94
95
// pointer returns the underlying pointer represented by v.
// v's type must be pointer-shaped (exactly one pointer word),
// e.g. Ptr, Map, Chan, Func, or UnsafePointer.
func (v Value) pointer() unsafe.Pointer {
	if v.typ.size != ptrSize || !v.typ.pointers() {
		panic("can't call pointer on a non-pointer Value")
	}
	// If the value is stored indirectly, load the pointer it holds.
	if v.flag&flagIndir != 0 {
		return *(*unsafe.Pointer)(v.ptr)
	}
	return v.ptr
}
105
106
// packEface converts v to the empty interface.
func packEface(v Value) interface{} {
	t := v.typ
	var i interface{}
	e := (*emptyInterface)(unsafe.Pointer(&i))
	// First, fill in the data portion of the interface.
	switch {
	case ifaceIndir(t):
		if v.flag&flagIndir == 0 {
			panic("bad indir")
		}
		// Value is indirect, and so is the interface we're making.
		ptr := v.ptr
		if v.flag&flagAddr != 0 {
			// The value is addressable, so it could change under us;
			// make a private copy for the interface to point at.
			c := unsafe_New(t)
			typedmemmove(t, c, ptr)
			ptr = c
		}
		e.word = ptr
	case v.flag&flagIndir != 0:
		// Value is indirect, but interface is direct. We need
		// to load the data at v.ptr into the interface data word.
		e.word = *(*unsafe.Pointer)(v.ptr)
	default:
		// Value is direct, and so is the interface.
		e.word = v.ptr
	}
	// Now, fill in the type portion. We're very careful here not
	// to have any operation between the e.word and e.typ assignments
	// that would let the garbage collector observe the partially-built
	// interface value.
	e.typ = t
	return i
}
142
143
// unpackEface converts the empty interface i to a Value.
func unpackEface(i interface{}) Value {
	e := (*emptyInterface)(unsafe.Pointer(&i))
	// NOTE: don't read e.word until we know whether it is really a pointer or not.
	t := e.typ
	if t == nil {
		return Value{}
	}
	f := flag(t.Kind())
	if ifaceIndir(t) {
		f |= flagIndir
	}
	return Value{t, e.word, f}
}
157
158
159
160
// A ValueError occurs when a Value method is invoked on
// a Value that does not support it. Such cases are documented
// in the description of each method.
type ValueError struct {
	Method string // name of the method that was invoked
	Kind   Kind   // kind of the Value it was invoked on (Invalid for the zero Value)
}
165
166 func (e *ValueError) Error() string {
167 if e.Kind == 0 {
168 return "reflect: call of " + e.Method + " on zero Value"
169 }
170 return "reflect: call of " + e.Method + " on " + e.Kind.String() + " Value"
171 }
172
173
174
// methodName returns the name of the calling method,
// assumed to be two stack frames above.
func methodName() string {
	// skip=2: frame 0 is methodName itself, frame 1 its caller inside
	// reflect, frame 2 the method whose name we want to report.
	pc, _, _, _ := runtime.Caller(2)
	if fn := runtime.FuncForPC(pc); fn != nil {
		return fn.Name()
	}
	return "unknown method"
}
183
184
185
// methodNameSkip returns the name of the calling method,
// assumed to be three stack frames above. It is used by the
// *Slow helpers, whose extra call frame adds one to the skip count.
func methodNameSkip() string {
	pc, _, _, _ := runtime.Caller(3)
	if fn := runtime.FuncForPC(pc); fn != nil {
		return fn.Name()
	}
	return "unknown method"
}
194
195
// emptyInterface is the header for an interface{} value.
type emptyInterface struct {
	typ  *rtype         // dynamic type descriptor
	word unsafe.Pointer // data word (pointer or pointer to data)
}
200
201
// nonEmptyInterface is the header for an interface value with methods.
// Its itab layout must mirror the runtime's itab structure.
type nonEmptyInterface struct {
	itab *struct {
		ityp *rtype // static interface type
		typ  *rtype // dynamic concrete type
		hash uint32 // copy of typ.hash
		_    [4]byte
		fun  [100000]unsafe.Pointer // method table; 100000 is an arbitrary bound for indexing
	}
	word unsafe.Pointer // data word, as in emptyInterface
}
213
214
215
216
217
218
219
// mustBe panics if f's kind is not expected.
// Making this a method on flag instead of on Value
// (and embedding flag in Value) means that we can write
// the very clear v.mustBe(Bool) and have it compile into
// v.flag.mustBe(Bool), which will only bother to copy the
// single important word for the receiver.
func (f flag) mustBe(expected Kind) {
	// Inline the kind extraction by hand (rather than calling f.kind())
	// to keep this function cheap enough for the compiler to inline.
	if Kind(f&flagKindMask) != expected {
		panic(&ValueError{methodName(), f.kind()})
	}
}
226
227
228
// mustBeExported panics if f records that the value was obtained using
// an unexported field. The panic itself lives in mustBeExportedSlow so
// this fast path stays small enough to inline.
func (f flag) mustBeExported() {
	if f == 0 || f&flagRO != 0 {
		f.mustBeExportedSlow()
	}
}
234
// mustBeExportedSlow is the out-of-line panic path for mustBeExported.
// It uses methodNameSkip, which skips one extra stack frame to account
// for this additional call.
func (f flag) mustBeExportedSlow() {
	if f == 0 {
		panic(&ValueError{methodNameSkip(), Invalid})
	}
	if f&flagRO != 0 {
		panic("reflect: " + methodNameSkip() + " using value obtained using unexported field")
	}
}
243
244
245
246
// mustBeAssignable panics if f records that the value is not assignable,
// which is to say that either it was obtained using an unexported field
// or it is not addressable. The panic lives in mustBeAssignableSlow so
// this fast path stays inlinable.
func (f flag) mustBeAssignable() {
	if f&flagRO != 0 || f&flagAddr == 0 {
		f.mustBeAssignableSlow()
	}
}
252
// mustBeAssignableSlow is the out-of-line panic path for mustBeAssignable.
// It uses methodNameSkip to account for the extra call frame.
func (f flag) mustBeAssignableSlow() {
	if f == 0 {
		panic(&ValueError{methodNameSkip(), Invalid})
	}
	// Assignable if addressable and not read-only.
	if f&flagRO != 0 {
		panic("reflect: " + methodNameSkip() + " using value obtained using unexported field")
	}
	if f&flagAddr == 0 {
		panic("reflect: " + methodNameSkip() + " using unaddressable value")
	}
}
265
266
267
268
269
270
// Addr returns a pointer value representing the address of v.
// It panics if CanAddr() returns false.
// Addr is typically used to obtain a pointer to a struct field
// or slice element in order to call a method that requires a
// pointer receiver.
func (v Value) Addr() Value {
	if v.flag&flagAddr == 0 {
		panic("reflect.Value.Addr of unaddressable value")
	}
	// Preserve flagRO instead of using v.flag.ro() so that
	// v.Addr().Elem() is equivalent to v.
	fl := v.flag & flagRO
	return Value{v.typ.ptrTo(), v.ptr, fl | flag(Ptr)}
}
280
281
282
// Bool returns v's underlying value.
// It panics if v's kind is not Bool.
func (v Value) Bool() bool {
	v.mustBe(Bool)
	return *(*bool)(v.ptr)
}
287
288
289
// Bytes returns v's underlying value.
// It panics if v's underlying value is not a slice of bytes.
func (v Value) Bytes() []byte {
	v.mustBe(Slice)
	if v.typ.Elem().Kind() != Uint8 {
		panic("reflect.Value.Bytes of non-byte slice")
	}
	// Slice is always bigger than a word; assume flagIndir.
	return *(*[]byte)(v.ptr)
}
298
299
300
301 func (v Value) runes() []rune {
302 v.mustBe(Slice)
303 if v.typ.Elem().Kind() != Int32 {
304 panic("reflect.Value.Bytes of non-rune slice")
305 }
306
307 return *(*[]rune)(v.ptr)
308 }
309
310
311
312
313
314
// CanAddr reports whether the value's address can be obtained with Addr.
// Such values are called addressable. A value can be addressed if it is
// an element of a slice, the field of an addressable struct, the element
// of an addressable array, or the result of dereferencing a pointer.
// If CanAddr returns false, calling Addr will panic.
func (v Value) CanAddr() bool {
	return v.flag&flagAddr != 0
}
318
319
320
321
322
323
324 func (v Value) CanSet() bool {
325 return v.flag&(flagAddr|flagRO) == flagAddr
326 }
327
328
329
330
331
332
333
334
335
// Call calls the function v with the input arguments in.
// As in Go, each input argument must be assignable to the
// type of the function's corresponding input parameter.
// Call panics if v's Kind is not Func.
// As in Go, it panics if v was obtained by accessing
// unexported struct fields.
// It returns the output results as Values.
// If v is a variadic function, Call itself creates the variadic
// slice parameter from the corresponding arguments.
func (v Value) Call(in []Value) []Value {
	v.mustBe(Func)
	v.mustBeExported()
	return v.call("Call", in)
}
341
342
343
344
345
346
347
348
// CallSlice calls the variadic function v with the input arguments in,
// assigning the slice in[len(in)-1] to v's final variadic argument.
// For example, if len(in) == 3, v.CallSlice(in) represents the Go call
// v(in[0], in[1], in[2]...).
// CallSlice panics if v's Kind is not Func or if v is not variadic.
// It returns the output results as Values.
func (v Value) CallSlice(in []Value) []Value {
	v.mustBe(Func)
	v.mustBeExported()
	return v.call("CallSlice", in)
}
354
// callGC, when set by tests, forces a garbage collection at key points
// inside call/callReflect to exercise liveness of arguments and results.
var callGC bool

// debugReflectCall enables verbose ABI dumps in Value.call.
const debugReflectCall = false
358
// call is the shared implementation of Call and CallSlice: it validates
// the inputs, lays out an argument frame and register set according to
// the function's ABI, invokes the function, and wraps the results in
// Values. op is "Call" or "CallSlice" and is used only in panic messages.
func (v Value) call(op string, in []Value) []Value {
	// Get function pointer, type.
	t := (*funcType)(unsafe.Pointer(v.typ))
	var (
		fn       unsafe.Pointer
		rcvr     Value
		rcvrtype *rtype
	)
	if v.flag&flagMethod != 0 {
		rcvr = v
		rcvrtype, t, fn = methodReceiver(op, v, int(v.flag)>>flagMethodShift)
	} else if v.flag&flagIndir != 0 {
		fn = *(*unsafe.Pointer)(v.ptr)
	} else {
		fn = v.ptr
	}

	if fn == nil {
		panic("reflect.Value.Call: call of nil function")
	}

	isSlice := op == "CallSlice"
	n := t.NumIn()
	isVariadic := t.IsVariadic()
	if isSlice {
		if !isVariadic {
			panic("reflect: CallSlice of non-variadic function")
		}
		if len(in) < n {
			panic("reflect: CallSlice with too few input arguments")
		}
		if len(in) > n {
			panic("reflect: CallSlice with too many input arguments")
		}
	} else {
		if isVariadic {
			n--
		}
		if len(in) < n {
			panic("reflect: Call with too few input arguments")
		}
		if !isVariadic && len(in) > n {
			panic("reflect: Call with too many input arguments")
		}
	}
	for _, x := range in {
		if x.Kind() == Invalid {
			panic("reflect: " + op + " using zero Value argument")
		}
	}
	for i := 0; i < n; i++ {
		if xt, targ := in[i].Type(), t.In(i); !xt.AssignableTo(targ) {
			panic("reflect: " + op + " using " + xt.String() + " as type " + targ.String())
		}
	}
	if !isSlice && isVariadic {
		// Prepare slice for remaining variadic values.
		m := len(in) - n
		slice := MakeSlice(t.In(n), m, m)
		elem := t.In(n).Elem()
		for i := 0; i < m; i++ {
			x := in[n+i]
			if xt := x.Type(); !xt.AssignableTo(elem) {
				panic("reflect: cannot use " + xt.String() + " as type " + elem.String() + " in " + op)
			}
			slice.Index(i).Set(x)
		}
		origIn := in
		in = make([]Value, n+1)
		copy(in[:n], origIn)
		in[n] = slice
	}

	nin := len(in)
	if nin != t.NumIn() {
		panic("reflect.Value.Call: wrong argument count")
	}
	nout := t.NumOut()

	// Register argument space.
	var regArgs abi.RegArgs

	// Compute frame type. Note: abi here shadows the package name.
	frametype, framePool, abi := funcLayout(t, rcvrtype)

	// Allocate a chunk of memory for the stack frame if needed.
	var stackArgs unsafe.Pointer
	if frametype.size != 0 {
		if nout == 0 {
			stackArgs = framePool.Get().(unsafe.Pointer)
		} else {
			// Can't use pool if the function has return values:
			// the returned Values point into this frame, so its
			// lifetime is not scoped to this call.
			stackArgs = unsafe_New(frametype)
		}
	}
	frameSize := frametype.size

	if debugReflectCall {
		println("reflect.call", t.String())
		abi.dump()
	}

	// Copy inputs into args.

	// Handle the receiver, guaranteed to be exactly one word in size,
	// so it takes exactly one ABI step (register or stack).
	inStart := 0
	if rcvrtype != nil {
		switch st := abi.call.steps[0]; st.kind {
		case abiStepStack:
			storeRcvr(rcvr, stackArgs)
		case abiStepIntReg, abiStepPointer:
			// Even pointers can go into the uintptr slot because
			// they'll be kept alive by the Values referenced by
			// this frame.
			storeRcvr(rcvr, unsafe.Pointer(&regArgs.Ints[st.ireg]))
		case abiStepFloatReg:
			storeRcvr(rcvr, unsafe.Pointer(&regArgs.Floats[st.freg]))
		default:
			panic("unknown ABI parameter kind")
		}
		inStart = 1
	}

	// Handle arguments.
	for i, v := range in {
		v.mustBeExported()
		targ := t.In(i).(*rtype)
		// Convert v to the parameter's type (may allocate).
		v = v.assignTo("reflect.Value.Call", targ, nil)
	stepsLoop:
		for _, st := range abi.call.stepsForValue(i + inStart) {
			switch st.kind {
			case abiStepStack:
				// Copy values to the "stack."
				addr := add(stackArgs, st.stkOff, "precomputed stack arg offset")
				if v.flag&flagIndir != 0 {
					typedmemmove(targ, addr, v.ptr)
				} else {
					*(*unsafe.Pointer)(addr) = v.ptr
				}
				// There's only one step for a stack-allocated value.
				break stepsLoop
			case abiStepIntReg, abiStepPointer:
				// Copy values to "integer registers."
				if v.flag&flagIndir != 0 {
					offset := add(v.ptr, st.offset, "precomputed value offset")
					memmove(unsafe.Pointer(&regArgs.Ints[st.ireg]), offset, st.size)
				} else {
					if st.kind == abiStepPointer {
						// Duplicate this pointer in the pointer area of
						// the register space so the GC can see it;
						// otherwise this might be the last reference
						// to v.ptr.
						regArgs.Ptrs[st.ireg] = v.ptr
					}
					regArgs.Ints[st.ireg] = uintptr(v.ptr)
				}
			case abiStepFloatReg:
				// Copy values to "float registers."
				if v.flag&flagIndir == 0 {
					panic("attempted to copy pointer to FP register")
				}
				offset := add(v.ptr, st.offset, "precomputed value offset")
				memmove(unsafe.Pointer(&regArgs.Floats[st.freg]), offset, st.size)
			default:
				panic("unknown ABI part kind")
			}
		}
	}
	// Account for caller-reserved spill space.
	frameSize = align(frameSize, ptrSize)
	frameSize += abi.spill

	// Mark pointers in registers for the return path.
	regArgs.ReturnIsPtr = abi.outRegPtrs

	// Call.
	call(frametype, fn, stackArgs, uint32(frametype.size), uint32(abi.retOffset), uint32(frameSize), &regArgs)

	// For testing; see callGC.
	if callGC {
		runtime.GC()
	}

	var ret []Value
	if nout == 0 {
		if stackArgs != nil {
			typedmemclr(frametype, stackArgs)
			framePool.Put(stackArgs)
		}
	} else {
		if stackArgs != nil {
			// Zero the now unused input area of args, because the
			// Values returned by this function contain pointers to
			// the args object and will thus keep it alive indefinitely.
			typedmemclrpartial(frametype, stackArgs, 0, abi.retOffset)
		}

		// Wrap Values around the return values in args.
		ret = make([]Value, nout)
		for i := 0; i < nout; i++ {
			tv := t.Out(i)
			if tv.Size() == 0 {
				// For a zero-sized return value, args+off may point to
				// the next object. Return the zero value instead.
				ret[i] = Zero(tv)
				continue
			}
			steps := abi.ret.stepsForValue(i)
			if st := steps[0]; st.kind == abiStepStack {
				// This value is on the stack. If part of a value is
				// stack-allocated, the entire value is, per the ABI,
				// so just make an indirection into the allocated frame.
				fl := flagIndir | flag(tv.Kind())
				ret[i] = Value{tv.common(), add(stackArgs, st.stkOff, "tv.Size() != 0"), fl}
				// Note: this does introduce false sharing between
				// results - if any result is live, they all are.
				continue
			}

			// Handle pointers passed in registers.
			if !ifaceIndir(tv.common()) {
				// Pointer-valued data gets put directly into v.ptr.
				if steps[0].kind != abiStepPointer {
					print("kind=", steps[0].kind, ", type=", tv.String(), "\n")
					panic("mismatch between ABI description and types")
				}
				ret[i] = Value{tv.common(), regArgs.Ptrs[steps[0].ireg], flag(tv.Kind())}
				continue
			}

			// All that's left is values passed in registers that we
			// need to allocate space for and copy values back into.
			s := unsafe_New(tv.common())
			for _, st := range steps {
				switch st.kind {
				case abiStepIntReg:
					offset := add(s, st.offset, "precomputed value offset")
					memmove(offset, unsafe.Pointer(&regArgs.Ints[st.ireg]), st.size)
				case abiStepPointer:
					s := add(s, st.offset, "precomputed value offset")
					*((*unsafe.Pointer)(s)) = regArgs.Ptrs[st.ireg]
				case abiStepFloatReg:
					offset := add(s, st.offset, "precomputed value offset")
					memmove(offset, unsafe.Pointer(&regArgs.Floats[st.freg]), st.size)
				case abiStepStack:
					panic("register-based return value has stack component")
				default:
					panic("unknown ABI part kind")
				}
			}
			ret[i] = Value{tv.common(), s, flagIndir | flag(tv.Kind())}
		}
	}

	return ret
}
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
// callReflect is the call implementation used by a function returned by
// MakeFunc. In many ways it is the opposite of the method Value.call
// above: call converts a call using Values into a call with a concrete
// argument frame, while callReflect converts a call with a concrete
// argument frame into a call using Values.
//
// ctxt is the "closure" generated by MakeFunc.
// frame is a pointer to the arguments to that closure on the stack.
// retValid points to a boolean which should be set when the results
// section of frame is set.
// regs contains the argument values passed in registers and will contain
// the values returned from ctxt.fn in registers.
func callReflect(ctxt *makeFuncImpl, frame unsafe.Pointer, retValid *bool, regs *abi.RegArgs) {
	if callGC {
		// Call GC upon entry during testing.
		// Getting our stack scanned here is the biggest hazard, because
		// our caller (makeFuncStub) could have failed to place the last
		// pointer to a value in regs' pointer space, in which case it
		// won't be visible to the GC.
		runtime.GC()
	}
	ftyp := ctxt.ftyp
	f := ctxt.fn

	_, _, abi := funcLayout(ftyp, nil)

	// Copy arguments into Values.
	ptr := frame
	in := make([]Value, 0, int(ftyp.inCount))
	for i, typ := range ftyp.in() {
		if typ.Size() == 0 {
			in = append(in, Zero(typ))
			continue
		}
		v := Value{typ, nil, flag(typ.Kind())}
		steps := abi.call.stepsForValue(i)
		if st := steps[0]; st.kind == abiStepStack {
			if ifaceIndir(typ) {
				// value cannot be inlined in interface data.
				// Must make a copy, because f might keep a reference to it,
				// and we cannot let f keep a reference to the stack frame
				// after this function returns, not even a read-only one.
				v.ptr = unsafe_New(typ)
				if typ.size > 0 {
					typedmemmove(typ, v.ptr, add(ptr, st.stkOff, "typ.size > 0"))
				}
				v.flag |= flagIndir
			} else {
				v.ptr = *(*unsafe.Pointer)(add(ptr, st.stkOff, "1-ptr"))
			}
		} else {
			if ifaceIndir(typ) {
				// The value was passed in registers; allocate space
				// for it and reassemble it there.
				v.flag |= flagIndir
				v.ptr = unsafe_New(typ)
				for _, st := range steps {
					switch st.kind {
					case abiStepIntReg:
						offset := add(v.ptr, st.offset, "precomputed value offset")
						memmove(offset, unsafe.Pointer(&regs.Ints[st.ireg]), st.size)
					case abiStepPointer:
						s := add(v.ptr, st.offset, "precomputed value offset")
						*((*unsafe.Pointer)(s)) = regs.Ptrs[st.ireg]
					case abiStepFloatReg:
						offset := add(v.ptr, st.offset, "precomputed value offset")
						memmove(offset, unsafe.Pointer(&regs.Floats[st.freg]), st.size)
					case abiStepStack:
						panic("register-based return value has stack component")
					default:
						panic("unknown ABI part kind")
					}
				}
			} else {
				// Pointer-valued data gets put directly into v.ptr.
				if steps[0].kind != abiStepPointer {
					print("kind=", steps[0].kind, ", type=", typ.String(), "\n")
					panic("mismatch between ABI description and types")
				}
				v.ptr = regs.Ptrs[steps[0].ireg]
			}
		}
		in = append(in, v)
	}

	// Call underlying function.
	out := f(in)
	numOut := ftyp.NumOut()
	if len(out) != numOut {
		panic("reflect: wrong return count from function created by MakeFunc")
	}

	// Copy results back into the argument frame and register space.
	if numOut > 0 {
		for i, typ := range ftyp.out() {
			v := out[i]
			if v.typ == nil {
				panic("reflect: function created by MakeFunc using " + funcName(f) +
					" returned zero Value")
			}
			if v.flag&flagRO != 0 {
				panic("reflect: function created by MakeFunc using " + funcName(f) +
					" returned value obtained from unexported field")
			}
			if typ.size == 0 {
				continue
			}

			// Convert v to type typ if v is assignable to a variable
			// of type typ in the language spec.
			v = v.assignTo("reflect.MakeFunc", typ, nil)
		stepsLoop:
			for _, st := range abi.ret.stepsForValue(i) {
				switch st.kind {
				case abiStepStack:
					// Copy values to the "stack."
					addr := add(ptr, st.stkOff, "precomputed stack arg offset")
					// Do not use write barriers: the stack space used
					// for this call is not adequately zeroed, and we
					// are careful to keep the arguments alive until we
					// return to makeFuncStub's caller.
					if v.flag&flagIndir != 0 {
						memmove(addr, v.ptr, st.size)
					} else {
						// This case must be a pointer type.
						*(*uintptr)(addr) = uintptr(v.ptr)
					}
					// There's only one step for a stack-allocated value.
					break stepsLoop
				case abiStepIntReg, abiStepPointer:
					// Copy values to "integer registers."
					if v.flag&flagIndir != 0 {
						offset := add(v.ptr, st.offset, "precomputed value offset")
						memmove(unsafe.Pointer(&regs.Ints[st.ireg]), offset, st.size)
					} else {
						// Only populate the Ints space on the return path.
						// This is safe because out is kept alive until the
						// end of this function, and the return path through
						// makeFuncStub has no preemption, so these pointers
						// are always visible to the GC.
						regs.Ints[st.ireg] = uintptr(v.ptr)
					}
				case abiStepFloatReg:
					// Copy values to "float registers."
					if v.flag&flagIndir == 0 {
						panic("attempted to copy pointer to FP register")
					}
					offset := add(v.ptr, st.offset, "precomputed value offset")
					memmove(unsafe.Pointer(&regs.Floats[st.freg]), offset, st.size)
				default:
					panic("unknown ABI part kind")
				}
			}
		}
	}

	// Announce that the return values are valid.
	// After this point the runtime can depend on them being valid.
	*retValid = true

	// We have to make sure that the out slice lives at least until
	// the runtime knows the return values are valid. Otherwise, the
	// return values might not be scanned by anyone during a GC.
	// (out would be dead, and the return slots not yet alive.)
	runtime.KeepAlive(out)

	// The runtime expects to be able to find ctxt on the stack when it
	// finds our caller, makeFuncStub. Make sure it doesn't get
	// garbage collected.
	runtime.KeepAlive(ctxt)
}
826
827
828
829
830
831
832
833
// methodReceiver returns information about the receiver described by v.
// The Value v may or may not have the flagMethod bit set, so the kind
// cached in v.flag should not be used.
// The return value rcvrtype gives the method's actual receiver type.
// The return value t gives the method type signature (without the receiver).
// The return value fn is a pointer to the method code.
func methodReceiver(op string, v Value, methodIndex int) (rcvrtype *rtype, t *funcType, fn unsafe.Pointer) {
	i := methodIndex
	if v.typ.Kind() == Interface {
		// Interface receiver: look the method up in the itab.
		tt := (*interfaceType)(unsafe.Pointer(v.typ))
		if uint(i) >= uint(len(tt.methods)) {
			panic("reflect: internal error: invalid method index")
		}
		m := &tt.methods[i]
		if !tt.nameOff(m.name).isExported() {
			panic("reflect: " + op + " of unexported method")
		}
		iface := (*nonEmptyInterface)(v.ptr)
		if iface.itab == nil {
			panic("reflect: " + op + " of method on nil interface value")
		}
		rcvrtype = iface.itab.typ
		fn = unsafe.Pointer(&iface.itab.fun[i])
		t = (*funcType)(unsafe.Pointer(tt.typeOff(m.typ)))
	} else {
		// Concrete receiver: index into the exported method table.
		rcvrtype = v.typ
		ms := v.typ.exportedMethods()
		if uint(i) >= uint(len(ms)) {
			panic("reflect: internal error: invalid method index")
		}
		m := ms[i]
		if !v.typ.nameOff(m.name).isExported() {
			panic("reflect: " + op + " of unexported method")
		}
		ifn := v.typ.textOff(m.ifn)
		fn = unsafe.Pointer(&ifn)
		t = (*funcType)(unsafe.Pointer(v.typ.typeOff(m.mtyp)))
	}
	return
}
868
869
870
871
872
// storeRcvr stores at p the one-word encoding of the method receiver v.
// Reflect uses the "interface" calling convention for methods, which
// always uses one word to record the receiver.
func storeRcvr(v Value, p unsafe.Pointer) {
	t := v.typ
	if t.Kind() == Interface {
		// The interface data word becomes the receiver word.
		iface := (*nonEmptyInterface)(v.ptr)
		*(*unsafe.Pointer)(p) = iface.word
	} else if v.flag&flagIndir != 0 && !ifaceIndir(t) {
		// Pointer-shaped value stored indirectly: load it.
		*(*unsafe.Pointer)(p) = *(*unsafe.Pointer)(v.ptr)
	} else {
		*(*unsafe.Pointer)(p) = v.ptr
	}
}
885
886
887
// align rounds x up to the nearest multiple of n.
// n must be a power of two.
func align(x, n uintptr) uintptr {
	mask := n - 1
	return (x + mask) &^ mask
}
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
// callMethod is the call implementation used by a function returned by
// makeMethodValue (used by v.Method(i).Interface()). It is a streamlined
// version of the usual reflect call: the caller has already laid out the
// argument frame for us, so we don't have to deal with individual Values
// for each argument.
//
// ctxt is the "closure" generated by makeMethodValue.
// frame is a pointer to the arguments to that closure on the stack.
// retValid points to a boolean which should be set when the results
// section of frame is set.
// regs contains the argument values passed in registers and will contain
// the values returned from ctxt.fn in registers.
func callMethod(ctxt *methodValue, frame unsafe.Pointer, retValid *bool, regs *abi.RegArgs) {
	rcvr := ctxt.rcvr
	rcvrType, valueFuncType, methodFn := methodReceiver("call", rcvr, ctxt.method)

	// There are two ABIs at play here.
	//
	// methodValueCall was invoked with the ABI assuming there was no
	// receiver ("value ABI") and that's what frame and regs are holding.
	//
	// Meanwhile, we need to actually call the method with a receiver,
	// which has its own ABI ("method ABI"). Everything that follows is a
	// translation between the two.
	_, _, valueABI := funcLayout(valueFuncType, nil)
	valueFrame, valueRegs := frame, regs
	methodFrameType, methodFramePool, methodABI := funcLayout(valueFuncType, rcvrType)

	// Make a new frame that is one word bigger so we can store the
	// receiver. This space is used for both arguments and return values.
	methodFrame := methodFramePool.Get().(unsafe.Pointer)
	var methodRegs abi.RegArgs

	// Deal with the receiver. It's guaranteed to only be one word in size.
	if st := methodABI.call.steps[0]; st.kind == abiStepStack {
		// Only copy the receiver to the stack if the ABI says so.
		// Otherwise, it'll be in a register already.
		storeRcvr(rcvr, methodFrame)
	} else {
		// Put the receiver in a register.
		storeRcvr(rcvr, unsafe.Pointer(&methodRegs.Ints))
	}

	// Translate the rest of the arguments.
	for i, t := range valueFuncType.in() {
		valueSteps := valueABI.call.stepsForValue(i)
		methodSteps := methodABI.call.stepsForValue(i + 1) // +1 skips the receiver

		// Zero-sized types are trivial: nothing to do.
		if len(valueSteps) == 0 {
			if len(methodSteps) != 0 {
				panic("method ABI and value ABI do not align")
			}
			continue
		}

		// Four cases to handle per argument:
		// stack->stack, stack->registers, registers->stack,
		// and registers->registers.

		if vStep := valueSteps[0]; vStep.kind == abiStepStack {
			mStep := methodSteps[0]
			// Handle stack -> stack translation.
			if mStep.kind == abiStepStack {
				if vStep.size != mStep.size {
					panic("method ABI and value ABI do not align")
				}
				typedmemmove(t,
					add(methodFrame, mStep.stkOff, "precomputed stack offset"),
					add(valueFrame, vStep.stkOff, "precomputed stack offset"))
				continue
			}
			// Handle stack -> register translation.
			for _, mStep := range methodSteps {
				from := add(valueFrame, vStep.stkOff+mStep.offset, "precomputed stack offset")
				switch mStep.kind {
				case abiStepPointer:
					// Do the pointer copy directly so we get a write barrier.
					methodRegs.Ptrs[mStep.ireg] = *(*unsafe.Pointer)(from)
					fallthrough // the raw bits must land in Ints too
				case abiStepIntReg:
					memmove(unsafe.Pointer(&methodRegs.Ints[mStep.ireg]), from, mStep.size)
				case abiStepFloatReg:
					memmove(unsafe.Pointer(&methodRegs.Floats[mStep.freg]), from, mStep.size)
				default:
					panic("unexpected method step")
				}
			}
			continue
		}
		// Handle register -> stack translation.
		if mStep := methodSteps[0]; mStep.kind == abiStepStack {
			for _, vStep := range valueSteps {
				to := add(methodFrame, mStep.stkOff+vStep.offset, "precomputed stack offset")
				switch vStep.kind {
				case abiStepPointer:
					// Do the pointer copy directly so we get a write barrier.
					*(*unsafe.Pointer)(to) = valueRegs.Ptrs[vStep.ireg]
				case abiStepIntReg:
					memmove(to, unsafe.Pointer(&valueRegs.Ints[vStep.ireg]), vStep.size)
				case abiStepFloatReg:
					memmove(to, unsafe.Pointer(&valueRegs.Floats[vStep.freg]), vStep.size)
				default:
					panic("unexpected value step")
				}
			}
			continue
		}
		// Handle register -> register translation.
		if len(valueSteps) != len(methodSteps) {
			// Because it's the same type for the value, and it's assigned
			// to registers both times, it should always take up the same
			// number of registers for each ABI.
			panic("method ABI and value ABI don't align")
		}
		for i, vStep := range valueSteps {
			mStep := methodSteps[i]
			if mStep.kind != vStep.kind {
				panic("method ABI and value ABI don't align")
			}
			switch vStep.kind {
			case abiStepPointer:
				// Copy this too, so we get a write barrier.
				methodRegs.Ptrs[mStep.ireg] = valueRegs.Ptrs[vStep.ireg]
				fallthrough
			case abiStepIntReg:
				methodRegs.Ints[mStep.ireg] = valueRegs.Ints[vStep.ireg]
			case abiStepFloatReg:
				methodRegs.Floats[mStep.freg] = valueRegs.Floats[vStep.freg]
			default:
				panic("unexpected value step")
			}
		}
	}

	methodFrameSize := methodFrameType.size
	// Account for caller-reserved spill space.
	methodFrameSize = align(methodFrameSize, ptrSize)
	methodFrameSize += methodABI.spill

	// Mark pointers in registers for the return path.
	methodRegs.ReturnIsPtr = methodABI.outRegPtrs

	// Call: copies the arguments from scratch to the stack, calls fn,
	// and then copies the results back into scratch.
	call(methodFrameType, methodFn, methodFrame, uint32(methodFrameType.size), uint32(methodABI.retOffset), uint32(methodFrameSize), &methodRegs)

	// Copy the return values: both ABIs have an identical return value
	// ABI (the types are identical), so register results can simply be
	// copied over, and stack results only need their offsets translated
	// between the differently-sized frames.
	if valueRegs != nil {
		*valueRegs = methodRegs
	}
	if retSize := methodFrameType.size - methodABI.retOffset; retSize > 0 {
		valueRet := add(valueFrame, valueABI.retOffset, "valueFrame's size > retOffset")
		methodRet := add(methodFrame, methodABI.retOffset, "methodFrame's size > retOffset")
		// Copy return values; not on the heap, so no write barriers needed.
		memmove(valueRet, methodRet, retSize)
	}

	// Announce that the return values are valid.
	// After this point the runtime can depend on them being valid.
	*retValid = true

	// Clean up methodFrame; we can't do this later because this memory
	// is returned to the frame pool and may be reused.
	typedmemclr(methodFrameType, methodFrame)
	methodFramePool.Put(methodFrame)

	// See the corresponding comment in callReflect.
	runtime.KeepAlive(ctxt)

	// Keep valueRegs alive because it may hold live pointer results.
	runtime.KeepAlive(valueRegs)
}
1094
1095
1096 func funcName(f func([]Value) []Value) string {
1097 pc := *(*uintptr)(unsafe.Pointer(&f))
1098 rf := runtime.FuncForPC(pc)
1099 if rf != nil {
1100 return rf.Name()
1101 }
1102 return "closure"
1103 }
1104
1105
1106
// Cap returns v's capacity.
// It panics if v's Kind is not Array, Chan, or Slice.
func (v Value) Cap() int {
	k := v.kind()
	switch k {
	case Array:
		return v.typ.Len()
	case Chan:
		return chancap(v.pointer())
	case Slice:
		// Slice is always bigger than a word; assume flagIndir.
		return (*unsafeheader.Slice)(v.ptr).Cap
	}
	panic(&ValueError{"reflect.Value.Cap", v.kind()})
}
1120
1121
1122
// Close closes the channel v.
// It panics if v's Kind is not Chan.
func (v Value) Close() {
	v.mustBe(Chan)
	v.mustBeExported()
	chanclose(v.pointer())
}
1128
1129
1130
// Complex returns v's underlying value, as a complex128.
// It panics if v's Kind is not Complex64 or Complex128.
func (v Value) Complex() complex128 {
	k := v.kind()
	switch k {
	case Complex64:
		return complex128(*(*complex64)(v.ptr))
	case Complex128:
		return *(*complex128)(v.ptr)
	}
	panic(&ValueError{"reflect.Value.Complex", v.kind()})
}
1141
1142
1143
1144
1145
// Elem returns the value that the interface v contains
// or that the pointer v points to.
// It panics if v's Kind is not Interface or Ptr.
// It returns the zero Value if v is nil.
func (v Value) Elem() Value {
	k := v.kind()
	switch k {
	case Interface:
		var eface interface{}
		if v.typ.NumMethod() == 0 {
			eface = *(*interface{})(v.ptr)
		} else {
			// Non-empty interface: convert through a one-method
			// interface layout to an empty interface.
			eface = (interface{})(*(*interface {
				M()
			})(v.ptr))
		}
		x := unpackEface(eface)
		if x.flag != 0 {
			// Propagate read-only status from v to the element.
			x.flag |= v.flag.ro()
		}
		return x
	case Ptr:
		ptr := v.ptr
		if v.flag&flagIndir != 0 {
			ptr = *(*unsafe.Pointer)(ptr)
		}
		// The returned value's address is v's value.
		if ptr == nil {
			return Value{}
		}
		tt := (*ptrType)(unsafe.Pointer(v.typ))
		typ := tt.elem
		// The pointee is addressable and stored indirectly.
		fl := v.flag&flagRO | flagIndir | flagAddr
		fl |= flag(typ.Kind())
		return Value{typ, ptr, fl}
	}
	panic(&ValueError{"reflect.Value.Elem", v.kind()})
}
1180
1181
1182
// Field returns the i'th field of the struct v.
// It panics if v's Kind is not Struct or i is out of range.
func (v Value) Field(i int) Value {
	if v.kind() != Struct {
		panic(&ValueError{"reflect.Value.Field", v.kind()})
	}
	tt := (*structType)(unsafe.Pointer(v.typ))
	if uint(i) >= uint(len(tt.fields)) {
		panic("reflect: Field index out of range")
	}
	field := &tt.fields[i]
	typ := field.typ

	// Inherit permission bits from v, but clear flagEmbedRO.
	fl := v.flag&(flagStickyRO|flagIndir|flagAddr) | flag(typ.Kind())
	// Using an unexported field forces the appropriate read-only bit.
	if !field.name.isExported() {
		if field.embedded() {
			fl |= flagEmbedRO
		} else {
			fl |= flagStickyRO
		}
	}
	// Either flagIndir is set and v.ptr points at the struct,
	// or flagIndir is not set and v.ptr is the actual struct data.
	// In the former case, we want v.ptr + offset.
	// In the latter case, we must have field.offset = 0,
	// so v.ptr + field.offset is still the correct address.
	ptr := add(v.ptr, field.offset(), "same as non-reflect &v.field")
	return Value{typ, ptr, fl}
}
1212
1213
1214
// FieldByIndex returns the nested field corresponding to index.
// It panics if evaluation requires stepping through a nil
// pointer or a field that is not a struct.
func (v Value) FieldByIndex(index []int) Value {
	if len(index) == 1 {
		return v.Field(index[0])
	}
	v.mustBe(Struct)
	for i, x := range index {
		if i > 0 {
			// Auto-dereference intermediate pointers to embedded structs.
			if v.Kind() == Ptr && v.typ.Elem().Kind() == Struct {
				if v.IsNil() {
					panic("reflect: indirection through nil pointer to embedded struct")
				}
				v = v.Elem()
			}
		}
		v = v.Field(x)
	}
	return v
}
1233
1234
1235
1236
// FieldByName returns the struct field with the given name.
// It returns the zero Value if no field was found.
// It panics if v's Kind is not Struct.
func (v Value) FieldByName(name string) Value {
	v.mustBe(Struct)
	if f, ok := v.typ.FieldByName(name); ok {
		return v.FieldByIndex(f.Index)
	}
	return Value{}
}
1244
1245
1246
1247
1248
// FieldByNameFunc returns the struct field with a name
// that satisfies the match function.
// It panics if v's Kind is not Struct.
// It returns the zero Value if no field was found.
func (v Value) FieldByNameFunc(match func(string) bool) Value {
	if f, ok := v.typ.FieldByNameFunc(match); ok {
		return v.FieldByIndex(f.Index)
	}
	return Value{}
}
1255
1256
1257
// Float returns v's underlying value, as a float64.
// It panics if v's Kind is not Float32 or Float64.
func (v Value) Float() float64 {
	k := v.kind()
	switch k {
	case Float32:
		return float64(*(*float32)(v.ptr))
	case Float64:
		return *(*float64)(v.ptr)
	}
	panic(&ValueError{"reflect.Value.Float", v.kind()})
}
1268
// uint8Type is the type descriptor for uint8; Value.Index uses it to
// type the byte returned when indexing a string.
var uint8Type = TypeOf(uint8(0)).(*rtype)
1270
1271
1272
// Index returns v's i'th element.
// It panics if v's Kind is not Array, Slice, or String or i is out of range.
func (v Value) Index(i int) Value {
	switch v.kind() {
	case Array:
		tt := (*arrayType)(unsafe.Pointer(v.typ))
		if uint(i) >= uint(tt.len) {
			panic("reflect: array index out of range")
		}
		typ := tt.elem
		offset := uintptr(i) * typ.size

		// Either flagIndir is set and v.ptr points at the array,
		// or flagIndir is not set and v.ptr is the actual array data.
		// In the former case, we want v.ptr + offset.
		// In the latter case, we must be doing Index(0), so offset = 0,
		// so v.ptr + offset is still the correct address.
		val := add(v.ptr, offset, "same as &v[i], i < tt.len")
		fl := v.flag&(flagIndir|flagAddr) | v.flag.ro() | flag(typ.Kind()) // bits same as overall array
		return Value{typ, val, fl}

	case Slice:
		// Element flag same as Elem of Ptr.
		// Addressable, indirect, possibly read-only.
		s := (*unsafeheader.Slice)(v.ptr)
		if uint(i) >= uint(s.Len) {
			panic("reflect: slice index out of range")
		}
		tt := (*sliceType)(unsafe.Pointer(v.typ))
		typ := tt.elem
		val := arrayAt(s.Data, i, typ.size, "i < s.Len")
		fl := flagAddr | flagIndir | v.flag.ro() | flag(typ.Kind())
		return Value{typ, val, fl}

	case String:
		s := (*unsafeheader.String)(v.ptr)
		if uint(i) >= uint(s.Len) {
			panic("reflect: string index out of range")
		}
		// String bytes are not addressable.
		p := arrayAt(s.Data, i, 1, "i < s.Len")
		fl := v.flag.ro() | flag(Uint8) | flagIndir
		return Value{uint8Type, p, fl}
	}
	panic(&ValueError{"reflect.Value.Index", v.kind()})
}
1316
1317
1318
// Int returns v's underlying value, as an int64.
// It panics if v's Kind is not Int, Int8, Int16, Int32, or Int64.
func (v Value) Int() int64 {
	k := v.kind()
	p := v.ptr
	switch k {
	case Int:
		return int64(*(*int)(p))
	case Int8:
		return int64(*(*int8)(p))
	case Int16:
		return int64(*(*int16)(p))
	case Int32:
		return int64(*(*int32)(p))
	case Int64:
		return *(*int64)(p)
	}
	panic(&ValueError{"reflect.Value.Int", v.kind()})
}
1336
1337
// CanInterface reports whether Interface can be used without panicking.
func (v Value) CanInterface() bool {
	if v.flag == 0 {
		panic(&ValueError{"reflect.Value.CanInterface", Invalid})
	}
	return v.flag&flagRO == 0
}
1344
1345
1346
1347
1348
1349
// Interface returns v's current value as an interface{}.
// It is equivalent to:
//	var i interface{} = (v's underlying value)
// It panics if the Value was obtained by accessing
// unexported struct fields.
func (v Value) Interface() (i interface{}) {
	return valueInterface(v, true)
}
1353
// valueInterface is the implementation of Interface. When safe is true
// it refuses values obtained from unexported fields; unsafe callers
// inside the package may pass safe=false.
func valueInterface(v Value, safe bool) interface{} {
	if v.flag == 0 {
		panic(&ValueError{"reflect.Value.Interface", Invalid})
	}
	if safe && v.flag&flagRO != 0 {
		// Do not allow access to unexported values via Interface,
		// because they might be pointers that should not be
		// writable or methods or function that should not be callable.
		panic("reflect.Value.Interface: cannot return value obtained from unexported field or method")
	}
	if v.flag&flagMethod != 0 {
		v = makeMethodValue("Interface", v)
	}

	if v.kind() == Interface {
		// Special case: return the element inside the interface.
		// Empty interface has one layout, all interfaces with
		// methods have a second layout.
		if v.NumMethod() == 0 {
			return *(*interface{})(v.ptr)
		}
		return *(*interface {
			M()
		})(v.ptr)
	}

	// Non-interface value: pack it into an empty interface.
	return packEface(v)
}
1383
1384
1385
1386
1387
1388
1389
1390
1391
1392
// InterfaceData returns the interface v's value as a uintptr pair.
// It panics if v's Kind is not Interface.
//
// Deprecated: The memory representation of interface values is not
// compatible with any defined use of this pair.
func (v Value) InterfaceData() [2]uintptr {
	v.mustBe(Interface)
	// We treat this as a read operation, so we allow
	// it even for unexported data, because the caller
	// has to import "unsafe" to turn it into something
	// that can be abused.
	// Interface value is always bigger than a word; assume flagIndir.
	return *(*[2]uintptr)(v.ptr)
}
1402
1403
1404
1405
1406
1407
1408
1409
// IsNil reports whether its argument v is nil. The argument must be
// a chan, func, interface, map, pointer, or slice value; if it is
// not, IsNil panics. Note that IsNil is not always equivalent to a
// regular comparison with nil in Go. For example, if v was created
// by calling ValueOf with an uninitialized interface variable i,
// i==nil will be true but v.IsNil will panic as v will be the zero
// Value.
func (v Value) IsNil() bool {
	k := v.kind()
	switch k {
	case Chan, Func, Map, Ptr, UnsafePointer:
		// A method value is a bound closure and is never nil.
		if v.flag&flagMethod != 0 {
			return false
		}
		ptr := v.ptr
		if v.flag&flagIndir != 0 {
			ptr = *(*unsafe.Pointer)(ptr)
		}
		return ptr == nil
	case Interface, Slice:
		// Both interface and slice are nil if first word is 0.
		// Both are always bigger than a word; assume flagIndir.
		return *(*unsafe.Pointer)(v.ptr) == nil
	}
	panic(&ValueError{"reflect.Value.IsNil", v.kind()})
}
1429
1430
1431
1432
1433
1434
// IsValid reports whether v represents a value.
// It returns false if v is the zero Value.
// Most functions and methods never return an invalid Value.
// If one does, its documentation states the conditions explicitly.
func (v Value) IsValid() bool {
	return v.flag != 0
}
1438
1439
1440
// IsZero reports whether v is the zero value for its type.
// It panics if the Value is invalid.
func (v Value) IsZero() bool {
	switch v.kind() {
	case Bool:
		return !v.Bool()
	case Int, Int8, Int16, Int32, Int64:
		return v.Int() == 0
	case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
		return v.Uint() == 0
	case Float32, Float64:
		// Compare bit patterns so negative zero is not treated as zero.
		return math.Float64bits(v.Float()) == 0
	case Complex64, Complex128:
		c := v.Complex()
		return math.Float64bits(real(c)) == 0 && math.Float64bits(imag(c)) == 0
	case Array:
		// An array is zero iff every element is zero.
		for i := 0; i < v.Len(); i++ {
			if !v.Index(i).IsZero() {
				return false
			}
		}
		return true
	case Chan, Func, Interface, Map, Ptr, Slice, UnsafePointer:
		return v.IsNil()
	case String:
		return v.Len() == 0
	case Struct:
		// A struct is zero iff every field is zero.
		for i := 0; i < v.NumField(); i++ {
			if !v.Field(i).IsZero() {
				return false
			}
		}
		return true
	default:
		// This should never happen, but serves as a safeguard for
		// later additions to the Kind enumeration.
		panic(&ValueError{"reflect.Value.IsZero", v.Kind()})
	}
}
1478
1479
1480
// Kind returns v's Kind.
// If v is the zero Value (IsValid returns false), Kind returns Invalid.
func (v Value) Kind() Kind {
	return v.kind()
}
1484
1485
1486
// Len returns v's length.
// It panics if v's Kind is not Array, Chan, Map, Slice, or String.
func (v Value) Len() int {
	k := v.kind()
	switch k {
	case Array:
		tt := (*arrayType)(unsafe.Pointer(v.typ))
		return int(tt.len)
	case Chan:
		return chanlen(v.pointer())
	case Map:
		return maplen(v.pointer())
	case Slice:
		// Slice is bigger than a word; assume flagIndir.
		return (*unsafeheader.Slice)(v.ptr).Len
	case String:
		// String is bigger than a word; assume flagIndir.
		return (*unsafeheader.String)(v.ptr).Len
	}
	panic(&ValueError{"reflect.Value.Len", v.kind()})
}
1506
1507
1508
1509
1510
// MapIndex returns the value associated with key in the map v.
// It panics if v's Kind is not Map.
// It returns the zero Value if key is not found in the map or if v
// represents a nil map.
// As in Go, the key's value must be assignable to the map's key type.
func (v Value) MapIndex(key Value) Value {
	v.mustBe(Map)
	tt := (*mapType)(unsafe.Pointer(v.typ))

	// Do not require key to be exported, so that DeepEqual
	// and other programs can use all the keys returned by
	// MapKeys as arguments to MapIndex. If either the map
	// or the key is unexported, though, the result will be
	// considered unexported. This is consistent with the
	// behavior for structs, which allow read but not write
	// of unexported fields.
	key = key.assignTo("reflect.Value.MapIndex", tt.key, nil)

	var k unsafe.Pointer
	if key.flag&flagIndir != 0 {
		k = key.ptr
	} else {
		k = unsafe.Pointer(&key.ptr)
	}
	e := mapaccess(v.typ, v.pointer(), k)
	if e == nil {
		return Value{}
	}
	typ := tt.elem
	fl := (v.flag | key.flag).ro()
	fl |= flag(typ.Kind())
	return copyVal(typ, fl, e)
}
1539
1540
1541
1542
1543
// MapKeys returns a slice containing all the keys present in the map,
// in unspecified order.
// It panics if v's Kind is not Map.
// It returns an empty slice if v represents a nil map.
func (v Value) MapKeys() []Value {
	v.mustBe(Map)
	tt := (*mapType)(unsafe.Pointer(v.typ))
	keyType := tt.key

	fl := v.flag.ro() | flag(keyType.Kind())

	m := v.pointer()
	mlen := int(0)
	if m != nil {
		mlen = maplen(m)
	}
	it := mapiterinit(v.typ, m)
	a := make([]Value, mlen)
	var i int
	for i = 0; i < len(a); i++ {
		key := mapiterkey(it)
		if key == nil {
			// Someone deleted an entry from the map since we
			// called maplen above. It's a data race, but nothing
			// we can do about it.
			break
		}
		a[i] = copyVal(keyType, fl, key)
		mapiternext(it)
	}
	// Trim in case the map shrank during iteration.
	return a[:i]
}
1572
1573
1574
// A MapIter is an iterator for ranging over a map.
// See Value.MapRange.
type MapIter struct {
	m  Value          // the map being iterated
	it unsafe.Pointer // runtime iterator state; nil until the first Next
}
1579
1580
// Key returns the key of the iterator's current map entry.
// It panics if Next has not been called or the iterator is exhausted.
func (it *MapIter) Key() Value {
	if it.it == nil {
		panic("MapIter.Key called before Next")
	}
	if mapiterkey(it.it) == nil {
		panic("MapIter.Key called on exhausted iterator")
	}

	t := (*mapType)(unsafe.Pointer(it.m.typ))
	ktype := t.key
	// Copy the key out so later map mutations don't affect it.
	return copyVal(ktype, it.m.flag.ro()|flag(ktype.Kind()), mapiterkey(it.it))
}
1593
1594
// Value returns the value of the iterator's current map entry.
// It panics if Next has not been called or the iterator is exhausted.
func (it *MapIter) Value() Value {
	if it.it == nil {
		panic("MapIter.Value called before Next")
	}
	if mapiterkey(it.it) == nil {
		panic("MapIter.Value called on exhausted iterator")
	}

	t := (*mapType)(unsafe.Pointer(it.m.typ))
	vtype := t.elem
	// Copy the element out so later map mutations don't affect it.
	return copyVal(vtype, it.m.flag.ro()|flag(vtype.Kind()), mapiterelem(it.it))
}
1607
1608
1609
1610
// Next advances the map iterator and reports whether there is another
// entry. It returns false when the iterator is exhausted; subsequent
// calls to Key, Value, or Next will panic.
func (it *MapIter) Next() bool {
	if it.it == nil {
		// First call: initialize the runtime iterator.
		it.it = mapiterinit(it.m.typ, it.m.pointer())
	} else {
		if mapiterkey(it.it) == nil {
			panic("MapIter.Next called on exhausted iterator")
		}
		mapiternext(it.it)
	}
	return mapiterkey(it.it) != nil
}
1622
1623
1624
1625
1626
1627
1628
1629
1630
1631
1632
1633
1634
1635
1636
1637
1638
1639 func (v Value) MapRange() *MapIter {
1640 v.mustBe(Map)
1641 return &MapIter{m: v}
1642 }
1643
1644
1645
// copyVal returns a Value containing the map key or value at ptr,
// allocating a new variable as needed.
func copyVal(typ *rtype, fl flag, ptr unsafe.Pointer) Value {
	if ifaceIndir(typ) {
		// Copy result so future changes to the map
		// won't change the underlying value.
		c := unsafe_New(typ)
		typedmemmove(typ, c, ptr)
		return Value{typ, c, fl | flagIndir}
	}
	// Pointer-shaped value: load it directly.
	return Value{typ, *(*unsafe.Pointer)(ptr), fl}
}
1656
1657
1658
1659
1660
// Method returns a function value corresponding to v's i'th method.
// The arguments to a Call on the returned function should not include
// a receiver; the returned function will always use v as the receiver.
// Method panics if i is out of range or if v is a nil interface value.
func (v Value) Method(i int) Value {
	if v.typ == nil {
		panic(&ValueError{"reflect.Value.Method", Invalid})
	}
	if v.flag&flagMethod != 0 || uint(i) >= uint(v.typ.NumMethod()) {
		panic("reflect: Method index out of range")
	}
	if v.typ.Kind() == Interface && v.IsNil() {
		panic("reflect: Method on nil interface value")
	}
	// Keep the read-only and indirect bits of the receiver; mark the
	// result as a method value of kind Func with the method index
	// packed into the high bits of the flag.
	fl := v.flag.ro() | (v.flag & flagIndir)
	fl |= flag(Func)
	fl |= flag(i)<<flagMethodShift | flagMethod
	return Value{v.typ, v.ptr, fl}
}
1676
1677
1678 func (v Value) NumMethod() int {
1679 if v.typ == nil {
1680 panic(&ValueError{"reflect.Value.NumMethod", Invalid})
1681 }
1682 if v.flag&flagMethod != 0 {
1683 return 0
1684 }
1685 return v.typ.NumMethod()
1686 }
1687
1688
1689
1690
1691
1692
1693 func (v Value) MethodByName(name string) Value {
1694 if v.typ == nil {
1695 panic(&ValueError{"reflect.Value.MethodByName", Invalid})
1696 }
1697 if v.flag&flagMethod != 0 {
1698 return Value{}
1699 }
1700 m, ok := v.typ.MethodByName(name)
1701 if !ok {
1702 return Value{}
1703 }
1704 return v.Method(m.Index)
1705 }
1706
1707
1708
// NumField returns the number of fields in the struct v.
// It panics if v's Kind is not Struct.
func (v Value) NumField() int {
	v.mustBe(Struct)
	tt := (*structType)(unsafe.Pointer(v.typ))
	return len(tt.fields)
}
1714
1715
1716
1717 func (v Value) OverflowComplex(x complex128) bool {
1718 k := v.kind()
1719 switch k {
1720 case Complex64:
1721 return overflowFloat32(real(x)) || overflowFloat32(imag(x))
1722 case Complex128:
1723 return false
1724 }
1725 panic(&ValueError{"reflect.Value.OverflowComplex", v.kind()})
1726 }
1727
1728
1729
1730 func (v Value) OverflowFloat(x float64) bool {
1731 k := v.kind()
1732 switch k {
1733 case Float32:
1734 return overflowFloat32(x)
1735 case Float64:
1736 return false
1737 }
1738 panic(&ValueError{"reflect.Value.OverflowFloat", v.kind()})
1739 }
1740
1741 func overflowFloat32(x float64) bool {
1742 if x < 0 {
1743 x = -x
1744 }
1745 return math.MaxFloat32 < x && x <= math.MaxFloat64
1746 }
1747
1748
1749
// OverflowInt reports whether the int64 x cannot be represented by v's type.
// It panics if v's Kind is not Int, Int8, Int16, Int32, or Int64.
func (v Value) OverflowInt(x int64) bool {
	k := v.kind()
	switch k {
	case Int, Int8, Int16, Int32, Int64:
		bitSize := v.typ.size * 8
		// Sign-extend the low bitSize bits back to 64 bits; if the
		// round trip changes x, x does not fit in bitSize bits.
		trunc := (x << (64 - bitSize)) >> (64 - bitSize)
		return x != trunc
	}
	panic(&ValueError{"reflect.Value.OverflowInt", v.kind()})
}
1760
1761
1762
// OverflowUint reports whether the uint64 x cannot be represented by v's type.
// It panics if v's Kind is not Uint, Uintptr, Uint8, Uint16, Uint32, or Uint64.
func (v Value) OverflowUint(x uint64) bool {
	k := v.kind()
	switch k {
	case Uint, Uintptr, Uint8, Uint16, Uint32, Uint64:
		bitSize := v.typ.size * 8
		// Zero-extend the low bitSize bits back to 64 bits; if the
		// round trip changes x, x does not fit in bitSize bits.
		trunc := (x << (64 - bitSize)) >> (64 - bitSize)
		return x != trunc
	}
	panic(&ValueError{"reflect.Value.OverflowUint", v.kind()})
}
1773
1774
1775
1776
1777
1778
1779
1780
1781
1782
1783
1784
1785
1786
1787
1788
1789
1790
1791
1792
// Pointer returns v's value as a uintptr.
// It returns uintptr instead of unsafe.Pointer so that
// code using reflect cannot obtain unsafe.Pointers
// without importing the unsafe package explicitly.
// It panics if v's Kind is not Chan, Func, Map, Ptr, Slice, or UnsafePointer.
//
// If v's Kind is Func, the returned pointer is an underlying
// code pointer, but not necessarily enough to identify a
// single function uniquely. The only guarantee is that the
// result is zero if and only if v is a nil func Value.
//
// If v's Kind is Slice, the returned pointer is to the first
// element of the slice. If the slice is nil the returned value
// is 0. If the slice is empty but non-nil the return value is non-zero.
func (v Value) Pointer() uintptr {
	k := v.kind()
	switch k {
	case Ptr:
		if v.typ.ptrdata == 0 {
			// Handle pointers to not-in-heap types directly, so we
			// never materialize such pointers as an unsafe.Pointer.
			// (Such pointers are always stored indirectly.)
			return *(*uintptr)(v.ptr)
		}
		fallthrough
	case Chan, Map, UnsafePointer:
		return uintptr(v.pointer())
	case Func:
		if v.flag&flagMethod != 0 {
			// As the doc comment says, the returned pointer is an
			// underlying code pointer but not necessarily enough to
			// identify a single function uniquely. All method values
			// created through reflect have the same underlying code
			// pointer, so their Pointers are equal. The function used
			// here must match the one used in makeMethodValue.
			f := methodValueCall
			return **(**uintptr)(unsafe.Pointer(&f))
		}
		p := v.pointer()
		// Non-nil func value points at data block.
		// First word of data block is actual code.
		if p != nil {
			p = *(*unsafe.Pointer)(p)
		}
		return uintptr(p)

	case Slice:
		return (*SliceHeader)(v.ptr).Data
	}
	panic(&ValueError{"reflect.Value.Pointer", v.kind()})
}
1832
1833
1834
1835
1836
1837
// Recv receives and returns a value from the channel v.
// It panics if v's Kind is not Chan.
// The receive blocks until a value is ready.
// The boolean value ok is true if the value x corresponds to a send
// on the channel, false if it is a zero value received because the
// channel is closed.
func (v Value) Recv() (x Value, ok bool) {
	v.mustBe(Chan)
	v.mustBeExported()
	return v.recv(false)
}
1843
1844
1845
// internal recv, possibly non-blocking (nb).
// v is known to be a channel.
func (v Value) recv(nb bool) (val Value, ok bool) {
	tt := (*chanType)(unsafe.Pointer(v.typ))
	if ChanDir(tt.dir)&RecvDir == 0 {
		panic("reflect: recv on send-only channel")
	}
	t := tt.elem
	val = Value{t, nil, flag(t.Kind())}
	var p unsafe.Pointer
	if ifaceIndir(t) {
		// Allocate storage for the runtime to receive into.
		p = unsafe_New(t)
		val.ptr = p
		val.flag |= flagIndir
	} else {
		// Pointer-shaped element: receive directly into val.ptr.
		p = unsafe.Pointer(&val.ptr)
	}
	selected, ok := chanrecv(v.pointer(), nb, p)
	if !selected {
		// Non-blocking receive found nothing ready.
		val = Value{}
	}
	return
}
1867
1868
1869
1870
// Send sends x on the channel v.
// It panics if v's kind is not Chan or if x's type is not the same type
// as v's element type.
// As in Go, x's value must be assignable to the channel's element type.
func (v Value) Send(x Value) {
	v.mustBe(Chan)
	v.mustBeExported()
	v.send(x, false)
}
1876
1877
1878
// internal send, possibly non-blocking.
// v is known to be a channel.
func (v Value) send(x Value, nb bool) (selected bool) {
	tt := (*chanType)(unsafe.Pointer(v.typ))
	if ChanDir(tt.dir)&SendDir == 0 {
		panic("reflect: send on recv-only channel")
	}
	x.mustBeExported()
	x = x.assignTo("reflect.Value.Send", tt.elem, nil)
	var p unsafe.Pointer
	if x.flag&flagIndir != 0 {
		p = x.ptr
	} else {
		// Pointer-shaped value: pass the address of the word itself.
		p = unsafe.Pointer(&x.ptr)
	}
	return chansend(v.pointer(), p, nb)
}
1894
1895
1896
1897
// Set assigns x to the value v.
// It panics if CanSet returns false.
// As in Go, x's value must be assignable to v's type.
func (v Value) Set(x Value) {
	v.mustBeAssignable()
	x.mustBeExported() // do not let unexported x leak
	var target unsafe.Pointer
	if v.kind() == Interface {
		// Allow assignTo to build the interface directly in place.
		target = v.ptr
	}
	x = x.assignTo("reflect.Set", v.typ, target)
	if x.flag&flagIndir != 0 {
		if x.ptr == unsafe.Pointer(&zeroVal[0]) {
			// x is the shared zero buffer (from Zero): clear instead of copying.
			typedmemclr(v.typ, v.ptr)
		} else {
			typedmemmove(v.typ, v.ptr, x.ptr)
		}
	} else {
		*(*unsafe.Pointer)(v.ptr) = x.ptr
	}
}
1916
1917
1918
// SetBool sets v's underlying value.
// It panics if v's Kind is not Bool or if CanSet() is false.
func (v Value) SetBool(x bool) {
	v.mustBeAssignable()
	v.mustBe(Bool)
	*(*bool)(v.ptr) = x
}
1924
1925
1926
// SetBytes sets v's underlying value.
// It panics if v's underlying value is not a slice of bytes.
func (v Value) SetBytes(x []byte) {
	v.mustBeAssignable()
	v.mustBe(Slice)
	if v.typ.Elem().Kind() != Uint8 {
		panic("reflect.Value.SetBytes of non-byte slice")
	}
	*(*[]byte)(v.ptr) = x
}
1935
1936
1937
// setRunes sets v's underlying value.
// It panics if v's underlying value is not a slice of runes (int32s).
func (v Value) setRunes(x []rune) {
	v.mustBeAssignable()
	v.mustBe(Slice)
	if v.typ.Elem().Kind() != Int32 {
		panic("reflect.Value.setRunes of non-rune slice")
	}
	*(*[]rune)(v.ptr) = x
}
1946
1947
1948
// SetComplex sets v's underlying value to x.
// It panics if v's Kind is not Complex64 or Complex128, or if CanSet() is false.
func (v Value) SetComplex(x complex128) {
	v.mustBeAssignable()
	switch k := v.kind(); k {
	default:
		panic(&ValueError{"reflect.Value.SetComplex", v.kind()})
	case Complex64:
		*(*complex64)(v.ptr) = complex64(x)
	case Complex128:
		*(*complex128)(v.ptr) = x
	}
}
1960
1961
1962
// SetFloat sets v's underlying value to x.
// It panics if v's Kind is not Float32 or Float64, or if CanSet() is false.
func (v Value) SetFloat(x float64) {
	v.mustBeAssignable()
	switch k := v.kind(); k {
	default:
		panic(&ValueError{"reflect.Value.SetFloat", v.kind()})
	case Float32:
		*(*float32)(v.ptr) = float32(x)
	case Float64:
		*(*float64)(v.ptr) = x
	}
}
1974
1975
1976
// SetInt sets v's underlying value to x.
// It panics if v's Kind is not Int, Int8, Int16, Int32, or Int64,
// or if CanSet() is false.
func (v Value) SetInt(x int64) {
	v.mustBeAssignable()
	switch k := v.kind(); k {
	default:
		panic(&ValueError{"reflect.Value.SetInt", v.kind()})
	case Int:
		*(*int)(v.ptr) = int(x)
	case Int8:
		*(*int8)(v.ptr) = int8(x)
	case Int16:
		*(*int16)(v.ptr) = int16(x)
	case Int32:
		*(*int32)(v.ptr) = int32(x)
	case Int64:
		*(*int64)(v.ptr) = x
	}
}
1994
1995
1996
1997
// SetLen sets v's length to n.
// It panics if v's Kind is not Slice or if n is negative or
// longer than the capacity of the slice.
func (v Value) SetLen(n int) {
	v.mustBeAssignable()
	v.mustBe(Slice)
	s := (*unsafeheader.Slice)(v.ptr)
	// The unsigned comparison rejects negative n as well as n > Cap.
	if uint(n) > uint(s.Cap) {
		panic("reflect: slice length out of range in SetLen")
	}
	s.Len = n
}
2007
2008
2009
2010
// SetCap sets v's capacity to n.
// It panics if v's Kind is not Slice or if n is smaller than the length or
// greater than the capacity of the slice.
func (v Value) SetCap(n int) {
	v.mustBeAssignable()
	v.mustBe(Slice)
	s := (*unsafeheader.Slice)(v.ptr)
	if n < s.Len || n > s.Cap {
		panic("reflect: slice capacity out of range in SetCap")
	}
	s.Cap = n
}
2020
2021
2022
2023
2024
2025
2026
// SetMapIndex sets the element associated with key in the map v to elem.
// It panics if v's Kind is not Map.
// If elem is the zero Value, SetMapIndex deletes the key from the map.
// Otherwise if v holds a nil map, SetMapIndex will panic.
// As in Go, key's elem must be assignable to the map's key type,
// and elem's value must be assignable to the map's elem type.
func (v Value) SetMapIndex(key, elem Value) {
	v.mustBe(Map)
	v.mustBeExported()
	key.mustBeExported()
	tt := (*mapType)(unsafe.Pointer(v.typ))
	key = key.assignTo("reflect.Value.SetMapIndex", tt.key, nil)
	var k unsafe.Pointer
	if key.flag&flagIndir != 0 {
		k = key.ptr
	} else {
		k = unsafe.Pointer(&key.ptr)
	}
	if elem.typ == nil {
		// Zero elem Value means delete.
		mapdelete(v.typ, v.pointer(), k)
		return
	}
	elem.mustBeExported()
	elem = elem.assignTo("reflect.Value.SetMapIndex", tt.elem, nil)
	var e unsafe.Pointer
	if elem.flag&flagIndir != 0 {
		e = elem.ptr
	} else {
		e = unsafe.Pointer(&elem.ptr)
	}
	mapassign(v.typ, v.pointer(), k, e)
}
2053
2054
2055
// SetUint sets v's underlying value to x.
// It panics if v's Kind is not Uint, Uintptr, Uint8, Uint16, Uint32,
// or Uint64, or if CanSet() is false.
func (v Value) SetUint(x uint64) {
	v.mustBeAssignable()
	switch k := v.kind(); k {
	default:
		panic(&ValueError{"reflect.Value.SetUint", v.kind()})
	case Uint:
		*(*uint)(v.ptr) = uint(x)
	case Uint8:
		*(*uint8)(v.ptr) = uint8(x)
	case Uint16:
		*(*uint16)(v.ptr) = uint16(x)
	case Uint32:
		*(*uint32)(v.ptr) = uint32(x)
	case Uint64:
		*(*uint64)(v.ptr) = x
	case Uintptr:
		*(*uintptr)(v.ptr) = uintptr(x)
	}
}
2075
2076
2077
// SetPointer sets the unsafe.Pointer value v to x.
// It panics if v's Kind is not UnsafePointer.
func (v Value) SetPointer(x unsafe.Pointer) {
	v.mustBeAssignable()
	v.mustBe(UnsafePointer)
	*(*unsafe.Pointer)(v.ptr) = x
}
2083
2084
2085
// SetString sets v's underlying value to x.
// It panics if v's Kind is not String or if CanSet() is false.
func (v Value) SetString(x string) {
	v.mustBeAssignable()
	v.mustBe(String)
	*(*string)(v.ptr) = x
}
2091
2092
2093
2094
// Slice returns v[i:j].
// It panics if v's Kind is not Array, Slice or String, or if v is an
// unaddressable array, or if the indexes are out of bounds.
func (v Value) Slice(i, j int) Value {
	var (
		cap  int
		typ  *sliceType
		base unsafe.Pointer
	)
	switch kind := v.kind(); kind {
	default:
		panic(&ValueError{"reflect.Value.Slice", v.kind()})

	case Array:
		if v.flag&flagAddr == 0 {
			panic("reflect.Value.Slice: slice of unaddressable array")
		}
		tt := (*arrayType)(unsafe.Pointer(v.typ))
		cap = int(tt.len)
		typ = (*sliceType)(unsafe.Pointer(tt.slice))
		base = v.ptr

	case Slice:
		typ = (*sliceType)(unsafe.Pointer(v.typ))
		s := (*unsafeheader.Slice)(v.ptr)
		base = s.Data
		cap = s.Cap

	case String:
		s := (*unsafeheader.String)(v.ptr)
		if i < 0 || j < i || j > s.Len {
			panic("reflect.Value.Slice: string slice index out of bounds")
		}
		var t unsafeheader.String
		if i < s.Len {
			t = unsafeheader.String{Data: arrayAt(s.Data, i, 1, "i < s.Len"), Len: j - i}
		}
		return Value{v.typ, unsafe.Pointer(&t), v.flag}
	}

	if i < 0 || j < i || j > cap {
		panic("reflect.Value.Slice: slice index out of bounds")
	}

	// Declare slice so that gc can see the base pointer in it.
	var x []unsafe.Pointer

	// Reinterpret as *unsafeheader.Slice to edit.
	s := (*unsafeheader.Slice)(unsafe.Pointer(&x))
	s.Len = j - i
	s.Cap = cap - i
	if cap-i > 0 {
		s.Data = arrayAt(base, i, typ.elem.Size(), "i < cap")
	} else {
		// do not advance pointer past end of slice (or past end of array)
		s.Data = base
	}

	fl := v.flag.ro() | flagIndir | flag(Slice)
	return Value{typ.common(), unsafe.Pointer(&x), fl}
}
2153
2154
2155
2156
// Slice3 is the 3-index form of the slice operation: it returns v[i:j:k].
// It panics if v's Kind is not Array or Slice, or if v is an unaddressable
// array, or if the indexes are out of bounds.
func (v Value) Slice3(i, j, k int) Value {
	var (
		cap  int
		typ  *sliceType
		base unsafe.Pointer
	)
	switch kind := v.kind(); kind {
	default:
		panic(&ValueError{"reflect.Value.Slice3", v.kind()})

	case Array:
		if v.flag&flagAddr == 0 {
			panic("reflect.Value.Slice3: slice of unaddressable array")
		}
		tt := (*arrayType)(unsafe.Pointer(v.typ))
		cap = int(tt.len)
		typ = (*sliceType)(unsafe.Pointer(tt.slice))
		base = v.ptr

	case Slice:
		typ = (*sliceType)(unsafe.Pointer(v.typ))
		s := (*unsafeheader.Slice)(v.ptr)
		base = s.Data
		cap = s.Cap
	}

	if i < 0 || j < i || k < j || k > cap {
		panic("reflect.Value.Slice3: slice index out of bounds")
	}

	// Declare slice so that the garbage collector
	// can see the base pointer in it.
	var x []unsafe.Pointer

	// Reinterpret as *unsafeheader.Slice to edit.
	s := (*unsafeheader.Slice)(unsafe.Pointer(&x))
	s.Len = j - i
	s.Cap = k - i
	if k-i > 0 {
		s.Data = arrayAt(base, i, typ.elem.Size(), "i < k <= cap")
	} else {
		// do not advance pointer past end of slice (or past end of array)
		s.Data = base
	}

	fl := v.flag.ro() | flagIndir | flag(Slice)
	return Value{typ.common(), unsafe.Pointer(&x), fl}
}
2205
2206
2207
2208
2209
2210
2211
2212 func (v Value) String() string {
2213 switch k := v.kind(); k {
2214 case Invalid:
2215 return "<invalid Value>"
2216 case String:
2217 return *(*string)(v.ptr)
2218 }
2219
2220
2221 return "<" + v.Type().String() + " Value>"
2222 }
2223
2224
2225
2226
2227
2228
// TryRecv attempts to receive a value from the channel v but will not block.
// It panics if v's Kind is not Chan.
// If the receive delivers a value, x is the transferred value and ok is true.
// If the receive cannot finish without blocking, x is the zero Value and ok is false.
// If the channel is closed, x is the zero value for the channel's element type and ok is false.
func (v Value) TryRecv() (x Value, ok bool) {
	v.mustBe(Chan)
	v.mustBeExported()
	return v.recv(true)
}
2234
2235
2236
2237
2238
// TrySend attempts to send x on the channel v but will not block.
// It panics if v's Kind is not Chan.
// It reports whether the value was sent.
// As in Go, x's value must be assignable to the channel's element type.
func (v Value) TrySend(x Value) bool {
	v.mustBe(Chan)
	v.mustBeExported()
	return v.send(x, true)
}
2244
2245
// Type returns v's type.
func (v Value) Type() Type {
	f := v.flag
	if f == 0 {
		panic(&ValueError{"reflect.Value.Type", Invalid})
	}
	if f&flagMethod == 0 {
		// Easy case: not a method value.
		return v.typ
	}

	// Method value.
	// v.typ describes the receiver, not the method type.
	i := int(v.flag) >> flagMethodShift
	if v.typ.Kind() == Interface {
		// Method on interface.
		tt := (*interfaceType)(unsafe.Pointer(v.typ))
		if uint(i) >= uint(len(tt.methods)) {
			panic("reflect: internal error: invalid method index")
		}
		m := &tt.methods[i]
		return v.typ.typeOff(m.typ)
	}
	// Method on concrete type.
	ms := v.typ.exportedMethods()
	if uint(i) >= uint(len(ms)) {
		panic("reflect: internal error: invalid method index")
	}
	m := ms[i]
	return v.typ.typeOff(m.mtyp)
}
2276
2277
2278
// Uint returns v's underlying value, as a uint64.
// It panics if v's Kind is not Uint, Uintptr, Uint8, Uint16, Uint32, or Uint64.
func (v Value) Uint() uint64 {
	k := v.kind()
	p := v.ptr
	switch k {
	case Uint:
		return uint64(*(*uint)(p))
	case Uint8:
		return uint64(*(*uint8)(p))
	case Uint16:
		return uint64(*(*uint16)(p))
	case Uint32:
		return uint64(*(*uint32)(p))
	case Uint64:
		return *(*uint64)(p)
	case Uintptr:
		return uint64(*(*uintptr)(p))
	}
	panic(&ValueError{"reflect.Value.Uint", v.kind()})
}
2298
2299
2300
2301
2302
2303
2304
2305
2306
// UnsafeAddr returns a pointer to v's data, as a uintptr.
// It is for advanced clients that also import the "unsafe" package.
// It panics if v is not addressable.
func (v Value) UnsafeAddr() uintptr {
	if v.typ == nil {
		panic(&ValueError{"reflect.Value.UnsafeAddr", Invalid})
	}
	if v.flag&flagAddr == 0 {
		panic("reflect.Value.UnsafeAddr of unaddressable value")
	}
	return uintptr(v.ptr)
}
2317
2318
2319
2320
2321
2322
2323
// StringHeader is the runtime representation of a string.
// It cannot be used safely or portably and its representation may
// change in a later release. Moreover, the Data field is not
// sufficient to guarantee the data it references will not be
// garbage collected, so programs must keep a separate, correctly
// typed pointer to the underlying data.
type StringHeader struct {
	Data uintptr
	Len  int
}
2328
2329
2330
2331
2332
2333
2334
// SliceHeader is the runtime representation of a slice.
// It cannot be used safely or portably and its representation may
// change in a later release. Moreover, the Data field is not
// sufficient to guarantee the data it references will not be
// garbage collected, so programs must keep a separate, correctly
// typed pointer to the underlying data.
type SliceHeader struct {
	Data uintptr
	Len  int
	Cap  int
}
2340
2341 func typesMustMatch(what string, t1, t2 Type) {
2342 if t1 != t2 {
2343 panic(what + ": " + t1.String() + " != " + t2.String())
2344 }
2345 }
2346
2347
2348
2349
2350
2351
2352
2353
// arrayAt returns the i-th element of p,
// an array whose elements are eltSize bytes wide.
// The array pointed at by p must have at least i+1 elements:
// it is invalid (but impossible to check here) to pass i >= len,
// because then the result will point outside the array.
// whySafe must explain why i < len. (Passing "i < len" is fine;
// the benefit is to surface this assumption at the call site.)
func arrayAt(p unsafe.Pointer, i int, eltSize uintptr, whySafe string) unsafe.Pointer {
	return add(p, uintptr(i)*eltSize, "i < len")
}
2357
2358
2359
// grow grows the slice s so that it can hold extra more values, allocating
// more capacity if needed. It also returns the old and new slice lengths.
func grow(s Value, extra int) (Value, int, int) {
	i0 := s.Len()
	i1 := i0 + extra
	if i1 < i0 {
		panic("reflect.Append: slice overflow")
	}
	m := s.Cap()
	if i1 <= m {
		// Enough capacity already; just extend the length.
		return s.Slice(0, i1), i0, i1
	}
	if m == 0 {
		m = extra
	} else {
		// Double small slices, grow large ones by 25%, mirroring the
		// runtime's append growth strategy.
		for m < i1 {
			if i0 < 1024 {
				m += m
			} else {
				m += m / 4
			}
		}
	}
	t := MakeSlice(s.Type(), i1, m)
	Copy(t, s)
	return t, i0, i1
}
2385
2386
2387
// Append appends the values x to a slice s and returns the resulting slice.
// As in Go, each x's value must be assignable to the slice's element type.
func Append(s Value, x ...Value) Value {
	s.mustBe(Slice)
	s, i0, i1 := grow(s, len(x))
	for i, j := i0, 0; i < i1; i, j = i+1, j+1 {
		s.Index(i).Set(x[j])
	}
	return s
}
2396
2397
2398
// AppendSlice appends a slice t to a slice s and returns the resulting slice.
// The slices s and t must have the same element type.
func AppendSlice(s, t Value) Value {
	s.mustBe(Slice)
	t.mustBe(Slice)
	typesMustMatch("reflect.AppendSlice", s.Type().Elem(), t.Type().Elem())
	s, i0, i1 := grow(s, t.Len())
	Copy(s.Slice(i0, i1), t)
	return s
}
2407
2408
2409
2410
2411
2412
2413
2414
// Copy copies the contents of src into dst until either
// dst has been filled or src has been exhausted.
// It returns the number of elements copied.
// Dst and src each must have kind Slice or Array, and
// dst and src must have the same element type.
//
// As a special case, src can have kind String if dst's element type is
// kind Uint8.
func Copy(dst, src Value) int {
	dk := dst.kind()
	if dk != Array && dk != Slice {
		panic(&ValueError{"reflect.Copy", dk})
	}
	if dk == Array {
		dst.mustBeAssignable()
	}
	dst.mustBeExported()

	sk := src.kind()
	var stringCopy bool
	if sk != Array && sk != Slice {
		// string -> []byte is the only other permitted pairing.
		stringCopy = sk == String && dst.typ.Elem().Kind() == Uint8
		if !stringCopy {
			panic(&ValueError{"reflect.Copy", sk})
		}
	}
	src.mustBeExported()

	de := dst.typ.Elem()
	if !stringCopy {
		se := src.typ.Elem()
		typesMustMatch("reflect.Copy", de, se)
	}

	// Build slice headers over both operands so typedslicecopy can do
	// one bulk, write-barriered copy.
	var ds, ss unsafeheader.Slice
	if dk == Array {
		ds.Data = dst.ptr
		ds.Len = dst.Len()
		ds.Cap = ds.Len
	} else {
		ds = *(*unsafeheader.Slice)(dst.ptr)
	}
	if sk == Array {
		ss.Data = src.ptr
		ss.Len = src.Len()
		ss.Cap = ss.Len
	} else if sk == Slice {
		ss = *(*unsafeheader.Slice)(src.ptr)
	} else {
		sh := *(*unsafeheader.String)(src.ptr)
		ss.Data = sh.Data
		ss.Len = sh.Len
		ss.Cap = sh.Len
	}

	return typedslicecopy(de.common(), ds, ss)
}
2464
2465
2466
2467 type runtimeSelect struct {
2468 dir SelectDir
2469 typ *rtype
2470 ch unsafe.Pointer
2471 val unsafe.Pointer
2472 }
2473
2474
2475
2476
2477
2478
2479 func rselect([]runtimeSelect) (chosen int, recvOK bool)
2480
2481
2482 type SelectDir int
2483
2484
2485
2486 const (
2487 _ SelectDir = iota
2488 SelectSend
2489 SelectRecv
2490 SelectDefault
2491 )
2492
2493
2494
2495
2496
2497
2498
2499
2500
2501
2502
2503
2504
2505
2506
2507
2508
2509
2510 type SelectCase struct {
2511 Dir SelectDir
2512 Chan Value
2513 Send Value
2514 }
2515
2516
2517
2518
2519
2520
2521
2522
2523
// Select executes a select operation described by the list of cases.
// Like the Go select statement, it blocks until at least one of the cases
// can proceed, makes a uniform pseudo-random choice,
// and then executes that case. It returns the index of the chosen case
// and, if that case was a receive operation, the value received and a
// boolean indicating whether the value corresponds to a send on the channel
// (as opposed to a zero value received because the channel is closed).
// Select supports a maximum of 65536 cases.
func Select(cases []SelectCase) (chosen int, recv Value, recvOK bool) {
	if len(cases) > 65536 {
		panic("reflect.Select: too many cases (max 65536)")
	}
	// NOTE: Do not trust that caller is not modifying cases data underfoot.
	// The range is safe because the caller cannot modify our copy of the len
	// and each iteration makes its own copy of the value c.
	var runcases []runtimeSelect
	if len(cases) > 4 {
		// Slice is heap allocated due to runtime dependent capacity.
		runcases = make([]runtimeSelect, len(cases))
	} else {
		// Slice can be stack allocated due to constant capacity.
		runcases = make([]runtimeSelect, len(cases), 4)
	}

	haveDefault := false
	for i, c := range cases {
		rc := &runcases[i]
		rc.dir = c.Dir
		switch c.Dir {
		default:
			panic("reflect.Select: invalid Dir")

		case SelectDefault: // default
			if haveDefault {
				panic("reflect.Select: multiple default cases")
			}
			haveDefault = true
			if c.Chan.IsValid() {
				panic("reflect.Select: default case has Chan value")
			}
			if c.Send.IsValid() {
				panic("reflect.Select: default case has Send value")
			}

		case SelectSend:
			ch := c.Chan
			if !ch.IsValid() {
				break // send on nil channel: block forever
			}
			ch.mustBe(Chan)
			ch.mustBeExported()
			tt := (*chanType)(unsafe.Pointer(ch.typ))
			if ChanDir(tt.dir)&SendDir == 0 {
				panic("reflect.Select: SendDir case using recv-only channel")
			}
			rc.ch = ch.pointer()
			rc.typ = &tt.rtype
			v := c.Send
			if !v.IsValid() {
				panic("reflect.Select: SendDir case missing Send value")
			}
			v.mustBeExported()
			v = v.assignTo("reflect.Select", tt.elem, nil)
			if v.flag&flagIndir != 0 {
				rc.val = v.ptr
			} else {
				rc.val = unsafe.Pointer(&v.ptr)
			}

		case SelectRecv:
			if c.Send.IsValid() {
				panic("reflect.Select: RecvDir case has Send value")
			}
			ch := c.Chan
			if !ch.IsValid() {
				break // recv on nil channel: block forever
			}
			ch.mustBe(Chan)
			ch.mustBeExported()
			tt := (*chanType)(unsafe.Pointer(ch.typ))
			if ChanDir(tt.dir)&RecvDir == 0 {
				panic("reflect.Select: RecvDir case using send-only channel")
			}
			rc.ch = ch.pointer()
			rc.typ = &tt.rtype
			// Scratch space for the runtime to receive into.
			rc.val = unsafe_New(tt.elem)
		}
	}

	chosen, recvOK = rselect(runcases)
	if runcases[chosen].dir == SelectRecv {
		tt := (*chanType)(unsafe.Pointer(runcases[chosen].typ))
		t := tt.elem
		p := runcases[chosen].val
		fl := flag(t.Kind())
		if ifaceIndir(t) {
			recv = Value{t, p, fl | flagIndir}
		} else {
			recv = Value{t, *(*unsafe.Pointer)(p), fl}
		}
	}
	return chosen, recv, recvOK
}
2619
2620
2623
2624
2625 func unsafe_New(*rtype) unsafe.Pointer
2626 func unsafe_NewArray(*rtype, int) unsafe.Pointer
2627
2628
2629
// MakeSlice creates a new zero-initialized slice value
// for the specified slice type, length, and capacity.
func MakeSlice(typ Type, len, cap int) Value {
	if typ.Kind() != Slice {
		panic("reflect.MakeSlice of non-slice type")
	}
	if len < 0 {
		panic("reflect.MakeSlice: negative len")
	}
	if cap < 0 {
		panic("reflect.MakeSlice: negative cap")
	}
	if len > cap {
		panic("reflect.MakeSlice: len > cap")
	}

	s := unsafeheader.Slice{Data: unsafe_NewArray(typ.Elem().(*rtype), cap), Len: len, Cap: cap}
	return Value{typ.(*rtype), unsafe.Pointer(&s), flagIndir | flag(Slice)}
}
2647
2648
// MakeChan creates a new channel with the specified type and buffer size.
func MakeChan(typ Type, buffer int) Value {
	if typ.Kind() != Chan {
		panic("reflect.MakeChan of non-chan type")
	}
	if buffer < 0 {
		panic("reflect.MakeChan: negative buffer size")
	}
	if typ.ChanDir() != BothDir {
		panic("reflect.MakeChan: unidirectional channel type")
	}
	t := typ.(*rtype)
	ch := makechan(t, buffer)
	return Value{t, ch, flag(Chan)}
}
2663
2664
// MakeMap creates a new map with the specified type.
func MakeMap(typ Type) Value {
	return MakeMapWithSize(typ, 0)
}
2668
2669
2670
// MakeMapWithSize creates a new map with the specified type
// and initial space for approximately n elements.
func MakeMapWithSize(typ Type, n int) Value {
	if typ.Kind() != Map {
		panic("reflect.MakeMapWithSize of non-map type")
	}
	t := typ.(*rtype)
	m := makemap(t, n)
	return Value{t, m, flag(Map)}
}
2679
2680
2681
2682
2683 func Indirect(v Value) Value {
2684 if v.Kind() != Ptr {
2685 return v
2686 }
2687 return v.Elem()
2688 }
2689
2690
2691
// ValueOf returns a new Value initialized to the concrete value
// stored in the interface i. ValueOf(nil) returns the zero Value.
func ValueOf(i interface{}) Value {
	if i == nil {
		return Value{}
	}

	// TODO: Maybe allow contents of a Value to live on the stack.
	// For now we make the contents always escape to the heap. It
	// makes life easier in a few places (see chanrecv/mapassign
	// comment below).
	escapes(i)

	return unpackEface(i)
}
2705
2706
2707
2708
2709
2710
// Zero returns a Value representing the zero value for the specified type.
// The result is different from the zero Value, which represents no value at all.
// For example, Zero(TypeOf(42)) returns a Value with Kind Int and value 0.
// The returned value is neither addressable nor settable.
func Zero(typ Type) Value {
	if typ == nil {
		panic("reflect: Zero(nil)")
	}
	t := typ.(*rtype)
	fl := flag(t.Kind())
	if ifaceIndir(t) {
		var p unsafe.Pointer
		if t.size <= maxZero {
			// Small zero values share the read-only zeroVal buffer.
			p = unsafe.Pointer(&zeroVal[0])
		} else {
			p = unsafe_New(t)
		}
		return Value{t, p, fl | flagIndir}
	}
	return Value{t, nil, fl}
}
2728
2729
2730 const maxZero = 1024
2731
2732
2733 var zeroVal [maxZero]byte
2734
2735
2736
// New returns a Value representing a pointer to a new zero value
// for the specified type. That is, the returned Value's Type is PtrTo(typ).
func New(typ Type) Value {
	if typ == nil {
		panic("reflect: New(nil)")
	}
	t := typ.(*rtype)
	pt := t.ptrTo()
	if ifaceIndir(pt) {
		// This is a pointer to a not-in-heap type.
		panic("reflect: New of type that may not be allocated in heap (possibly undefined cgo C type)")
	}
	ptr := unsafe_New(t)
	fl := flag(Ptr)
	return Value{pt, ptr, fl}
}
2751
2752
2753
// NewAt returns a Value representing a pointer to a value of the
// specified type, using p as that pointer.
func NewAt(typ Type, p unsafe.Pointer) Value {
	fl := flag(Ptr)
	t := typ.(*rtype)
	return Value{t.ptrTo(), p, fl}
}
2759
2760
2761
2762
2763
// assignTo returns a value v that can be assigned directly to typ.
// It panics if v is not assignable to typ.
// For a conversion to an interface type, target, if not nil,
// is a suggested scratch space to use.
// target must be initialized memory (or nil).
func (v Value) assignTo(context string, dst *rtype, target unsafe.Pointer) Value {
	if v.flag&flagMethod != 0 {
		v = makeMethodValue(context, v)
	}

	switch {
	case directlyAssignable(dst, v.typ):
		// Overwrite type so that they match.
		// Same memory layout, so no harm done.
		fl := v.flag&(flagAddr|flagIndir) | v.flag.ro()
		fl |= flag(dst.Kind())
		return Value{dst, v.ptr, fl}

	case implements(dst, v.typ):
		if target == nil {
			target = unsafe_New(dst)
		}
		if v.Kind() == Interface && v.IsNil() {
			// A nil ReadWriter passed to nil Reader is OK,
			// but using ifaceE2I below will panic.
			// Avoid the panic by returning a nil dst (e.g., Reader) explicitly.
			return Value{dst, nil, flag(Interface)}
		}
		x := valueInterface(v, false)
		if dst.NumMethod() == 0 {
			*(*interface{})(target) = x
		} else {
			ifaceE2I(dst, x, target)
		}
		return Value{dst, target, flagIndir | flag(Interface)}
	}

	// Failed.
	panic(context + ": value of type " + v.typ.String() + " is not assignable to type " + dst.String())
}
2799
2800
2801
2802
// Convert returns the value v converted to type t.
// If the usual Go conversion rules do not allow conversion
// of the value v to type t, or if converting v to type t panics, Convert panics.
func (v Value) Convert(t Type) Value {
	if v.flag&flagMethod != 0 {
		v = makeMethodValue("Convert", v)
	}
	op := convertOp(t.common(), v.typ)
	if op == nil {
		panic("reflect.Value.Convert: value of type " + v.typ.String() + " cannot be converted to type " + t.String())
	}
	return op(v, t)
}
2813
2814
2815
// CanConvert reports whether the value v can be converted to type t.
// If v.CanConvert(t) returns true then v.Convert(t) will not panic.
func (v Value) CanConvert(t Type) bool {
	vt := v.Type()
	if !vt.ConvertibleTo(t) {
		return false
	}
	// Currently the only conversion that is OK in terms of type
	// but that can panic depending on the value is converting
	// from slice to pointer-to-array.
	if vt.Kind() == Slice && t.Kind() == Ptr && t.Elem().Kind() == Array {
		n := t.Elem().Len()
		h := (*unsafeheader.Slice)(v.ptr)
		if n > h.Len {
			return false
		}
	}
	return true
}
2833
2834
2835
2836 func convertOp(dst, src *rtype) func(Value, Type) Value {
2837 switch src.Kind() {
2838 case Int, Int8, Int16, Int32, Int64:
2839 switch dst.Kind() {
2840 case Int, Int8, Int16, Int32, Int64, Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
2841 return cvtInt
2842 case Float32, Float64:
2843 return cvtIntFloat
2844 case String:
2845 return cvtIntString
2846 }
2847
2848 case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
2849 switch dst.Kind() {
2850 case Int, Int8, Int16, Int32, Int64, Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
2851 return cvtUint
2852 case Float32, Float64:
2853 return cvtUintFloat
2854 case String:
2855 return cvtUintString
2856 }
2857
2858 case Float32, Float64:
2859 switch dst.Kind() {
2860 case Int, Int8, Int16, Int32, Int64:
2861 return cvtFloatInt
2862 case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
2863 return cvtFloatUint
2864 case Float32, Float64:
2865 return cvtFloat
2866 }
2867
2868 case Complex64, Complex128:
2869 switch dst.Kind() {
2870 case Complex64, Complex128:
2871 return cvtComplex
2872 }
2873
2874 case String:
2875 if dst.Kind() == Slice && dst.Elem().PkgPath() == "" {
2876 switch dst.Elem().Kind() {
2877 case Uint8:
2878 return cvtStringBytes
2879 case Int32:
2880 return cvtStringRunes
2881 }
2882 }
2883
2884 case Slice:
2885 if dst.Kind() == String && src.Elem().PkgPath() == "" {
2886 switch src.Elem().Kind() {
2887 case Uint8:
2888 return cvtBytesString
2889 case Int32:
2890 return cvtRunesString
2891 }
2892 }
2893
2894
2895 if dst.Kind() == Ptr && dst.Elem().Kind() == Array && src.Elem() == dst.Elem().Elem() {
2896 return cvtSliceArrayPtr
2897 }
2898
2899 case Chan:
2900 if dst.Kind() == Chan && specialChannelAssignability(dst, src) {
2901 return cvtDirect
2902 }
2903 }
2904
2905
2906 if haveIdenticalUnderlyingType(dst, src, false) {
2907 return cvtDirect
2908 }
2909
2910
2911 if dst.Kind() == Ptr && dst.Name() == "" &&
2912 src.Kind() == Ptr && src.Name() == "" &&
2913 haveIdenticalUnderlyingType(dst.Elem().common(), src.Elem().common(), false) {
2914 return cvtDirect
2915 }
2916
2917 if implements(dst, src) {
2918 if src.Kind() == Interface {
2919 return cvtI2I
2920 }
2921 return cvtT2I
2922 }
2923
2924 return nil
2925 }
2926
2927
2928
2929 func makeInt(f flag, bits uint64, t Type) Value {
2930 typ := t.common()
2931 ptr := unsafe_New(typ)
2932 switch typ.size {
2933 case 1:
2934 *(*uint8)(ptr) = uint8(bits)
2935 case 2:
2936 *(*uint16)(ptr) = uint16(bits)
2937 case 4:
2938 *(*uint32)(ptr) = uint32(bits)
2939 case 8:
2940 *(*uint64)(ptr) = bits
2941 }
2942 return Value{typ, ptr, f | flagIndir | flag(typ.Kind())}
2943 }
2944
2945
2946
2947 func makeFloat(f flag, v float64, t Type) Value {
2948 typ := t.common()
2949 ptr := unsafe_New(typ)
2950 switch typ.size {
2951 case 4:
2952 *(*float32)(ptr) = float32(v)
2953 case 8:
2954 *(*float64)(ptr) = v
2955 }
2956 return Value{typ, ptr, f | flagIndir | flag(typ.Kind())}
2957 }
2958
2959
2960 func makeFloat32(f flag, v float32, t Type) Value {
2961 typ := t.common()
2962 ptr := unsafe_New(typ)
2963 *(*float32)(ptr) = v
2964 return Value{typ, ptr, f | flagIndir | flag(typ.Kind())}
2965 }
2966
2967
2968
2969 func makeComplex(f flag, v complex128, t Type) Value {
2970 typ := t.common()
2971 ptr := unsafe_New(typ)
2972 switch typ.size {
2973 case 8:
2974 *(*complex64)(ptr) = complex64(v)
2975 case 16:
2976 *(*complex128)(ptr) = v
2977 }
2978 return Value{typ, ptr, f | flagIndir | flag(typ.Kind())}
2979 }
2980
2981 func makeString(f flag, v string, t Type) Value {
2982 ret := New(t).Elem()
2983 ret.SetString(v)
2984 ret.flag = ret.flag&^flagAddr | f
2985 return ret
2986 }
2987
2988 func makeBytes(f flag, v []byte, t Type) Value {
2989 ret := New(t).Elem()
2990 ret.SetBytes(v)
2991 ret.flag = ret.flag&^flagAddr | f
2992 return ret
2993 }
2994
2995 func makeRunes(f flag, v []rune, t Type) Value {
2996 ret := New(t).Elem()
2997 ret.setRunes(v)
2998 ret.flag = ret.flag&^flagAddr | f
2999 return ret
3000 }
3001
3002
3003
3004
3005
3006
3007
3008 func cvtInt(v Value, t Type) Value {
3009 return makeInt(v.flag.ro(), uint64(v.Int()), t)
3010 }
3011
3012
3013 func cvtUint(v Value, t Type) Value {
3014 return makeInt(v.flag.ro(), v.Uint(), t)
3015 }
3016
3017
3018 func cvtFloatInt(v Value, t Type) Value {
3019 return makeInt(v.flag.ro(), uint64(int64(v.Float())), t)
3020 }
3021
3022
3023 func cvtFloatUint(v Value, t Type) Value {
3024 return makeInt(v.flag.ro(), uint64(v.Float()), t)
3025 }
3026
3027
3028 func cvtIntFloat(v Value, t Type) Value {
3029 return makeFloat(v.flag.ro(), float64(v.Int()), t)
3030 }
3031
3032
3033 func cvtUintFloat(v Value, t Type) Value {
3034 return makeFloat(v.flag.ro(), float64(v.Uint()), t)
3035 }
3036
3037
3038 func cvtFloat(v Value, t Type) Value {
3039 if v.Type().Kind() == Float32 && t.Kind() == Float32 {
3040
3041
3042
3043 return makeFloat32(v.flag.ro(), *(*float32)(v.ptr), t)
3044 }
3045 return makeFloat(v.flag.ro(), v.Float(), t)
3046 }
3047
3048
3049 func cvtComplex(v Value, t Type) Value {
3050 return makeComplex(v.flag.ro(), v.Complex(), t)
3051 }
3052
3053
3054 func cvtIntString(v Value, t Type) Value {
3055 s := "\uFFFD"
3056 if x := v.Int(); int64(rune(x)) == x {
3057 s = string(rune(x))
3058 }
3059 return makeString(v.flag.ro(), s, t)
3060 }
3061
3062
3063 func cvtUintString(v Value, t Type) Value {
3064 s := "\uFFFD"
3065 if x := v.Uint(); uint64(rune(x)) == x {
3066 s = string(rune(x))
3067 }
3068 return makeString(v.flag.ro(), s, t)
3069 }
3070
3071
3072 func cvtBytesString(v Value, t Type) Value {
3073 return makeString(v.flag.ro(), string(v.Bytes()), t)
3074 }
3075
3076
3077 func cvtStringBytes(v Value, t Type) Value {
3078 return makeBytes(v.flag.ro(), []byte(v.String()), t)
3079 }
3080
3081
3082 func cvtRunesString(v Value, t Type) Value {
3083 return makeString(v.flag.ro(), string(v.runes()), t)
3084 }
3085
3086
3087 func cvtStringRunes(v Value, t Type) Value {
3088 return makeRunes(v.flag.ro(), []rune(v.String()), t)
3089 }
3090
3091
3092 func cvtSliceArrayPtr(v Value, t Type) Value {
3093 n := t.Elem().Len()
3094 h := (*unsafeheader.Slice)(v.ptr)
3095 if n > h.Len {
3096 panic("reflect: cannot convert slice with length " + itoa.Itoa(h.Len) + " to pointer to array with length " + itoa.Itoa(n))
3097 }
3098 return Value{t.common(), h.Data, v.flag&^(flagIndir|flagAddr|flagKindMask) | flag(Ptr)}
3099 }
3100
3101
3102 func cvtDirect(v Value, typ Type) Value {
3103 f := v.flag
3104 t := typ.common()
3105 ptr := v.ptr
3106 if f&flagAddr != 0 {
3107
3108 c := unsafe_New(t)
3109 typedmemmove(t, c, ptr)
3110 ptr = c
3111 f &^= flagAddr
3112 }
3113 return Value{t, ptr, v.flag.ro() | f}
3114 }
3115
3116
3117 func cvtT2I(v Value, typ Type) Value {
3118 target := unsafe_New(typ.common())
3119 x := valueInterface(v, false)
3120 if typ.NumMethod() == 0 {
3121 *(*interface{})(target) = x
3122 } else {
3123 ifaceE2I(typ.(*rtype), x, target)
3124 }
3125 return Value{typ.common(), target, v.flag.ro() | flagIndir | flag(Interface)}
3126 }
3127
3128
3129 func cvtI2I(v Value, typ Type) Value {
3130 if v.IsNil() {
3131 ret := Zero(typ)
3132 ret.flag |= v.flag.ro()
3133 return ret
3134 }
3135 return cvtT2I(v.Elem(), typ)
3136 }
3137
3138
3139 func chancap(ch unsafe.Pointer) int
3140 func chanclose(ch unsafe.Pointer)
3141 func chanlen(ch unsafe.Pointer) int
3142
3143
3144
3145
3146
3147
3148
3149
3150
3151
3152 func chanrecv(ch unsafe.Pointer, nb bool, val unsafe.Pointer) (selected, received bool)
3153
3154
3155 func chansend(ch unsafe.Pointer, val unsafe.Pointer, nb bool) bool
3156
3157 func makechan(typ *rtype, size int) (ch unsafe.Pointer)
3158 func makemap(t *rtype, cap int) (m unsafe.Pointer)
3159
3160
3161 func mapaccess(t *rtype, m unsafe.Pointer, key unsafe.Pointer) (val unsafe.Pointer)
3162
3163
3164 func mapassign(t *rtype, m unsafe.Pointer, key, val unsafe.Pointer)
3165
3166
3167 func mapdelete(t *rtype, m unsafe.Pointer, key unsafe.Pointer)
3168
3169
3170
3171
3172 func mapiterinit(t *rtype, m unsafe.Pointer) unsafe.Pointer
3173
3174
3175 func mapiterkey(it unsafe.Pointer) (key unsafe.Pointer)
3176
3177
3178 func mapiterelem(it unsafe.Pointer) (elem unsafe.Pointer)
3179
3180
3181 func mapiternext(it unsafe.Pointer)
3182
3183
3184 func maplen(m unsafe.Pointer) int
3185
3186
3187
3188
3189
3190
3191
3192
3193
3194
3195
3196
3197
3198
3199
3200
3201
3202
3203
3204
3205
3206
3207
3208
3209
3210
3211 func call(stackArgsType *rtype, f, stackArgs unsafe.Pointer, stackArgsSize, stackRetOffset, frameSize uint32, regArgs *abi.RegArgs)
3212
3213 func ifaceE2I(t *rtype, src interface{}, dst unsafe.Pointer)
3214
3215
3216
3217 func memmove(dst, src unsafe.Pointer, size uintptr)
3218
3219
3220
3221 func typedmemmove(t *rtype, dst, src unsafe.Pointer)
3222
3223
3224
3225
3226 func typedmemmovepartial(t *rtype, dst, src unsafe.Pointer, off, size uintptr)
3227
3228
3229
3230 func typedmemclr(t *rtype, ptr unsafe.Pointer)
3231
3232
3233
3234
3235 func typedmemclrpartial(t *rtype, ptr unsafe.Pointer, off, size uintptr)
3236
3237
3238
3239
3240 func typedslicecopy(elemType *rtype, dst, src unsafeheader.Slice) int
3241
3242
3243 func typehash(t *rtype, p unsafe.Pointer, h uintptr) uintptr
3244
3245
3246
3247
// Dummy annotation marking that the value x escapes,
// for use in cases where the reflect code is so clever that
// the compiler cannot follow.
func escapes(x interface{}) {
	if dummy.b {
		dummy.x = x
	}
}
3253
// dummy is never written at runtime (b stays false); it exists only so
// the compiler's escape analysis sees a potential store of x.
var dummy struct {
	b bool
	x interface{}
}
3258
View as plain text