@@ -206,6 +206,9 @@ struct bpf_prog *bpf_int_jit_compile(struct bpf_prog *fp)
 	cgctx.stack_size = round_up(fp->aux->stack_depth, 16);
 	cgctx.arena_vm_start = bpf_arena_get_kern_vm_start(fp->aux->arena);
 	cgctx.user_vm_start = bpf_arena_get_user_vm_start(fp->aux->arena);
+	cgctx.is_subprog = bpf_is_subprog(fp);
+	cgctx.exception_boundary = fp->aux->exception_boundary;
+	cgctx.exception_cb = fp->aux->exception_cb;
 
 	/* Scouting faux-generate pass 0 */
 	if (bpf_jit_build_body(fp, NULL, NULL, &cgctx, addrs, 0, false)) {
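For context, cgctx is the JIT's codegen state, and the three new fields mirror metadata the verifier records on the program. bpf_is_subprog() is a core helper from include/linux/bpf.h; as of recent kernels it reads roughly as below (note that an exception callback is itself treated as a subprog):

/* Paraphrased from include/linux/bpf.h; check your tree's exact copy. */
static inline bool bpf_is_subprog(const struct bpf_prog *prog)
{
	return prog->aux->func_idx || prog->aux->exception_cb;
}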
@@ -435,6 +438,16 @@ void bpf_jit_free(struct bpf_prog *fp)
 	bpf_prog_unlock_free(fp);
 }
 
+bool bpf_jit_supports_exceptions(void)
+{
+	return IS_ENABLED(CONFIG_PPC64);
+}
+
+bool bpf_jit_supports_subprog_tailcalls(void)
+{
+	return IS_ENABLED(CONFIG_PPC64);
+}
+
 bool bpf_jit_supports_kfunc_call(void)
 {
 	return true;
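Both helpers override weak stubs in kernel/bpf/core.c that return false, so the verifier only allows BPF exceptions and tail calls from subprogs when the arch JIT opts in. IS_ENABLED(CONFIG_PPC64) folds to a compile-time 1 or 0; here is a runnable, simplified userspace model of that macro trick (the real version in include/linux/kconfig.h also handles =m options):

#include <stdio.h>

#define CONFIG_PPC64 1			/* stand-in for a .config entry */

#define __ARG_PLACEHOLDER_1 0,
#define __take_second_arg(__ignored, val, ...) val
#define ____is_defined(arg1_or_junk) __take_second_arg(arg1_or_junk 1, 0)
#define ___is_defined(val) ____is_defined(__ARG_PLACEHOLDER_##val)
#define __is_defined(x) ___is_defined(x)	/* extra level expands x first */
#define IS_ENABLED(option) __is_defined(option)

int main(void)
{
	/* Prints 1; delete the CONFIG_PPC64 define and it prints 0. */
	printf("supports exceptions: %d\n", IS_ENABLED(CONFIG_PPC64));
	return 0;
}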
@@ -506,15 +519,15 @@ static int invoke_bpf_prog(u32 *image, u32 *ro_image, struct codegen_context *ct
506519
507520 /* __bpf_prog_enter(p, &bpf_tramp_run_ctx) */
508521 PPC_LI_ADDR (_R3 , p );
509- EMIT (PPC_RAW_MR (_R25 , _R3 ));
522+ EMIT (PPC_RAW_MR (_R26 , _R3 ));
510523 EMIT (PPC_RAW_ADDI (_R4 , _R1 , run_ctx_off ));
511524 ret = bpf_jit_emit_func_call_rel (image , ro_image , ctx ,
512525 (unsigned long )bpf_trampoline_enter (p ));
513526 if (ret )
514527 return ret ;
515528
516529 /* Remember prog start time returned by __bpf_prog_enter */
517- EMIT (PPC_RAW_MR (_R26 , _R3 ));
530+ EMIT (PPC_RAW_MR (_R27 , _R3 ));
518531
519532 /*
520533 * if (__bpf_prog_enter(p) == 0)
@@ -537,7 +550,7 @@ static int invoke_bpf_prog(u32 *image, u32 *ro_image, struct codegen_context *ct
 		image[ctx->idx] = ppc_inst_val(branch_insn);
 		ctx->idx++;
 	} else {
-		EMIT(PPC_RAW_LL(_R12, _R25, offsetof(struct bpf_prog, bpf_func)));
+		EMIT(PPC_RAW_LL(_R12, _R26, offsetof(struct bpf_prog, bpf_func)));
 		EMIT(PPC_RAW_MTCTR(_R12));
 		EMIT(PPC_RAW_BCTRL());
 	}
@@ -554,8 +567,8 @@ static int invoke_bpf_prog(u32 *image, u32 *ro_image, struct codegen_context *ct
 	}
 
 	/* __bpf_prog_exit(p, start_time, &bpf_tramp_run_ctx) */
-	EMIT(PPC_RAW_MR(_R3, _R25));
-	EMIT(PPC_RAW_MR(_R4, _R26));
+	EMIT(PPC_RAW_MR(_R3, _R26));
+	EMIT(PPC_RAW_MR(_R4, _R27));
 	EMIT(PPC_RAW_ADDI(_R5, _R1, run_ctx_off));
 	ret = bpf_jit_emit_func_call_rel(image, ro_image, ctx,
 					 (unsigned long)bpf_trampoline_exit(p));
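These three hunks renumber invoke_bpf_prog()'s scratch non-volatiles: the prog pointer moves from r25 to r26 and the enter-timestamp from r26 to r27, freeing r25 for the enlarged r26..r31 save area introduced later in this patch. The control flow itself is unchanged; a runnable C model of what the emitted code does (all names below are mine, not the kernel's):

#include <stdint.h>
#include <stdio.h>

static uint64_t prog_enter(void *p)		/* models __bpf_prog_enter() */
{
	(void)p;
	return 42;				/* nonzero start time = run the prog */
}

static void prog_exit(void *p, uint64_t start)	/* models __bpf_prog_exit() */
{
	(void)p;
	printf("start time was %llu\n", (unsigned long long)start);
}

static void run_prog(void)			/* models the BCTRL target */
{
	puts("bpf prog body");
}

int main(void)
{
	void *p = (void *)0x1;	/* prog pointer: kept in "r26" across both calls */
	uint64_t start;		/* start time:   kept in "r27" across the prog call */

	start = prog_enter(p);
	if (start)		/* the JIT emits a conditional branch over the call */
		run_prog();
	prog_exit(p, start);
	return 0;
}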
@@ -600,15 +613,42 @@ static int invoke_bpf_mod_ret(u32 *image, u32 *ro_image, struct codegen_context
 	return 0;
 }
 
-static void bpf_trampoline_setup_tail_call_cnt(u32 *image, struct codegen_context *ctx,
-					       int func_frame_offset, int r4_off)
+/*
+ * Refer to the label 'Generated stack layout' in this file for the actual
+ * stack layout during trampoline invocation.
+ *
+ * Refer to __arch_prepare_bpf_trampoline() for stack component details.
+ *
+ * The tailcall count/reference is present in the caller's stack frame. The
+ * contents of tail_call_info must be copied over before calling the actual
+ * function to which the trampoline is attached.
+ */
+static void bpf_trampoline_setup_tail_call_info(u32 *image, struct codegen_context *ctx,
+						int func_frame_offset,
+						int bpf_dummy_frame_size, int r4_off)
 {
 	if (IS_ENABLED(CONFIG_PPC64)) {
-		/* See bpf_jit_stack_tailcallcnt() */
-		int tailcallcnt_offset = 7 * 8;
+		/* See bpf_jit_stack_tailcallinfo_offset() */
+		int tailcallinfo_offset = BPF_PPC_STACK_SAVE + SZL;
+
+		/*
+		 * func_frame_offset =
+		 *	bpf_dummy_frame_size + trampoline_frame_size
+		 */
+		EMIT(PPC_RAW_LD(_R4, _R1, func_frame_offset));
+		EMIT(PPC_RAW_LD(_R3, _R4, -tailcallinfo_offset));
+
+		/*
+		 * Set tail_call_info in the trampoline's frame, depending on
+		 * whether the previous frame held a value or a reference.
+		 */
+		EMIT(PPC_RAW_CMPLWI(_R3, MAX_TAIL_CALL_CNT));
+		PPC_COND_BRANCH(COND_GT, CTX_NIA(ctx) + 8);
+		EMIT(PPC_RAW_ADDI(_R3, _R4, bpf_jit_stack_tailcallinfo_offset(ctx)));
+		EMIT(PPC_RAW_STL(_R3, _R1, func_frame_offset
+				 - bpf_dummy_frame_size - tailcallinfo_offset));
 
-		EMIT(PPC_RAW_LL(_R3, _R1, func_frame_offset - tailcallcnt_offset));
-		EMIT(PPC_RAW_STL(_R3, _R1, -tailcallcnt_offset));
 	} else {
 		/* See bpf_jit_stack_offsetof() and BPF_PPC_TC */
 		EMIT(PPC_RAW_LL(_R4, _R1, r4_off));
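On 64-bit, the sequence above loads the caller's frame pointer via the back chain, fetches its tail_call_info slot, and then either materializes a reference (a small value means a top-level raw count, so take the slot's address) or forwards the existing reference unchanged, storing the result into the trampoline's own frame. A runnable userspace model of that value-or-reference rule (identifiers are illustrative, not the kernel's):

#include <stdint.h>
#include <stdio.h>

#define MAX_TAIL_CALL_CNT 33	/* kernel's tail-call limit */

/*
 * If the slot holds a small value, it is the top-level count: return the
 * slot's address (value -> reference). Otherwise it is already a pointer
 * to the top-level count: pass it through unchanged.
 */
static uintptr_t propagate_tail_call_info(uintptr_t slot_value, uintptr_t *slot_addr)
{
	if (slot_value <= MAX_TAIL_CALL_CNT)
		return (uintptr_t)slot_addr;
	return slot_value;
}

int main(void)
{
	uintptr_t top_level_slot = 3;	/* a raw count, as at the top level */
	uintptr_t ref;

	/* Outermost trampoline: converts the value into a reference. */
	ref = propagate_tail_call_info(top_level_slot, &top_level_slot);

	/* A nested trampoline just passes the reference through. */
	ref = propagate_tail_call_info(ref, &ref);

	printf("count via reference: %lu\n", (unsigned long)*(uintptr_t *)ref);
	return 0;
}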
@@ -619,7 +659,7 @@ static void bpf_trampoline_restore_tail_call_cnt(u32 *image, struct codegen_cont
 					       int func_frame_offset, int r4_off)
 {
 	if (IS_ENABLED(CONFIG_PPC64)) {
-		/* See bpf_jit_stack_tailcallcnt() */
+		/* See bpf_jit_stack_tailcallinfo_offset() */
 		int tailcallcnt_offset = 7 * 8;
 
 		EMIT(PPC_RAW_LL(_R3, _R1, -tailcallcnt_offset));
@@ -715,10 +755,10 @@ static int __arch_prepare_bpf_trampoline(struct bpf_tramp_image *im, void *rw_im
 	 *		[   r0 save (32-bit)	]	|
 	 * dummy frame for unwind [ back chain 1 ]	--
 	 *		[   padding		] align stack frame
+	 * nvr_off	[   r26..r31		] nvr save: BPF_PPC_STACK_SAVE
+	 *		[   tail_call_info	] non-optional - 64-bit powerpc
 	 * r4_off	[   r4 (tailcallcnt)	] optional - 32-bit powerpc
 	 * alt_lr_off	[   real lr (ool stub)	] optional - actual lr
-	 *		[   r26			]
-	 * nvr_off	[   r25			] nvr save area
 	 * retval_off	[   return value	]
 	 *		[   reg argN		]
 	 *		[   ...			]
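One consequence of the reworked layout worth spelling out: measured downward from a frame's back chain, tail_call_info sits immediately below the r26..r31 block, which is exactly the BPF_PPC_STACK_SAVE + SZL offset used by bpf_trampoline_setup_tail_call_info(). A sketch of the arithmetic, assuming SZL is 8 and the save block covers six registers:

#define SZL			8		/* sizeof(unsigned long), ppc64 */
#define BPF_PPC_STACK_SAVE	(6 * SZL)	/* r26..r31 (assumed value) */

/*
 * Offset of tail_call_info below the back chain: 6*8 + 8 == 56 == 7*8,
 * consistent with the 7 * 8 literal still used on the restore path.
 */
static const int tailcallinfo_offset = BPF_PPC_STACK_SAVE + SZL;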
@@ -776,10 +816,6 @@ static int __arch_prepare_bpf_trampoline(struct bpf_tramp_image *im, void *rw_im
 	if (save_ret)
 		bpf_frame_size += SZL;
 
-	/* Room for nvr save area */
-	nvr_off = bpf_frame_size;
-	bpf_frame_size += 2 * SZL;
-
 	/* Optional save area for actual LR in case of ool ftrace */
 	if (IS_ENABLED(CONFIG_PPC_FTRACE_OUT_OF_LINE)) {
 		alt_lr_off = bpf_frame_size;
@@ -795,6 +831,13 @@ static int __arch_prepare_bpf_trampoline(struct bpf_tramp_image *im, void *rw_im
 		}
 	}
 
+	/* Room for the 64-bit tail_call_info slot */
+	bpf_frame_size += SZL;
+
+	/* Room for nvr save area */
+	nvr_off = bpf_frame_size;
+	bpf_frame_size += BPF_PPC_STACK_SAVE;
+
 	/* Padding to align stack frame, if any */
 	bpf_frame_size = round_up(bpf_frame_size, SZL * 2);
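The nvr save area moves here from its earlier spot (removed above) and grows from 2 * SZL to BPF_PPC_STACK_SAVE, with the tail_call_info slot reserved just before it; after rounding, both land at the top of the finished frame, matching the layout diagram. A runnable sketch of the accounting (the 64-byte baseline and the BPF_PPC_STACK_SAVE value are assumptions):

#include <stdio.h>

#define SZL			8		/* sizeof(unsigned long), ppc64 */
#define BPF_PPC_STACK_SAVE	(6 * SZL)	/* r26..r31 (assumed value) */
#define ROUND_UP(x, a)		(((x) + (a) - 1) & ~((a) - 1))

int main(void)
{
	int bpf_frame_size = 64;	/* stand-in for args/retval/etc. above */
	int nvr_off;

	bpf_frame_size += SZL;			/* tail_call_info slot */
	nvr_off = bpf_frame_size;		/* nvr save area starts here */
	bpf_frame_size += BPF_PPC_STACK_SAVE;
	bpf_frame_size = ROUND_UP(bpf_frame_size, SZL * 2);

	/* Slots are then addressed as r1 + offset once the frame exists. */
	printf("nvr_off=%d, frame=%d\n", nvr_off, bpf_frame_size);
	return 0;
}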
@@ -856,8 +899,8 @@ static int __arch_prepare_bpf_trampoline(struct bpf_tramp_image *im, void *rw_im
 	EMIT(PPC_RAW_STL(_R3, _R1, nregs_off));
 
 	/* Save nv regs */
-	EMIT(PPC_RAW_STL(_R25, _R1, nvr_off));
-	EMIT(PPC_RAW_STL(_R26, _R1, nvr_off + SZL));
+	EMIT(PPC_RAW_STL(_R26, _R1, nvr_off));
+	EMIT(PPC_RAW_STL(_R27, _R1, nvr_off + SZL));
 
 	if (flags & BPF_TRAMP_F_CALL_ORIG) {
 		PPC_LI_ADDR(_R3, (unsigned long)im);
@@ -896,7 +939,8 @@ static int __arch_prepare_bpf_trampoline(struct bpf_tramp_image *im, void *rw_im
 
 	/* Replicate tail_call_cnt before calling the original BPF prog */
 	if (flags & BPF_TRAMP_F_TAIL_CALL_CTX)
-		bpf_trampoline_setup_tail_call_cnt(image, ctx, func_frame_offset, r4_off);
+		bpf_trampoline_setup_tail_call_info(image, ctx, func_frame_offset,
+						    bpf_dummy_frame_size, r4_off);
 
 	/* Restore args */
 	bpf_trampoline_restore_args_stack(image, ctx, func_frame_offset, nr_regs, regs_off);
@@ -957,8 +1001,8 @@ static int __arch_prepare_bpf_trampoline(struct bpf_tramp_image *im, void *rw_im
 		EMIT(PPC_RAW_LL(_R3, _R1, retval_off));
 
 	/* Restore nv regs */
-	EMIT(PPC_RAW_LL(_R26, _R1, nvr_off + SZL));
-	EMIT(PPC_RAW_LL(_R25, _R1, nvr_off));
+	EMIT(PPC_RAW_LL(_R27, _R1, nvr_off + SZL));
+	EMIT(PPC_RAW_LL(_R26, _R1, nvr_off));
 
 	/* Epilogue */
 	if (IS_ENABLED(CONFIG_PPC64_ELF_ABI_V2) && !IS_ENABLED(CONFIG_PPC_KERNEL_PCREL))