static void emit_priv_frame_ptr(u8 **pprog, struct bpf_prog *bpf_prog,
- enum bpf_priv_stack_mode priv_stack_mode)
+ enum bpf_priv_stack_mode priv_stack_mode,
+ bool is_subprog, u8 *image, u8 *temp)
{
u32 orig_stack_depth = round_up(bpf_prog->aux->stack_depth, 8);
u8 *prog = *pprog;
- if (priv_stack_mode == PRIV_STACK_ROOT_PROG)
- emit_root_priv_frame_ptr(&prog, bpf_prog, orig_stack_depth);
- else if (priv_stack_mode == PRIV_STACK_SUB_PROG && orig_stack_depth)
+ if (priv_stack_mode == PRIV_STACK_ROOT_PROG) {
+ int offs;
+ u8 *func;
+
+ if (!bpf_prog->aux->has_prog_call) {
+ emit_root_priv_frame_ptr(&prog, bpf_prog, orig_stack_depth);
+ } else {
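+ /* save argument registers that the helper call below may clobber */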
+ EMIT1(0x57); /* push rdi */
+ if (is_subprog) {
+ /* subprog may have up to 5 arguments */
+ EMIT1(0x56); /* push rsi */
+ EMIT1(0x52); /* push rdx */
+ EMIT1(0x51); /* push rcx */
+ EMIT2(0x41, 0x50); /* push r8 */
+ }
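+ /* arg1: pass the bpf_prog pointer to the enter helper */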
+ emit_mov_imm64(&prog, BPF_REG_1, (long) bpf_prog >> 32,
+ (u32) (long) bpf_prog);
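+ /* emit a call-depth-accounted call to __bpf_prog_enter_recur_limited(bpf_prog) */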
+ func = (u8 *)__bpf_prog_enter_recur_limited;
+ offs = prog - temp;
+ offs += x86_call_depth_emit_accounting(&prog, func, image + offs);
+ emit_call(&prog, func, image + offs);
+ if (is_subprog) {
+ EMIT2(0x41, 0x58); /* pop r8 */
+ EMIT1(0x59); /* pop rcx */
+ EMIT1(0x5a); /* pop rdx */
+ EMIT1(0x5e); /* pop rsi */
+ }
+ EMIT1(0x5f); /* pop rdi */
+
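+ /* rax (BPF_REG_0) holds the count returned by the enter helper; 0 means the recursion limit was hit */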
+ EMIT4(0x48, 0x83, 0xf8, 0x0); /* cmp rax,0x0 */
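+ /* if rax != 0, skip over the early return; the +1 covers the one-byte leave */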
+ EMIT2(X86_JNE, num_bytes_of_emit_return() + 1);
+
+ /* return if the recursion limit has been reached */
+ EMIT1(0xC9); /* leave */
+ emit_return(&prog, image + (prog - temp));
+
+ /* cnt -= 1 */
+ emit_alu_helper_1(&prog, BPF_ALU64 | BPF_SUB | BPF_K,
+ BPF_REG_0, 1);
+
+ /* accum_stack_depth = cnt * subtree_stack_depth */
+ emit_alu_helper_3(&prog, BPF_ALU64 | BPF_MUL | BPF_K, BPF_REG_0,
+ bpf_prog->aux->subtree_stack_depth);
+
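+ /* set up r9 as the private stack frame pointer for this prog */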
+ emit_root_priv_frame_ptr(&prog, bpf_prog, orig_stack_depth);
+
+ /* r9 += accum_stack_depth */
+ emit_alu_helper_2(&prog, BPF_ALU64 | BPF_ADD | BPF_X, X86_REG_R9,
+ BPF_REG_0);