@@ -296,7 +296,6 @@ void InterpreterMacroAssembler::call_VM_leaf_base(address entry_point,
 }
 
 void InterpreterMacroAssembler::call_VM_base(Register oop_result,
-                                             Register java_thread,
                                              Register last_java_sp,
                                              address entry_point,
                                              int number_of_arguments,
@@ -319,7 +318,7 @@ void InterpreterMacroAssembler::call_VM_base(Register oop_result,
   }
 #endif /* ASSERT */
   // super call
-  MacroAssembler::call_VM_base(oop_result, noreg, last_java_sp,
+  MacroAssembler::call_VM_base(oop_result, last_java_sp,
                                entry_point, number_of_arguments,
                                check_exceptions);
   // interpreter specific
@@ -379,7 +378,7 @@ void InterpreterMacroAssembler::restore_after_resume(bool is_native) {
   }
 }
 
-void InterpreterMacroAssembler::check_and_handle_popframe(Register java_thread) {
+void InterpreterMacroAssembler::check_and_handle_popframe() {
   if (JvmtiExport::can_pop_frame()) {
     Label L;
     // Initiate popframe handling only if it is not already being
@@ -389,7 +388,7 @@ void InterpreterMacroAssembler::check_and_handle_popframe(Register java_thread)
     // This method is only called just after the call into the vm in
     // call_VM_base, so the arg registers are available.
     Register pop_cond = c_rarg0;
-    movl(pop_cond, Address(java_thread, JavaThread::popframe_condition_offset()));
+    movl(pop_cond, Address(r15_thread, JavaThread::popframe_condition_offset()));
     testl(pop_cond, JavaThread::popframe_pending_bit);
     jcc(Assembler::zero, L);
     testl(pop_cond, JavaThread::popframe_processing_bit);
@@ -430,7 +429,7 @@ void InterpreterMacroAssembler::load_earlyret_value(TosState state) {
 }
 
 
-void InterpreterMacroAssembler::check_and_handle_earlyret(Register java_thread) {
+void InterpreterMacroAssembler::check_and_handle_earlyret() {
   if (JvmtiExport::can_force_early_return()) {
     Label L;
     Register tmp = c_rarg0;
@@ -810,13 +809,13 @@ void InterpreterMacroAssembler::remove_activation(
   // the stack, will call InterpreterRuntime::at_unwind.
   Label slow_path;
   Label fast_path;
-  safepoint_poll(slow_path, rthread, true /* at_return */, false /* in_nmethod */);
+  safepoint_poll(slow_path, true /* at_return */, false /* in_nmethod */);
   jmp(fast_path);
   bind(slow_path);
   push(state);
-  set_last_Java_frame(rthread, noreg, rbp, (address)pc(), rscratch1);
+  set_last_Java_frame(noreg, rbp, (address)pc(), rscratch1);
   super_call_VM_leaf(CAST_FROM_FN_PTR(address, InterpreterRuntime::at_unwind), rthread);
-  reset_last_Java_frame(rthread, true);
+  reset_last_Java_frame(true);
   pop(state);
   bind(fast_path);
 
@@ -1031,8 +1030,7 @@ void InterpreterMacroAssembler::lock_object(Register lock_reg) {
     }
 
     if (LockingMode == LM_LIGHTWEIGHT) {
-      const Register thread = r15_thread;
-      lightweight_lock(lock_reg, obj_reg, swap_reg, thread, tmp_reg, slow_case);
+      lightweight_lock(lock_reg, obj_reg, swap_reg, tmp_reg, slow_case);
     } else if (LockingMode == LM_LEGACY) {
       // Load immediate 1 into swap_reg %rax
       movl(swap_reg, 1);
@@ -1141,7 +1139,7 @@ void InterpreterMacroAssembler::unlock_object(Register lock_reg) {
     movptr(Address(lock_reg, BasicObjectLock::obj_offset()), NULL_WORD);
 
     if (LockingMode == LM_LIGHTWEIGHT) {
-      lightweight_unlock(obj_reg, swap_reg, r15_thread, header_reg, slow_case);
+      lightweight_unlock(obj_reg, swap_reg, header_reg, slow_case);
     } else if (LockingMode == LM_LEGACY) {
       // Load the old header from BasicLock structure
       movptr(header_reg, Address(swap_reg,