py: Combine native emitters to 1 glue function; distinguish viper.
This patch simplifies the glue between the native emitter and the runtime, and handles viper code like inline assembler: return values are converted to Python objects. Fixes issue #531.
parent 04b7cc4df0
commit ccc85ea0da
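The behavioural part of the change shows up in the last hunk (objfun.c): a viper function is now wrapped the same way as an inline-assembler function, so its raw machine-word result is passed through the runtime's converter (convert_val_from_inline_asm) and comes back as a Python object. Below is a minimal, standalone sketch of that boxing step, using illustrative names and a toy small-int tagging scheme rather than MicroPython's actual API:

#include <stdint.h>
#include <stdio.h>

/* Illustrative stand-ins; MicroPython's real types are machine_uint_t and mp_obj_t. */
typedef uintptr_t word_t;
typedef void *obj_t;

/* Toy "box a raw return value as a Python object" step, in the spirit of
 * convert_val_from_inline_asm: tag the word as a small integer object. */
static obj_t box_return_value(word_t val) {
    return (obj_t)((val << 1) | 1);
}

static word_t fake_native_fun(word_t a, word_t b) {
    return a + b;   // stands in for machine code emitted by viper or inline asm
}

int main(void) {
    word_t raw = fake_native_fun(40, 2);
    obj_t o = box_return_value(raw);
    printf("raw=%lu boxed=%p\n", (unsigned long)raw, (void *)o);
    return 0;
}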
@@ -74,7 +74,7 @@ mp_raw_code_t *mp_emit_glue_new_raw_code(void) {
 }
 
 void mp_emit_glue_assign_bytecode(mp_raw_code_t *rc, byte *code, uint len, uint n_pos_args, uint n_kwonly_args, qstr *arg_names, uint scope_flags) {
-    rc->kind = MP_CODE_BYTE;
+    rc->kind = MP_CODE_BYTECODE;
     rc->scope_flags = scope_flags;
     rc->n_pos_args = n_pos_args;
     rc->n_kwonly_args = n_kwonly_args;
@@ -104,40 +104,15 @@ void mp_emit_glue_assign_bytecode(mp_raw_code_t *rc, byte *code, uint len, uint
 #endif
 }
 
-void mp_emit_glue_assign_native_code(mp_raw_code_t *rc, void *fun, uint len, int n_args) {
-    rc->kind = MP_CODE_NATIVE;
+void mp_emit_glue_assign_native(mp_raw_code_t *rc, mp_raw_code_kind_t kind, void *fun, uint len, int n_args) {
+    assert(kind == MP_CODE_NATIVE_PY || kind == MP_CODE_NATIVE_VIPER || kind == MP_CODE_NATIVE_ASM);
+    rc->kind = kind;
     rc->scope_flags = 0;
     rc->n_pos_args = n_args;
     rc->u_native.fun = fun;
 
 #ifdef DEBUG_PRINT
-    DEBUG_printf("assign native code: fun=%p len=%u n_args=%d\n", fun, len, n_args);
-    byte *fun_data = (byte*)(((machine_uint_t)fun) & (~1)); // need to clear lower bit in case it's thumb code
-    for (int i = 0; i < 128 && i < len; i++) {
-        if (i > 0 && i % 16 == 0) {
-            DEBUG_printf("\n");
-        }
-        DEBUG_printf(" %02x", fun_data[i]);
-    }
-    DEBUG_printf("\n");
-
-#ifdef WRITE_CODE
-    if (fp_write_code != NULL) {
-        fwrite(fun_data, len, 1, fp_write_code);
-        fflush(fp_write_code);
-    }
-#endif
-#endif
-}
-
-void mp_emit_glue_assign_inline_asm_code(mp_raw_code_t *rc, void *fun, uint len, int n_args) {
-    rc->kind = MP_CODE_INLINE_ASM;
-    rc->scope_flags = 0;
-    rc->n_pos_args = n_args;
-    rc->u_inline_asm.fun = fun;
-
-#ifdef DEBUG_PRINT
-    DEBUG_printf("assign inline asm code: fun=%p len=%u n_args=%d\n", fun, len, n_args);
+    DEBUG_printf("assign native: kind=%d fun=%p len=%u n_args=%d\n", kind, fun, len, n_args);
     byte *fun_data = (byte*)(((machine_uint_t)fun) & (~1)); // need to clear lower bit in case it's thumb code
     for (int i = 0; i < 128 && i < len; i++) {
        if (i > 0 && i % 16 == 0) {
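To make the shape of the new glue explicit, here is a minimal, self-contained sketch of the same pattern: one assign function that takes the code kind as a parameter instead of one near-identical function per emitter. Names and types are simplified stand-ins, not MicroPython's.

#include <assert.h>
#include <stdio.h>

typedef enum { CODE_NATIVE_PY, CODE_NATIVE_VIPER, CODE_NATIVE_ASM } code_kind_t;

typedef struct {
    code_kind_t kind;
    void *fun;      // entry point of the generated machine code
    int n_args;     // number of positional arguments
} raw_code_t;

/* One glue function for all three native flavours; the kind records how the
 * runtime must later wrap the code (and, for viper/asm, that the return value
 * needs converting back to a Python object). */
static void assign_native(raw_code_t *rc, code_kind_t kind, void *fun, int n_args) {
    assert(kind == CODE_NATIVE_PY || kind == CODE_NATIVE_VIPER || kind == CODE_NATIVE_ASM);
    rc->kind = kind;
    rc->fun = fun;
    rc->n_args = n_args;
}

int main(void) {
    raw_code_t rc;
    int do_viper_types = 1;   // mirrors emit->do_viper_types in emitnative.c
    assign_native(&rc, do_viper_types ? CODE_NATIVE_VIPER : CODE_NATIVE_PY, (void *)0, 2);
    printf("kind=%d n_args=%d\n", (int)rc.kind, rc.n_args);
    return 0;
}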
@@ -169,14 +144,15 @@ mp_obj_t mp_make_function_from_raw_code(mp_raw_code_t *rc, mp_obj_t def_args, mp
     // make the function, depending on the raw code kind
     mp_obj_t fun;
     switch (rc->kind) {
-        case MP_CODE_BYTE:
+        case MP_CODE_BYTECODE:
             fun = mp_obj_new_fun_bc(rc->scope_flags, rc->arg_names, rc->n_pos_args, rc->n_kwonly_args, def_args, rc->u_byte.code);
             break;
-        case MP_CODE_NATIVE:
+        case MP_CODE_NATIVE_PY:
             fun = mp_make_function_n(rc->n_pos_args, rc->u_native.fun);
             break;
-        case MP_CODE_INLINE_ASM:
-            fun = mp_obj_new_fun_asm(rc->n_pos_args, rc->u_inline_asm.fun);
+        case MP_CODE_NATIVE_VIPER:
+        case MP_CODE_NATIVE_ASM:
+            fun = mp_obj_new_fun_asm(rc->n_pos_args, rc->u_native.fun);
             break;
         default:
             // raw code was never set (this should not happen)
@@ -29,9 +29,10 @@
 typedef enum {
     MP_CODE_UNUSED,
     MP_CODE_RESERVED,
-    MP_CODE_BYTE,
-    MP_CODE_NATIVE,
-    MP_CODE_INLINE_ASM,
+    MP_CODE_BYTECODE,
+    MP_CODE_NATIVE_PY,
+    MP_CODE_NATIVE_VIPER,
+    MP_CODE_NATIVE_ASM,
 } mp_raw_code_kind_t;
 
 typedef struct _mp_code_t {
@@ -45,12 +46,9 @@ typedef struct _mp_code_t {
             byte *code;
             uint len;
         } u_byte;
         struct {
-            mp_fun_t fun;
-        } u_native;
-        struct {
             void *fun;
-        } u_inline_asm;
+        } u_native;
     };
 } mp_raw_code_t;
 
@@ -60,8 +58,7 @@ void mp_emit_glue_deinit(void);
 mp_raw_code_t *mp_emit_glue_new_raw_code(void);
 
 void mp_emit_glue_assign_bytecode(mp_raw_code_t *rc, byte *code, uint len, uint n_pos_args, uint n_kwonly_args, qstr *arg_names, uint scope_flags);
-void mp_emit_glue_assign_native_code(mp_raw_code_t *rc, void *f, uint len, int n_args);
-void mp_emit_glue_assign_inline_asm_code(mp_raw_code_t *rc, void *f, uint len, int n_args);
+void mp_emit_glue_assign_native(mp_raw_code_t *rc, mp_raw_code_kind_t kind, void *f, uint len, int n_args);
 
 mp_obj_t mp_make_function_from_raw_code(mp_raw_code_t *rc, mp_obj_t def_args, mp_obj_t def_kw_args);
 mp_obj_t mp_make_closure_from_raw_code(mp_raw_code_t *rc, uint n_closed_over, const mp_obj_t *args);
@@ -99,7 +99,7 @@ STATIC bool emit_inline_thumb_end_pass(emit_inline_asm_t *emit) {
 
     if (emit->pass == MP_PASS_EMIT) {
         void *f = asm_thumb_get_code(emit->as);
-        mp_emit_glue_assign_inline_asm_code(emit->scope->raw_code, f, asm_thumb_get_code_size(emit->as), emit->scope->num_pos_args);
+        mp_emit_glue_assign_native(emit->scope->raw_code, MP_CODE_NATIVE_ASM, f, asm_thumb_get_code_size(emit->as), emit->scope->num_pos_args);
     }
 
     return emit->success;
@@ -309,10 +309,10 @@ STATIC void emit_native_end_pass(emit_t *emit) {
     if (emit->pass == MP_PASS_EMIT) {
 #if N_X64
         void *f = asm_x64_get_code(emit->as);
-        mp_emit_glue_assign_native_code(emit->scope->raw_code, f, asm_x64_get_code_size(emit->as), emit->scope->num_pos_args);
+        mp_emit_glue_assign_native(emit->scope->raw_code, emit->do_viper_types ? MP_CODE_NATIVE_VIPER : MP_CODE_NATIVE_PY, f, asm_x64_get_code_size(emit->as), emit->scope->num_pos_args);
 #elif N_THUMB
         void *f = asm_thumb_get_code(emit->as);
-        mp_emit_glue_assign_native_code(emit->scope->raw_code, f, asm_thumb_get_code_size(emit->as), emit->scope->num_pos_args);
+        mp_emit_glue_assign_native(emit->scope->raw_code, emit->do_viper_types ? MP_CODE_NATIVE_VIPER : MP_CODE_NATIVE_PY, f, asm_thumb_get_code_size(emit->as), emit->scope->num_pos_args);
 #endif
     }
 }
@@ -438,6 +438,11 @@ STATIC void emit_access_stack(emit_t *emit, int pos, vtype_kind_t *vtype, int re
     }
 }
 
+STATIC void emit_pre_pop_discard(emit_t *emit, vtype_kind_t *vtype) {
+    emit->last_emit_was_return_value = false;
+    adjust_stack(emit, -1);
+}
+
 STATIC void emit_pre_pop_reg(emit_t *emit, vtype_kind_t *vtype, int reg_dest) {
     emit->last_emit_was_return_value = false;
     emit_access_stack(emit, 1, vtype, reg_dest);
@@ -938,7 +943,7 @@ STATIC void emit_native_dup_top_two(emit_t *emit) {
 
 STATIC void emit_native_pop_top(emit_t *emit) {
     vtype_kind_t vtype;
-    emit_pre_pop_reg(emit, &vtype, REG_TEMP0);
+    emit_pre_pop_discard(emit, &vtype);
     emit_post(emit);
 }
 
@@ -506,12 +506,7 @@ STATIC mp_obj_t convert_val_from_inline_asm(machine_uint_t val) {
 STATIC mp_obj_t fun_asm_call(mp_obj_t self_in, uint n_args, uint n_kw, const mp_obj_t *args) {
     mp_obj_fun_asm_t *self = self_in;
 
-    if (n_args != self->n_args) {
-        nlr_raise(mp_obj_new_exception_msg_varg(&mp_type_TypeError, "function takes %d positional arguments but %d were given", self->n_args, n_args));
-    }
-    if (n_kw != 0) {
-        nlr_raise(mp_obj_new_exception_msg(&mp_type_TypeError, "function does not take keyword arguments"));
-    }
+    mp_arg_check_num(n_args, n_kw, self->n_args, self->n_args, false);
 
     machine_uint_t ret;
     if (n_args == 0) {
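The objfun.c hunk above folds the two hand-written checks into a single mp_arg_check_num(n_args, n_kw, min, max, takes_kw) call. A toy, standalone version of what such a combined check does (simplified to exit() instead of raising TypeError via nlr_raise):

#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>

static void arg_check_num(unsigned n_args, unsigned n_kw,
                          unsigned n_args_min, unsigned n_args_max, bool takes_kw) {
    if (!takes_kw && n_kw != 0) {
        fprintf(stderr, "TypeError: function does not take keyword arguments\n");
        exit(1);
    }
    if (n_args < n_args_min || n_args > n_args_max) {
        fprintf(stderr, "TypeError: function takes %u positional arguments but %u were given\n",
                n_args_max, n_args);
        exit(1);
    }
}

int main(void) {
    // An asm/viper-style function with exactly 2 positional args and no keywords.
    arg_check_num(2, 0, 2, 2, false);
    printf("argument check passed\n");
    return 0;
}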