Revision 01a4afeb target-ppc/translate.c
b/target-ppc/translate.c | ||
---|---|---|
71 | 71 |
|
72 | 72 |
/* dyngen register indexes */ |
73 | 73 |
static TCGv cpu_T[3]; |
74 |
static TCGv_i64 cpu_FT[2]; |
|
75 | 74 |
|
76 | 75 |
#include "gen-icount.h" |
77 | 76 |
|
... | ... | |
103 | 102 |
#endif |
104 | 103 |
#endif |
105 | 104 |
|
106 |
cpu_FT[0] = tcg_global_mem_new_i64(TCG_AREG0, |
|
107 |
offsetof(CPUState, ft0), "FT0"); |
|
108 |
cpu_FT[1] = tcg_global_mem_new_i64(TCG_AREG0, |
|
109 |
offsetof(CPUState, ft1), "FT1"); |
|
110 |
|
|
111 | 105 |
p = cpu_reg_names; |
112 | 106 |
|
113 | 107 |
for (i = 0; i < 8; i++) { |
... | ... | |
5102 | 5096 |
|
5103 | 5097 |
/* POWER2 specific instructions */ |
5104 | 5098 |
/* Quad manipulation (load/store two floats at a time) */ |
5105 |
/* Original POWER2 is 32 bits only, define 64 bits ops as 32 bits ones */ |
|
5106 |
#define op_POWER2_lfq() (*gen_op_POWER2_lfq[ctx->mem_idx])() |
|
5107 |
#define op_POWER2_stfq() (*gen_op_POWER2_stfq[ctx->mem_idx])() |
|
5108 |
#define gen_op_POWER2_lfq_64_raw gen_op_POWER2_lfq_raw |
|
5109 |
#define gen_op_POWER2_lfq_64_user gen_op_POWER2_lfq_user |
|
5110 |
#define gen_op_POWER2_lfq_64_kernel gen_op_POWER2_lfq_kernel |
|
5111 |
#define gen_op_POWER2_lfq_64_hypv gen_op_POWER2_lfq_hypv |
|
5112 |
#define gen_op_POWER2_lfq_le_64_raw gen_op_POWER2_lfq_le_raw |
|
5113 |
#define gen_op_POWER2_lfq_le_64_user gen_op_POWER2_lfq_le_user |
|
5114 |
#define gen_op_POWER2_lfq_le_64_kernel gen_op_POWER2_lfq_le_kernel |
|
5115 |
#define gen_op_POWER2_lfq_le_64_hypv gen_op_POWER2_lfq_le_hypv |
|
5116 |
#define gen_op_POWER2_stfq_64_raw gen_op_POWER2_stfq_raw |
|
5117 |
#define gen_op_POWER2_stfq_64_user gen_op_POWER2_stfq_user |
|
5118 |
#define gen_op_POWER2_stfq_64_kernel gen_op_POWER2_stfq_kernel |
|
5119 |
#define gen_op_POWER2_stfq_64_hypv gen_op_POWER2_stfq_hypv |
|
5120 |
#define gen_op_POWER2_stfq_le_64_raw gen_op_POWER2_stfq_le_raw |
|
5121 |
#define gen_op_POWER2_stfq_le_64_user gen_op_POWER2_stfq_le_user |
|
5122 |
#define gen_op_POWER2_stfq_le_64_kernel gen_op_POWER2_stfq_le_kernel |
|
5123 |
#define gen_op_POWER2_stfq_le_64_hypv gen_op_POWER2_stfq_le_hypv |
|
5124 |
static GenOpFunc *gen_op_POWER2_lfq[NB_MEM_FUNCS] = { |
|
5125 |
GEN_MEM_FUNCS(POWER2_lfq), |
|
5126 |
}; |
|
5127 |
static GenOpFunc *gen_op_POWER2_stfq[NB_MEM_FUNCS] = { |
|
5128 |
GEN_MEM_FUNCS(POWER2_stfq), |
|
5129 |
}; |
|
5130 | 5099 |
|
5131 | 5100 |
/* lfq - POWER2 "load float quad": loads two consecutive doublewords from
 * memory into FPRs rD and (rD + 1) % 32, from EA and EA + 8.
 * D-form (immediate-indexed) addressing. */
GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
{
    int rd = rD(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    /* Compute the effective address from the immediate index form. */
    gen_addr_imm_index(t0, ctx, 0);
    gen_qemu_ld64(cpu_fpr[rd], t0, ctx->mem_idx);
    /* Reuse t0 as EA + 8 for the second doubleword. */
    tcg_gen_addi_tl(t0, t0, 8);
    /* Register pair wraps modulo 32 (FPR 31 pairs with FPR 0). */
    gen_qemu_ld64(cpu_fpr[(rd + 1) % 32], t0, ctx->mem_idx);
    tcg_temp_free(t0);
}
5141 | 5111 |
|
5142 | 5112 |
/* lfqu - "load float quad with update": like lfq, but additionally writes
 * the (un-incremented) effective address back into rA when rA != 0. */
GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
{
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    gen_addr_imm_index(t0, ctx, 0);
    gen_qemu_ld64(cpu_fpr[rd], t0, ctx->mem_idx);
    /* t0 must stay equal to the base EA for the update below,
     * so the second address goes into a separate temp. */
    tcg_gen_addi_tl(t1, t0, 8);
    gen_qemu_ld64(cpu_fpr[(rd + 1) % 32], t1, ctx->mem_idx);
    if (ra != 0)
        tcg_gen_mov_tl(cpu_gpr[ra], t0);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
5156 | 5128 |
|
5157 | 5129 |
/* lfqux - "load float quad with update, indexed": X-form (register-indexed)
 * variant of lfqu; writes the effective address back into rA when rA != 0. */
GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2)
{
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    /* Compute the effective address from the register index form. */
    gen_addr_reg_index(t0, ctx);
    gen_qemu_ld64(cpu_fpr[rd], t0, ctx->mem_idx);
    /* Keep t0 as the base EA for the rA update; use t1 for EA + 8. */
    tcg_gen_addi_tl(t1, t0, 8);
    gen_qemu_ld64(cpu_fpr[(rd + 1) % 32], t1, ctx->mem_idx);
    if (ra != 0)
        tcg_gen_mov_tl(cpu_gpr[ra], t0);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
5171 | 5145 |
|
5172 | 5146 |
/* lfqx - "load float quad, indexed": X-form variant of lfq (no rA update). */
GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2)
{
    int rd = rD(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    gen_addr_reg_index(t0, ctx);
    gen_qemu_ld64(cpu_fpr[rd], t0, ctx->mem_idx);
    /* No EA write-back here, so t0 can be bumped in place for EA + 8. */
    tcg_gen_addi_tl(t0, t0, 8);
    gen_qemu_ld64(cpu_fpr[(rd + 1) % 32], t0, ctx->mem_idx);
    tcg_temp_free(t0);
}
5182 | 5157 |
|
5183 | 5158 |
/* stfq - POWER2 "store float quad": stores FPRs rS and (rS + 1) % 32 to
 * EA and EA + 8. D-form (immediate-indexed) addressing.
 * NOTE(review): the register number is extracted with rD() here; rD and rS
 * occupy the same opcode field, so this is equivalent to rS(). */
GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
{
    int rd = rD(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    gen_addr_imm_index(t0, ctx, 0);
    gen_qemu_st64(cpu_fpr[rd], t0, ctx->mem_idx);
    /* Reuse t0 as EA + 8 for the second doubleword. */
    tcg_gen_addi_tl(t0, t0, 8);
    gen_qemu_st64(cpu_fpr[(rd + 1) % 32], t0, ctx->mem_idx);
    tcg_temp_free(t0);
}
5193 | 5169 |
|
5194 | 5170 |
/* stfqu - "store float quad with update": like stfq, but additionally
 * writes the effective address back into rA when rA != 0. */
GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
{
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    gen_addr_imm_index(t0, ctx, 0);
    gen_qemu_st64(cpu_fpr[rd], t0, ctx->mem_idx);
    /* t0 must stay equal to the base EA for the update below,
     * so the second address goes into a separate temp. */
    tcg_gen_addi_tl(t1, t0, 8);
    gen_qemu_st64(cpu_fpr[(rd + 1) % 32], t1, ctx->mem_idx);
    if (ra != 0)
        tcg_gen_mov_tl(cpu_gpr[ra], t0);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
5208 | 5186 |
|
5209 | 5187 |
/* stfqux - "store float quad with update, indexed": X-form (register-indexed)
 * variant of stfqu; writes the effective address back into rA when rA != 0. */
GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2)
{
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    /* Compute the effective address from the register index form. */
    gen_addr_reg_index(t0, ctx);
    gen_qemu_st64(cpu_fpr[rd], t0, ctx->mem_idx);
    /* Keep t0 as the base EA for the rA update; use t1 for EA + 8. */
    tcg_gen_addi_tl(t1, t0, 8);
    gen_qemu_st64(cpu_fpr[(rd + 1) % 32], t1, ctx->mem_idx);
    if (ra != 0)
        tcg_gen_mov_tl(cpu_gpr[ra], t0);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
5223 | 5203 |
|
5224 | 5204 |
/* stfqx - "store float quad, indexed": X-form variant of stfq (no rA update). */
GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2)
{
    int rd = rD(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    gen_addr_reg_index(t0, ctx);
    gen_qemu_st64(cpu_fpr[rd], t0, ctx->mem_idx);
    /* No EA write-back here, so t0 can be bumped in place for EA + 8. */
    tcg_gen_addi_tl(t0, t0, 8);
    gen_qemu_st64(cpu_fpr[(rd + 1) % 32], t0, ctx->mem_idx);
    tcg_temp_free(t0);
}
5234 | 5215 |
|
5235 | 5216 |
/* BookE specific instructions */ |
Also available in: Unified diff