
+ IsSimpleRef() function that can be used to determine whether a reference
is a valid AArch64 memory reference. It returns an enum indicating that the
reference is either valid, invalid, or invalid in a way that should trigger
an internal error if we created the reference ourselves (as opposed to it
coming in via the assembler reader)

git-svn-id: trunk@29846 -
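
As a rough illustration (not part of this commit), a caller might dispatch on
the three values returned by the new simple_ref_type() function along these
lines; simplify_ref and report_asm_error are hypothetical placeholders, and
the actual integration into the code generator and assembler reader may look
different:

    procedure check_ref(op: tasmop; size: tcgsize; oppostfix: toppostfix;
      const ref: treference; from_asm_reader: boolean);
      begin
        case simple_ref_type(op,size,oppostfix,ref) of
          sr_simple:
            { the reference can be encoded as-is }
            ;
          sr_complex:
            { the code generator first has to simplify the reference, e.g. by
              loading the offset or index into a temporary register; the inline
              assembler reader rejects it }
            simplify_ref(op,size,oppostfix,ref);
          sr_internal_illegal:
            { the compiler itself must never create such a reference }
            if from_asm_reader then
              report_asm_error(ref)
            else
              internalerror(2014110700); { placeholder error number }
        end;
      end;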

Jonas Maebe, 10 years ago
commit 0ce5a1de7f
1 changed file with 286 additions and 0 deletions

compiler/aarch64/aasmcpu.pas  +286 -0

@@ -222,6 +222,21 @@ uses
         { nothing to add }
       end;
 
+    type
+      tsimplereftype =
+         { valid reference }
+        (sr_simple,
+         { invalid reference, should not be generated by the code generator (but
+           can be encountered via inline assembly, where it must be rejected) }
+         sr_internal_illegal,
+         { invalid reference, may be generated by the code generator and then
+           must be simplified (also rejected in inline assembly) }
+         sr_complex);
+
+    function simple_ref_type(op: tasmop; size:tcgsize; oppostfix: toppostfix; const ref: treference): tsimplereftype;
+    function can_be_shifter_operand(opc: tasmop; opnr: longint): boolean;
+    function valid_shifter_operand(opc: tasmop; useszr, usessp, is64bit: boolean; sm: tshiftmode; shiftimm: longint): boolean;
+
     function spilling_create_load(const ref: treference; r: tregister): taicpu;
     function spilling_create_store(r: tregister; const ref: treference): taicpu;
 
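As a purely illustrative aside (not part of this commit), here are a few
concrete values the two shifter-operand helpers declared above accept or
reject, assuming the implementation added further down in this diff; the
Assert calls are only there to state the expected results:

    procedure shifter_operand_examples;
      begin
        { for ADD/SUB, the shifter operand is operand 3 }
        Assert(can_be_shifter_operand(A_ADD,3));
        Assert(not can_be_shifter_operand(A_ADD,2));
        { 64 bit ADD without SP/ZR operands accepts "LSL #12" as a shifted
          register operand }
        Assert(valid_shifter_operand(A_ADD,false,false,true,SM_LSL,12));
        { 32 bit ORR only allows shift amounts 0..31 }
        Assert(not valid_shifter_operand(A_ORR,false,false,false,SM_LSL,40));
        { MOVZ only accepts LSL #0/#16 (and #32/#48 in 64 bit mode) }
        Assert(not valid_shifter_operand(A_MOVZ,false,false,false,SM_LSL,8));
      end;
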
@@ -500,6 +515,277 @@ implementation
       end;
 
 
+    function is_valid_load_symbol(op: tasmop; oppostfix: toppostfix; const ref: treference): tsimplereftype;
+      begin
+        result:=sr_complex;
+        if not assigned(ref.symboldata) and
+           not(ref.refaddr in [addr_pic,addr_gotpageoffset,addr_gotpage]) then
+          exit;
+        { can't use pre-/post-indexed mode here (makes no sense either) }
+        if ref.addressmode<>AM_OFFSET then
+          exit;
+        { "ldr literal" must be a 32/64 bit LDR and have a symbol }
+        if assigned(ref.symboldata) and
+           ((op<>A_LDR) or
+            not(oppostfix in [PF_NONE,PF_W,PF_SW]) or
+            not assigned(ref.symbol)) then
+          exit;
+        { if this is a got offset load, we must have a base register and a
+          symbol }
+        if (ref.refaddr=addr_gotpageoffset) and
+           (not assigned(ref.symbol) or
+            (ref.base=NR_NO) or
+            (ref.index<>NR_NO) or
+            (ref.offset<>0)) then
+          begin
+            result:=sr_internal_illegal;
+            exit;
+          end;
+        { cannot have a base or index register (we generate these kinds of
+          references internally; they should never end up here with an
+          extra base register or offset) }
+        if (ref.refaddr in [addr_gotpage,addr_pic]) and
+           ((ref.base<>NR_NO) or
+            (ref.index<>NR_NO)) then
+          begin
+            result:=sr_internal_illegal;
+            exit;
+          end;
+        result:=sr_simple;
+      end;
+
+
+    function simple_ref_type(op: tasmop; size:tcgsize; oppostfix: toppostfix; const ref: treference): tsimplereftype;
+      var
+        maxoffs: asizeint;
+        accesssize: longint;
+      begin
+        result:=sr_internal_illegal;
+        { post-indexed addressing is only allowed for the vector loads/stores
+          and for immediate-offset (base only, no index) LDR/STR/LDP/STP }
+        if (ref.addressmode=AM_POSTINDEXED) and
+           not(op in [A_LD1,A_LD2,A_LD3,A_LD4,A_ST1,A_ST2,A_ST3,A_ST4]) and
+           (not(op in [A_LDR,A_STR,A_LDP,A_STP]) or
+            (ref.base=NR_NO) or
+            (ref.index<>NR_NO)) then
+          exit;
+
+        { can only have a shift mode if we have an index }
+        if (ref.index=NR_NO) and
+           (ref.shiftmode<>SM_None) then
+          exit;
+
+        { the index can never be the stack pointer }
+        if ref.index=NR_SP then
+          exit;
+
+        { no instruction supports an index without a base }
+        if (ref.base=NR_NO) and
+           (ref.index<>NR_NO) then
+          begin
+            result:=sr_complex;
+            exit;
+          end;
+
+        { LDR literal or GOT entry: 32 or 64 bit, label }
+        if assigned(ref.symboldata) or
+           assigned(ref.symbol) then
+          begin
+            { we generate these kinds of references internally; at least for
+              now, they should never end up here with an extra base register
+              or offset }
+            result:=is_valid_load_symbol(op,oppostfix,ref);
+            exit;
+          end;
+
+        { any other reference cannot be gotpage/gotpageoffset/pic }
+        if ref.refaddr in [addr_gotpage,addr_gotpageoffset,addr_pic] then
+          exit;
+
+        { base & index:
+            * index cannot be the stack pointer
+            * offset must be 0
+            * can scale with the size of the access
+            * can zero/sign extend a 32 bit index register, and/or multiply by
+              access size
+            * no pre/post-indexing
+        }
+        if (ref.base<>NR_NO) and
+           (ref.index<>NR_NO) then
+          begin
+            if ref.addressmode in [AM_PREINDEXED,AM_POSTINDEXED] then
+              exit;
+            case op of
+              { this holds for both integer and fpu/vector loads }
+              A_LDR,A_STR:
+                if (ref.offset=0) and
+                   (((ref.shiftmode=SM_None) and
+                     (ref.shiftimm=0)) or
+                    ((ref.shiftmode in [SM_LSL,SM_UXTW,SM_SXTW]) and
+                     (ref.shiftimm=tcgsizep2size[size]))) then
+                  result:=sr_simple
+                else
+                  result:=sr_complex;
+              { todo }
+              A_LD1,A_LD2,A_LD3,A_LD4,
+              A_ST1,A_ST2,A_ST3,A_ST4:
+                internalerror(2014110704);
+              { these don't support base+index }
+              A_LDUR,A_STUR,
+              A_LDP,A_STP:
+                result:=sr_complex;
+              else
+                { nothing: result is already sr_internal_illegal };
+            end;
+            exit;
+          end;
+
+        { base + immediate offset. Variants:
+            * LDR*/STR*:
+              - pre- or post-indexed with signed 9 bit immediate
+              - regular with unsigned scaled 12 bit immediate (multiple of the
+                access size), in the range 0 to 4095*access_size
+            * LDP/STP:
+              - pre- or post-indexed with signed 7 bit immediate, scaled by
+                the access size
+              - regular with signed 7 bit immediate, scaled by the access size
+            * LDUR*/STUR*:
+              - regular with signed 9 bit immediate
+        }
+        if ref.base<>NR_NO then
+          begin
+            accesssize:=1 shl tcgsizep2size[size];
+            case op of
+              A_LDR,A_STR:
+                begin
+                  if (ref.addressmode=AM_OFFSET) and
+                     (ref.offset>=0) and
+                     (ref.offset<=(((1 shl 12)-1)*accesssize)) and
+                     ((ref.offset mod accesssize)=0) then
+                    result:=sr_simple
+                  else if (ref.offset>=-256) and
+                     (ref.offset<=255) then
+                    begin
+                      { non pre-/post-indexed regular loads/stores can only be
+                        performed using LDUR/STUR }
+                      if ref.addressmode in [AM_PREINDEXED,AM_POSTINDEXED] then
+                        result:=sr_simple
+                      else
+                        result:=sr_complex
+                    end
+                  else
+                    result:=sr_complex;
+                end;
+              A_LDP,A_STP:
+                begin
+                  { only supported for 32/64 bit }
+                  if not(oppostfix in [PF_W,PF_SW,PF_None]) then
+                    exit;
+                  { offset must be a multiple of the access size }
+                  if (ref.offset mod accesssize)<>0 then
+                    exit;
+                  { offset must fit in a signed 7 bit offset }
+                  if (ref.offset>=-(1 shl (6+tcgsizep2size[size]))) and
+                     (ref.offset<=(1 shl (6+tcgsizep2size[size]))-1) then
+                    result:=sr_simple
+                  else
+                    result:=sr_complex;
+                end;
+              A_LDUR,A_STUR:
+                begin
+                  if (ref.addressmode=AM_OFFSET) and
+                     (ref.offset>=-256) and
+                     (ref.offset<=255) then
+                    result:=sr_simple
+                  else
+                    result:=sr_complex;
+                end;
+              { todo }
+              A_LD1,A_LD2,A_LD3,A_LD4,
+              A_ST1,A_ST2,A_ST3,A_ST4:
+                internalerror(2014110907);
+              else
+                { nothing: result is already sr_internal_illegal };
+            end;
+            exit;
+          end;
+        { absolute addresses are not supported; they have to be loaded into
+          a register first }
+        result:=sr_complex;
+      end;
+
+
+    function can_be_shifter_operand(opc: tasmop; opnr: longint): boolean;
+      begin
+        case opc of
+          A_ADD,
+          A_AND,
+          A_BIC,
+          A_EON,
+          A_EOR,
+          A_ORN,
+          A_ORR,
+          A_SUB:
+            result:=opnr=3;
+          A_CMN,
+          A_CMP,
+          A_MOVK,
+          A_MOVZ,
+          A_MOVN,
+          A_MVN,
+          A_NEG,
+          A_TST:
+            result:=opnr=2;
+          else
+            result:=false;
+        end;
+      end;
+
+
+    function valid_shifter_operand(opc: tasmop; useszr, usessp, is64bit: boolean; sm: tshiftmode; shiftimm: longint): boolean;
+      begin
+        case opc of
+          A_ADD,
+          A_SUB,
+          A_NEG,
+          A_AND,
+          A_TST,
+          A_CMN,
+          A_CMP:
+            begin
+              result:=false;
+              { the shifted register form encodes register 31 as the zero
+                register, so it cannot be combined with SP }
+              if not usessp then
+                result:=
+                  (sm in shiftedregmodes) and
+                  ((shiftimm in [0..31]) or
+                   (is64bit and
+                    (shiftimm in [32..63])));
+              { the extended register form encodes register 31 of Rd/Rn as SP,
+                so it cannot be combined with the zero register }
+              if not useszr then
+                result:=
+                  result or
+                  ((sm in extendedregmodes) and
+                   (shiftimm in [0..4]));
+            end;
+          A_BIC,
+          A_EON,
+          A_EOR,
+          A_MVN,
+          A_ORN,
+          A_ORR:
+            { shift amounts of 0..31 (32 bit) resp. 0..63 (64 bit) are valid }
+            result:=
+              (sm in shiftedregmodes) and
+              (shiftimm in [0..31*(ord(is64bit)+1)+ord(is64bit)]);
+          A_MOVK,
+          A_MOVZ,
+          A_MOVN:
+            result:=
+              (sm=SM_LSL) and
+              ((shiftimm in [0,16]) or
+               (is64bit and
+                (shiftimm in [32,48])));
+          else
+            result:=false;
+        end;
+      end;
+
+
     function spilling_create_load(const ref: treference; r: tregister): taicpu;
       var
         op: tasmop;