
+ support the new WebAssembly threads and atomics instructions in the internal assembler

Nikolay Nikolov, 3 years ago
Commit 3cfdf000a2
2 files changed, 400 insertions(+), 4 deletions(-)
  1. compiler/wasm32/aasmcpu.pas (+330 −0)
  2. compiler/wasm32/cpubase.pas (+70 −4)
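
In the binary format, the instructions from the WebAssembly threads proposal live in a prefixed opcode space: each one is encoded as the prefix byte $FE, a sub-opcode (a single byte for all the opcodes here), and then the usual memarg immediate, i.e. the alignment exponent followed by the constant offset, both as unsigned LEB128. The size pass in aasmcpu.pas therefore counts 2 bytes for the prefix and sub-opcode plus the ULEB128 sizes of the two immediates. A minimal sketch of the ULEB128 size computation the diff relies on through UlebSize (the name and declaration here are illustrative, not FPC's actual ones):

    function UlebSizeSketch(v: qword): longint;
    begin
      { each LEB128 byte carries 7 payload bits }
      Result:=1;
      v:=v shr 7;
      while v<>0 do
        begin
          Inc(Result);
          v:=v shr 7;
        end;
    end;

With this, i64.atomic.rmw.add with a constant offset of 1024 takes 2 + 1 + 2 = 5 bytes: the prefix and sub-opcode, one byte for the alignment exponent 3, and two ULEB128 bytes for 1024.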

compiler/wasm32/aasmcpu.pas (+330 −0)

@@ -721,6 +721,104 @@ uses
                     internalerror(2021092017);
                 end;
             end;
+          a_memory_atomic_notify,
+          a_memory_atomic_wait32,
+          a_memory_atomic_wait64,
+          a_i32_atomic_load,
+          a_i64_atomic_load,
+          a_i32_atomic_load8_u,
+          a_i32_atomic_load16_u,
+          a_i64_atomic_load8_u,
+          a_i64_atomic_load16_u,
+          a_i64_atomic_load32_u,
+          a_i32_atomic_store,
+          a_i64_atomic_store,
+          a_i32_atomic_store8,
+          a_i32_atomic_store16,
+          a_i64_atomic_store8,
+          a_i64_atomic_store16,
+          a_i64_atomic_store32,
+          a_i32_atomic_rmw_add,
+          a_i64_atomic_rmw_add,
+          a_i32_atomic_rmw8_add_u,
+          a_i32_atomic_rmw16_add_u,
+          a_i64_atomic_rmw8_add_u,
+          a_i64_atomic_rmw16_add_u,
+          a_i64_atomic_rmw32_add_u,
+          a_i32_atomic_rmw_sub,
+          a_i64_atomic_rmw_sub,
+          a_i32_atomic_rmw8_sub_u,
+          a_i32_atomic_rmw16_sub_u,
+          a_i64_atomic_rmw8_sub_u,
+          a_i64_atomic_rmw16_sub_u,
+          a_i64_atomic_rmw32_sub_u,
+          a_i32_atomic_rmw_and,
+          a_i64_atomic_rmw_and,
+          a_i32_atomic_rmw8_and_u,
+          a_i32_atomic_rmw16_and_u,
+          a_i64_atomic_rmw8_and_u,
+          a_i64_atomic_rmw16_and_u,
+          a_i64_atomic_rmw32_and_u,
+          a_i32_atomic_rmw_or,
+          a_i64_atomic_rmw_or,
+          a_i32_atomic_rmw8_or_u,
+          a_i32_atomic_rmw16_or_u,
+          a_i64_atomic_rmw8_or_u,
+          a_i64_atomic_rmw16_or_u,
+          a_i64_atomic_rmw32_or_u,
+          a_i32_atomic_rmw_xor,
+          a_i64_atomic_rmw_xor,
+          a_i32_atomic_rmw8_xor_u,
+          a_i32_atomic_rmw16_xor_u,
+          a_i64_atomic_rmw8_xor_u,
+          a_i64_atomic_rmw16_xor_u,
+          a_i64_atomic_rmw32_xor_u,
+          a_i32_atomic_rmw_xchg,
+          a_i64_atomic_rmw_xchg,
+          a_i32_atomic_rmw8_xchg_u,
+          a_i32_atomic_rmw16_xchg_u,
+          a_i64_atomic_rmw8_xchg_u,
+          a_i64_atomic_rmw16_xchg_u,
+          a_i64_atomic_rmw32_xchg_u,
+          a_i32_atomic_rmw_cmpxchg,
+          a_i64_atomic_rmw_cmpxchg,
+          a_i32_atomic_rmw8_cmpxchg_u,
+          a_i32_atomic_rmw16_cmpxchg_u,
+          a_i64_atomic_rmw8_cmpxchg_u,
+          a_i64_atomic_rmw16_cmpxchg_u,
+          a_i64_atomic_rmw32_cmpxchg_u:
+            begin
+              if ops<>1 then
+                internalerror(2021092016);
+              with oper[0]^ do
+                case typ of
+                  top_ref:
+                    begin
+                      if assigned(ref^.symbol) then
+                        begin
+                          Result:=2+
+                            UlebSize(natural_alignment_for_load_store(opcode))+
+                            5;  { relocation, fixed size = 5 bytes }
+                        end
+                      else
+                        begin
+                          if assigned(ref^.symbol) or (ref^.base<>NR_NO) or (ref^.index<>NR_NO) then
+                            internalerror(2021092018);
+                          Result:=2+
+                            UlebSize(natural_alignment_for_load_store(opcode))+
+                            UlebSize(ref^.offset);
+                        end;
+                    end;
+                  top_const:
+                    begin
+                      Result:=2+
+                        UlebSize(natural_alignment_for_load_store(opcode))+
+                        UlebSize(val);
+                    end;
+                  else
+                    internalerror(2021092017);
+                end;
+            end;
           a_call:
             begin
               if ops<>1 then
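
The second hunk below is the emission counterpart of the size calculation above: every branch writes exactly the bytes the first pass counted, starting with the $FE prefix and the per-instruction sub-opcode. As a self-contained illustration (not compiler code; the program, buffer, and helper names are made up for the sketch), this is the byte sequence that results for i32.atomic.rmw.add with a constant offset of 16:

    program atomic_encoding_sketch;
    {$mode objfpc}
    { Prints FE 1E 02 10: prefix, sub-opcode of i32.atomic.rmw.add,
      alignment exponent 2 (natural for a 32-bit access), offset 16. }

    procedure WriteUleb(var buf: array of byte; var p: longint; v: qword);
    var
      b: byte;
    begin
      repeat
        b:=byte(v and $7F);
        v:=v shr 7;
        if v<>0 then
          b:=b or $80;       { continuation bit }
        buf[p]:=b;
        Inc(p);
      until v=0;
    end;

    var
      buf: array[0..15] of byte;
      p, i: longint;
    begin
      p:=0;
      buf[p]:=$FE; Inc(p);   { atomics prefix }
      buf[p]:=$1E; Inc(p);   { i32.atomic.rmw.add }
      WriteUleb(buf,p,2);    { alignment exponent }
      WriteUleb(buf,p,16);   { constant offset }
      for i:=0 to p-1 do
        Write(HexStr(buf[i],2),' ');
      Writeln;
    end.

Note that the sub-opcode table in the hunk has to stay in sync with the instruction list in the case header; any opcode that falls through hits the internalerror in the else branch.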
@@ -1442,6 +1540,238 @@ uses
                     internalerror(2021092017);
                 end;
             end;
+          a_memory_atomic_notify,
+          a_memory_atomic_wait32,
+          a_memory_atomic_wait64,
+          a_i32_atomic_load,
+          a_i64_atomic_load,
+          a_i32_atomic_load8_u,
+          a_i32_atomic_load16_u,
+          a_i64_atomic_load8_u,
+          a_i64_atomic_load16_u,
+          a_i64_atomic_load32_u,
+          a_i32_atomic_store,
+          a_i64_atomic_store,
+          a_i32_atomic_store8,
+          a_i32_atomic_store16,
+          a_i64_atomic_store8,
+          a_i64_atomic_store16,
+          a_i64_atomic_store32,
+          a_i32_atomic_rmw_add,
+          a_i64_atomic_rmw_add,
+          a_i32_atomic_rmw8_add_u,
+          a_i32_atomic_rmw16_add_u,
+          a_i64_atomic_rmw8_add_u,
+          a_i64_atomic_rmw16_add_u,
+          a_i64_atomic_rmw32_add_u,
+          a_i32_atomic_rmw_sub,
+          a_i64_atomic_rmw_sub,
+          a_i32_atomic_rmw8_sub_u,
+          a_i32_atomic_rmw16_sub_u,
+          a_i64_atomic_rmw8_sub_u,
+          a_i64_atomic_rmw16_sub_u,
+          a_i64_atomic_rmw32_sub_u,
+          a_i32_atomic_rmw_and,
+          a_i64_atomic_rmw_and,
+          a_i32_atomic_rmw8_and_u,
+          a_i32_atomic_rmw16_and_u,
+          a_i64_atomic_rmw8_and_u,
+          a_i64_atomic_rmw16_and_u,
+          a_i64_atomic_rmw32_and_u,
+          a_i32_atomic_rmw_or,
+          a_i64_atomic_rmw_or,
+          a_i32_atomic_rmw8_or_u,
+          a_i32_atomic_rmw16_or_u,
+          a_i64_atomic_rmw8_or_u,
+          a_i64_atomic_rmw16_or_u,
+          a_i64_atomic_rmw32_or_u,
+          a_i32_atomic_rmw_xor,
+          a_i64_atomic_rmw_xor,
+          a_i32_atomic_rmw8_xor_u,
+          a_i32_atomic_rmw16_xor_u,
+          a_i64_atomic_rmw8_xor_u,
+          a_i64_atomic_rmw16_xor_u,
+          a_i64_atomic_rmw32_xor_u,
+          a_i32_atomic_rmw_xchg,
+          a_i64_atomic_rmw_xchg,
+          a_i32_atomic_rmw8_xchg_u,
+          a_i32_atomic_rmw16_xchg_u,
+          a_i64_atomic_rmw8_xchg_u,
+          a_i64_atomic_rmw16_xchg_u,
+          a_i64_atomic_rmw32_xchg_u,
+          a_i32_atomic_rmw_cmpxchg,
+          a_i64_atomic_rmw_cmpxchg,
+          a_i32_atomic_rmw8_cmpxchg_u,
+          a_i32_atomic_rmw16_cmpxchg_u,
+          a_i64_atomic_rmw8_cmpxchg_u,
+          a_i64_atomic_rmw16_cmpxchg_u,
+          a_i64_atomic_rmw32_cmpxchg_u:
+            begin
+              WriteByte($FE);
+              case opcode of
+                a_memory_atomic_notify:
+                  WriteByte($00);
+                a_memory_atomic_wait32:
+                  WriteByte($01);
+                a_memory_atomic_wait64:
+                  WriteByte($02);
+                a_i32_atomic_load:
+                  WriteByte($10);
+                a_i64_atomic_load:
+                  WriteByte($11);
+                a_i32_atomic_load8_u:
+                  WriteByte($12);
+                a_i32_atomic_load16_u:
+                  WriteByte($13);
+                a_i64_atomic_load8_u:
+                  WriteByte($14);
+                a_i64_atomic_load16_u:
+                  WriteByte($15);
+                a_i64_atomic_load32_u:
+                  WriteByte($16);
+                a_i32_atomic_store:
+                  WriteByte($17);
+                a_i64_atomic_store:
+                  WriteByte($18);
+                a_i32_atomic_store8:
+                  WriteByte($19);
+                a_i32_atomic_store16:
+                  WriteByte($1A);
+                a_i64_atomic_store8:
+                  WriteByte($1B);
+                a_i64_atomic_store16:
+                  WriteByte($1C);
+                a_i64_atomic_store32:
+                  WriteByte($1D);
+                a_i32_atomic_rmw_add:
+                  WriteByte($1E);
+                a_i64_atomic_rmw_add:
+                  WriteByte($1F);
+                a_i32_atomic_rmw8_add_u:
+                  WriteByte($20);
+                a_i32_atomic_rmw16_add_u:
+                  WriteByte($21);
+                a_i64_atomic_rmw8_add_u:
+                  WriteByte($22);
+                a_i64_atomic_rmw16_add_u:
+                  WriteByte($23);
+                a_i64_atomic_rmw32_add_u:
+                  WriteByte($24);
+                a_i32_atomic_rmw_sub:
+                  WriteByte($25);
+                a_i64_atomic_rmw_sub:
+                  WriteByte($26);
+                a_i32_atomic_rmw8_sub_u:
+                  WriteByte($27);
+                a_i32_atomic_rmw16_sub_u:
+                  WriteByte($28);
+                a_i64_atomic_rmw8_sub_u:
+                  WriteByte($29);
+                a_i64_atomic_rmw16_sub_u:
+                  WriteByte($2A);
+                a_i64_atomic_rmw32_sub_u:
+                  WriteByte($2B);
+                a_i32_atomic_rmw_and:
+                  WriteByte($2C);
+                a_i64_atomic_rmw_and:
+                  WriteByte($2D);
+                a_i32_atomic_rmw8_and_u:
+                  WriteByte($2E);
+                a_i32_atomic_rmw16_and_u:
+                  WriteByte($2F);
+                a_i64_atomic_rmw8_and_u:
+                  WriteByte($30);
+                a_i64_atomic_rmw16_and_u:
+                  WriteByte($31);
+                a_i64_atomic_rmw32_and_u:
+                  WriteByte($32);
+                a_i32_atomic_rmw_or:
+                  WriteByte($33);
+                a_i64_atomic_rmw_or:
+                  WriteByte($34);
+                a_i32_atomic_rmw8_or_u:
+                  WriteByte($35);
+                a_i32_atomic_rmw16_or_u:
+                  WriteByte($36);
+                a_i64_atomic_rmw8_or_u:
+                  WriteByte($37);
+                a_i64_atomic_rmw16_or_u:
+                  WriteByte($38);
+                a_i64_atomic_rmw32_or_u:
+                  WriteByte($39);
+                a_i32_atomic_rmw_xor:
+                  WriteByte($3A);
+                a_i64_atomic_rmw_xor:
+                  WriteByte($3B);
+                a_i32_atomic_rmw8_xor_u:
+                  WriteByte($3C);
+                a_i32_atomic_rmw16_xor_u:
+                  WriteByte($3D);
+                a_i64_atomic_rmw8_xor_u:
+                  WriteByte($3E);
+                a_i64_atomic_rmw16_xor_u:
+                  WriteByte($3F);
+                a_i64_atomic_rmw32_xor_u:
+                  WriteByte($40);
+                a_i32_atomic_rmw_xchg:
+                  WriteByte($41);
+                a_i64_atomic_rmw_xchg:
+                  WriteByte($42);
+                a_i32_atomic_rmw8_xchg_u:
+                  WriteByte($43);
+                a_i32_atomic_rmw16_xchg_u:
+                  WriteByte($44);
+                a_i64_atomic_rmw8_xchg_u:
+                  WriteByte($45);
+                a_i64_atomic_rmw16_xchg_u:
+                  WriteByte($46);
+                a_i64_atomic_rmw32_xchg_u:
+                  WriteByte($47);
+                a_i32_atomic_rmw_cmpxchg:
+                  WriteByte($48);
+                a_i64_atomic_rmw_cmpxchg:
+                  WriteByte($49);
+                a_i32_atomic_rmw8_cmpxchg_u:
+                  WriteByte($4A);
+                a_i32_atomic_rmw16_cmpxchg_u:
+                  WriteByte($4B);
+                a_i64_atomic_rmw8_cmpxchg_u:
+                  WriteByte($4C);
+                a_i64_atomic_rmw16_cmpxchg_u:
+                  WriteByte($4D);
+                a_i64_atomic_rmw32_cmpxchg_u:
+                  WriteByte($4E);
+                else
+                  internalerror(2022052101);
+              end;
+              if ops<>1 then
+                internalerror(2021092016);
+              with oper[0]^ do
+                case typ of
+                  top_ref:
+                    begin
+                      if assigned(ref^.symbol) then
+                        begin
+                          WriteUleb(natural_alignment_for_load_store(opcode));
+                          objdata.writeReloc(ref^.offset,5,ObjData.symbolref(ref^.symbol),RELOC_MEMORY_ADDR_LEB);
+                        end
+                      else
+                        begin
+                          if assigned(ref^.symbol) or (ref^.base<>NR_NO) or (ref^.index<>NR_NO) then
+                            internalerror(2021092018);
+                          WriteUleb(natural_alignment_for_load_store(opcode));
+                          WriteUleb(ref^.offset);
+                        end;
+                    end;
+                  top_const:
+                    begin
+                      WriteUleb(natural_alignment_for_load_store(opcode));
+                      WriteUleb(val);
+                    end;
+                  else
+                    internalerror(2021092017);
+                end;
+            end;
           a_call:
             begin
               if ops<>1 then

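The cpubase.pas change extends the natural-alignment lookup the assembler queries through natural_alignment_for_load_store: the result is the alignment exponent to encode in the memarg, i.e. log2 of the access width in bytes (0 for 8-bit, 1 for 16-bit, 2 for 32-bit, 3 for 64-bit). The threads proposal requires atomic accesses to be naturally aligned, so the assembler always encodes the natural exponent rather than taking an alignment from the operand. An illustrative helper (not part of the source) that states the mapping the case branches below implement:

    { log2 of the access width, as encoded in the memarg alignment field }
    function alignment_exponent(width_in_bytes: longint): longint;
    begin
      case width_in_bytes of
        1: Result:=0;  { *_load8_u, *_store8, *rmw8* variants }
        2: Result:=1;  { 16-bit variants }
        4: Result:=2;  { 32-bit variants, memory.atomic.notify, wait32 }
        8: Result:=3;  { 64-bit variants, memory.atomic.wait64 }
        else
          Result:=-1;  { not a sized memory access }
      end;
    end;
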
compiler/wasm32/cpubase.pas (+70 −4)

@@ -442,7 +442,25 @@ uses
           a_i64_load8_s,
           a_i64_load8_u,
           a_i32_store8,
-          a_i64_store8:
+          a_i64_store8,
+          a_i32_atomic_load8_u,
+          a_i64_atomic_load8_u,
+          a_i32_atomic_store8,
+          a_i64_atomic_store8,
+          a_i32_atomic_rmw8_add_u,
+          a_i64_atomic_rmw8_add_u,
+          a_i32_atomic_rmw8_sub_u,
+          a_i64_atomic_rmw8_sub_u,
+          a_i32_atomic_rmw8_and_u,
+          a_i64_atomic_rmw8_and_u,
+          a_i32_atomic_rmw8_or_u,
+          a_i64_atomic_rmw8_or_u,
+          a_i32_atomic_rmw8_xor_u,
+          a_i64_atomic_rmw8_xor_u,
+          a_i32_atomic_rmw8_xchg_u,
+          a_i64_atomic_rmw8_xchg_u,
+          a_i32_atomic_rmw8_cmpxchg_u,
+          a_i64_atomic_rmw8_cmpxchg_u:
             result:=0;
 
           a_i32_load16_s,
@@ -450,7 +468,25 @@ uses
           a_i64_load16_s,
           a_i64_load16_u,
           a_i32_store16,
-          a_i64_store16:
+          a_i64_store16,
+          a_i32_atomic_load16_u,
+          a_i64_atomic_load16_u,
+          a_i32_atomic_store16,
+          a_i64_atomic_store16,
+          a_i32_atomic_rmw16_add_u,
+          a_i64_atomic_rmw16_add_u,
+          a_i32_atomic_rmw16_sub_u,
+          a_i64_atomic_rmw16_sub_u,
+          a_i32_atomic_rmw16_and_u,
+          a_i64_atomic_rmw16_and_u,
+          a_i32_atomic_rmw16_or_u,
+          a_i64_atomic_rmw16_or_u,
+          a_i32_atomic_rmw16_xor_u,
+          a_i64_atomic_rmw16_xor_u,
+          a_i32_atomic_rmw16_xchg_u,
+          a_i64_atomic_rmw16_xchg_u,
+          a_i32_atomic_rmw16_cmpxchg_u,
+          a_i64_atomic_rmw16_cmpxchg_u:
             result:=1;
 
           a_i32_load,
@@ -459,13 +495,43 @@ uses
           a_i64_load32_u,
           a_i32_store,
           a_f32_store,
-          a_i64_store32:
+          a_i64_store32,
+          a_memory_atomic_notify,
+          a_memory_atomic_wait32,
+          a_i32_atomic_load,
+          a_i64_atomic_load32_u,
+          a_i32_atomic_store,
+          a_i64_atomic_store32,
+          a_i32_atomic_rmw_add,
+          a_i64_atomic_rmw32_add_u,
+          a_i32_atomic_rmw_sub,
+          a_i64_atomic_rmw32_sub_u,
+          a_i32_atomic_rmw_and,
+          a_i64_atomic_rmw32_and_u,
+          a_i32_atomic_rmw_or,
+          a_i64_atomic_rmw32_or_u,
+          a_i32_atomic_rmw_xor,
+          a_i64_atomic_rmw32_xor_u,
+          a_i32_atomic_rmw_xchg,
+          a_i64_atomic_rmw32_xchg_u,
+          a_i32_atomic_rmw_cmpxchg,
+          a_i64_atomic_rmw32_cmpxchg_u:
             result:=2;
 
           a_i64_load,
           a_f64_load,
           a_i64_store,
-          a_f64_store:
+          a_f64_store,
+          a_memory_atomic_wait64,
+          a_i64_atomic_load,
+          a_i64_atomic_store,
+          a_i64_atomic_rmw_add,
+          a_i64_atomic_rmw_sub,
+          a_i64_atomic_rmw_and,
+          a_i64_atomic_rmw_or,
+          a_i64_atomic_rmw_xor,
+          a_i64_atomic_rmw_xchg,
+          a_i64_atomic_rmw_cmpxchg:
             result:=3;
           else
             internalerror(2021092614);
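
One detail worth calling out: when the address operand carries a symbol, the offset is not written as a plain ULEB128. Instead objdata.writeReloc emits a RELOC_MEMORY_ADDR_LEB relocation over a fixed 5-byte field, which is why the size pass counts a constant 5 bytes for that branch. The WebAssembly object-file convention pads the LEB128 so the linker can patch any 32-bit address in place without resizing the code section. A sketch of such a padded encoding (illustrative only; in the compiler this is the ObjData machinery's job):

    { ULEB128 padded to exactly 5 bytes; 4*7+7 = 35 payload bits,
      enough for any 32-bit value the linker may patch in. }
    procedure WritePaddedUleb5(var buf: array of byte; var p: longint; v: longword);
    var
      i: longint;
    begin
      for i:=1 to 4 do
        begin
          buf[p]:=byte(v and $7F) or $80;  { continuation bit kept set }
          v:=v shr 7;
          Inc(p);
        end;
      buf[p]:=byte(v and $7F);             { last byte clears the bit }
      Inc(p);
    end;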