utils: apply Inoussa's patch to the Unicode utils (issue #0022909):
This patch fixes some memory overwrites that were causing the helpers to crash.
It introduces the generation of the little endian and big endian files
regardless of the host endianness.
The patch also adds some new test cases. The cldrparser program now accepts a
"-t" switch to execute the test suite.

git-svn-id: trunk@24018 -
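For reference, the updated usage text in cldrparser.lpr suggests invocations along these lines (the collation file name, type name and directories below are only illustrative examples, not part of the patch):

    # run only the parser test suite, then exit
    cldrparser -t

    # regular generation run, with explicit data and output directories
    cldrparser some_collation.xml standard -d/path/to/cldr/data -o/path/to/output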

paul · 12 years ago · commit 3c0e11fa5a
utils/unicode/cldrhelper.pas  +94 -35

@@ -26,6 +26,7 @@ unit cldrhelper;
 {$PACKENUM 1}
 {$modeswitch advancedrecords}
 {$scopedenums on}
+{$typedaddress on}

 interface
 
@@ -200,13 +201,14 @@ type
   ) : Integer;
   function FindCollationDefaultItemName(ACollation : TCldrCollation) : string;
   procedure GenerateCdlrCollation(
-    ACollation    : TCldrCollation;
-    AItemName     : string;
-    AStoreName    : string;
+    ACollation           : TCldrCollation;
+    AItemName            : string;
+    AStoreName           : string;
     AStream,
-    AEndianStream : TStream;
-    ARootChars    : TOrderedCharacters;
-    ARootWeigths  : TUCA_LineRecArray
+    ANativeEndianStream,
+    AOtherEndianStream   : TStream;
+    ARootChars           : TOrderedCharacters;
+    ARootWeigths         : TUCA_LineRecArray
   );

   procedure GenerateUCA_CLDR_Head(
@@ -468,6 +470,8 @@ var
       exit(kr + 1);
     if not pst^.Before then begin
       kk := kr + 1;
+      if (kk >= ASequence.ActualLength) then
+        exit(kk);
       pd := @ASequence.Data[kk];
       for kk := kk to ASequence.ActualLength - 1 do begin
         if (pd^.WeigthKind <= pse^.WeigthKind) then
@@ -533,14 +537,17 @@ function SimpleFormKey(const ACEList : TUCA_WeightRecArray) : TUCASortKey;
 var
   r : TUCASortKey;
   i, c, k, ral, levelCount : Integer;
-  pce : PUCA_PropWeights;
+  pce : ^TUCA_WeightRec;
 begin
   c := Length(ACEList);
   if (c = 0) then
     exit(nil);
-  SetLength(r,((3+1{Level Separator})*c));
-  ral := 0;
+  //SetLength(r,((3+1{Level Separator})*c));
   levelCount := Length(ACEList[0].Weights);
+  if (levelCount > 3) then
+    levelCount := 3;
+  SetLength(r,(levelCount*c + levelCount));
+  ral := 0;
   for i := 0 to levelCount - 1 do begin
     for k := 0 to c - 1 do begin
       pce := @ACEList[k];
@@ -878,13 +885,13 @@ var
     kral : Integer;
     kres : TUCA_WeightRecArray;

-    procedure EnsureResultLength(const APlus : Integer);inline;
+    procedure EnsureResultLength(const APlus : Integer);//inline;
     begin
       if ((kral+APlus) > Length(kres)) then
         SetLength(kres,(2*(kral+APlus)));
     end;

-    procedure AddToResult(const AValue : TUCA_WeightRecArray);inline;
+    procedure AddToResult(const AValue : TUCA_WeightRecArray);//inline;
     begin
       EnsureResultLength(Length(AValue));
       Move(AValue[0],kres[kral],(Length(AValue)*SizeOf(kres[0])));
@@ -893,6 +900,7 @@ var

   var
     kc, k, ktempIndex, ki : Integer;
+    tmpWeight : array of TUCA_PropWeights;
   begin
     kc := Length(AList);
     kral := 0;
@@ -925,14 +933,30 @@ var
              Continue;
            end;
          end;
+          SetLength(tmpWeight,2);
+          DeriveWeight(AList[k][ki],@tmpWeight[0]);
           EnsureResultLength(2);
-          DeriveWeight(AList[k][ki],@kres[kral]);
+          kres[kral].Weights[0] := tmpWeight[0].Weights[0];
+          kres[kral].Weights[1] := tmpWeight[0].Weights[1];
+          kres[kral].Weights[2] := tmpWeight[0].Weights[2];
+          kres[kral+1].Weights[0] := tmpWeight[1].Weights[0];
+          kres[kral+1].Weights[1] := tmpWeight[1].Weights[1];
+          kres[kral+1].Weights[2] := tmpWeight[1].Weights[2];
           kral := kral + 2;
+          tmpWeight := nil;
         end
       end;
+      SetLength(tmpWeight,2);
+      DeriveWeight(AList[k][0],@tmpWeight[0]);
       EnsureResultLength(2);
-      DeriveWeight(AList[k][0],@kres[kral]);
+      kres[kral].Weights[0] := tmpWeight[0].Weights[0];
+      kres[kral].Weights[1] := tmpWeight[0].Weights[1];
+      kres[kral].Weights[2] := tmpWeight[0].Weights[2];
+      kres[kral+1].Weights[0] := tmpWeight[1].Weights[0];
+      kres[kral+1].Weights[1] := tmpWeight[1].Weights[1];
+      kres[kral+1].Weights[2] := tmpWeight[1].Weights[2];
       kral := kral + 2;
+      tmpWeight := nil;
     end;
     SetLength(kres,kral);
     Result := kres;
@@ -1021,13 +1045,13 @@ begin
             if ((p - 1) = pbase) then begin
               if (p^.WeigthKind = TReorderWeigthKind.Primary) then begin
                 SetLength(pr^.Weights,2);
-                FillByte(pr^.Weights[0],SizeOf(pr^.Weights[0]),0);
+                FillByte(pr^.Weights[0],(Length(pr^.Weights)*SizeOf(pr^.Weights[0])),0);
                 pr^.Weights[0].Weights[0] := (pwb^[0].Weights[0] + 1);
                 pr^.Weights[0].Variable := pwb^[0].Variable;
                 pr^.Weights[1] := pr^.Weights[0];
               end else if (p^.WeigthKind = TReorderWeigthKind.Secondary) then begin
                 SetLength(pr^.Weights,2);
-                FillByte(pr^.Weights[0],SizeOf(pr^.Weights[0]),0);
+                FillByte(pr^.Weights[0],(Length(pr^.Weights)*SizeOf(pr^.Weights[0])),0);
                 pr^.Weights[0].Weights[0] := pwb^[0].Weights[0];
                 pr^.Weights[0].Weights[1] := (pwb^[0].Weights[1] + 1);
                 pr^.Weights[0].Variable := pwb^[0].Variable;
@@ -1035,7 +1059,7 @@ begin
                 pr^.Weights[1].Variable := pr^.Weights[0].Variable;
               end else if (p^.WeigthKind = TReorderWeigthKind.Tertiary) then begin
                 SetLength(pr^.Weights,2);
-                FillByte(pr^.Weights[0],SizeOf(pr^.Weights[0]),0);
+                FillByte(pr^.Weights[0],(Length(pr^.Weights)*SizeOf(pr^.Weights[0])),0);
                 pr^.Weights[0].Weights[0] := pwb^[0].Weights[0];
                 pr^.Weights[0].Weights[1] := pwb^[0].Weights[1];
                 pr^.Weights[0].Weights[2] := (pwb^[0].Weights[2] + 1);
@@ -1345,6 +1369,13 @@ var
   p : PReorderUnit;
   i, c : Integer;
 begin
+  if (ActualLength=0) then begin
+    EnsureSize(ActualLength + 1);
+    p := @Data[0];
+    p^.Assign(AItem);
+    p^.Changed := True;
+    exit(0);
+  end;
   k := IndexOf(AItem.Characters,AItem.Context,@Data[0],ActualLength);
   if (k = ADestPos) then begin
     Data[ADestPos].Assign(AItem);
@@ -1362,10 +1393,12 @@ begin
     p^.Assign(AItem);
     p^.Changed := True;
   end else begin
-    p := @Data[c-1];
-    for i := finalPos to c - 1 do begin
-      Move(p^,(p+1)^,SizeOf(p^));
-      Dec(p);
+    if (c > 0) then begin
+      p := @Data[c-1];
+      for i := finalPos to c - 1 do begin
+        Move(p^,(p+1)^,SizeOf(p^));
+        Dec(p);
+      end;
     end;
     p := @Data[finalPos];
 
@@ -1519,9 +1552,7 @@ end;

 procedure GenerateUCA_CLDR_Registration(
   ADest  : TStream;
-  ABook  : PUCA_DataBook;
-  AProps : PUCA_PropBook;
-  ACollation : TCldrCollationItem
+  ABook  : PUCA_DataBook
 );

   procedure AddLine(const ALine : ansistring);
@@ -1577,14 +1608,34 @@ begin
   AddLine('end.');
 end;
 
+
+procedure CheckEndianTransform(const ASource : PUCA_PropBook);
+var
+  x, y : array of Byte;
+  px, py : PUCA_PropItemRec;
+begin
+  if (ASource = nil) or (ASource^.ItemSize = 0) then
+    exit;
+  SetLength(x,ASource^.ItemSize);
+  px := PUCA_PropItemRec(@x[0]);
+  ReverseFromNativeEndian(ASource^.Items,ASource^.ItemSize,px);
+
+  SetLength(y,ASource^.ItemSize);
+  py := PUCA_PropItemRec(@y[0]);
+  ReverseToNativeEndian(px,ASource^.ItemSize,py);
+  if not CompareMem(ASource^.Items,@y[0],Length(x)) then
+    CompareProps(ASource^.Items, PUCA_PropItemRec(@y[0]),ASource^.ItemSize);
+end;
+
 procedure GenerateCdlrCollation(
-  ACollation    : TCldrCollation;
-  AItemName     : string;
-  AStoreName    : string;
+  ACollation           : TCldrCollation;
+  AItemName            : string;
+  AStoreName           : string;
   AStream,
-  AEndianStream : TStream;
-  ARootChars    : TOrderedCharacters;
-  ARootWeigths  : TUCA_LineRecArray
+  ANativeEndianStream,
+  AOtherEndianStream   : TStream;
+  ARootChars           : TOrderedCharacters;
+  ARootWeigths         : TUCA_LineRecArray
 );

   procedure AddLine(const ALine : ansistring; ADestStream : TStream);
@@ -1626,6 +1677,7 @@ begin
   else
     MakeUCA_Props(@locUcaBook,locUcaProps);
   try
+    CheckEndianTransform(locUcaProps);
     if locHasProps then begin
       MakeUCA_BmpTables(ucaFirstTable,ucaSecondTable,locUcaProps);
       SetLength(ucaoSecondTable,100);
@@ -1634,13 +1686,20 @@ begin
     GenerateLicenceText(AStream);
     GenerateUCA_CLDR_Head(AStream,@locUcaBook,locUcaProps,locItem);
     if locHasProps then begin
-      GenerateUCA_BmpTables(AStream,AEndianStream,ucaFirstTable,ucaSecondTable,THIS_ENDIAN);
-      GenerateUCA_OBmpTables(AStream,AEndianStream,ucaoFirstTable,ucaoSecondTable,THIS_ENDIAN);
-      GenerateUCA_PropTable(AEndianStream,locUcaProps);
-      s := GenerateEndianIncludeFileName(AStoreName);
-      AddLine(Format('{$include %s}'+sLineBreak,[ExtractFileName(s)]),AStream);
+      GenerateUCA_BmpTables(AStream,ANativeEndianStream,AOtherEndianStream,ucaFirstTable,ucaSecondTable);
+      GenerateUCA_OBmpTables(AStream,ANativeEndianStream,AOtherEndianStream,ucaoFirstTable,ucaoSecondTable);
+      GenerateUCA_PropTable(ANativeEndianStream,locUcaProps,ENDIAN_NATIVE);
+      GenerateUCA_PropTable(AOtherEndianStream,locUcaProps,ENDIAN_NON_NATIVE);
+
+      AddLine('{$ifdef FPC_LITTLE_ENDIAN}',AStream);
+        s := GenerateEndianIncludeFileName(AStoreName,ekLittle);
+        AddLine(Format('  {$include %s}',[ExtractFileName(s)]),AStream);
+      AddLine('{$else FPC_LITTLE_ENDIAN}',AStream);
+        s := GenerateEndianIncludeFileName(AStoreName,ekBig);
+        AddLine(Format('  {$include %s}',[ExtractFileName(s)]),AStream);
+      AddLine('{$endif FPC_LITTLE_ENDIAN}',AStream);
     end;
-    GenerateUCA_CLDR_Registration(AStream,@locUcaBook,locUcaProps,locItem);
+    GenerateUCA_CLDR_Registration(AStream,@locUcaBook);
   finally
     locSequence.Clear();
     FreeUcaBook(locUcaProps);

utils/unicode/cldrparser.lpr  +29 -21

@@ -23,9 +23,6 @@ program cldrparser;

 {$mode objfpc}{$H+}

-{ $define test_suite} // Define this to execute the parser test suite.
-{$define actual_parsing}
-
 uses
   SysUtils, classes, getopts,
   cldrhelper, helper, cldrtest, cldrxml, unicodeset;
@@ -34,7 +31,7 @@ const
   SUsageText =
     'This program creates pascal units from CLDR''s collation files for usage ' + sLineBreak +
     'with the FreePascal Native Unicode Manager.' + sLineBreak + sLineBreak +
-    'Usage : cldrparser <collationFileName> [<typeName>] [-d<dataDir>] [-o<outputDir>]' + sLineBreak + sLineBreak +
+    'Usage : cldrparser <collationFileName> [<typeName>] [-d<dataDir>] [-o<outputDir>] [-t]' + sLineBreak + sLineBreak +
     '  where :' + sLineBreak +
     ' ' + sLineBreak +
     '   - collationFileName : specify the target file.' + sLineBreak +
@@ -49,6 +46,7 @@ const
     '               The default value is the program''s directory.' + sLineBreak +
     '   - outputDir : specify the directory where the generated files will be stored.' + sLineBreak +
     '                 The default value is the program''s directory.' + sLineBreak +
+    '   - t : to execute parser the test suite. The program will execute only the test suite and exit.' + sLineBreak +
     ' ' + sLineBreak +
     '  The program expects some files to be present in the <dataDir> folder : ' + sLineBreak +
     '     - UCA_Rules_SHORT.xml found in the CollationAuxiliary.zip available on unicode.org' + sLineBreak +
@@ -57,7 +55,11 @@ const


 function ParseOptions(
-  var ADataDir, AOuputDir, ACollationFileName, ACollationTypeName : string
+  var ADataDir,
+      AOuputDir,
+      ACollationFileName,
+      ACollationTypeName : string;
+  var AExecTestSuite     : Boolean
 ) : Boolean;
 var
   c : Char;
@@ -67,8 +69,9 @@ begin
   if (ParamCount() = 0) then
     exit(False);
   Result := True;
+  AExecTestSuite := False;
   repeat
-    c := GetOpt('d:o:h');
+    c := GetOpt('d:o:ht');
     case c of
       'd' : ADataDir := ExpandFileName(Trim(OptArg));
       'o' : AOuputDir := ExpandFileName(Trim(OptArg));
@@ -77,6 +80,7 @@ begin
           WriteLn(SUsageText);
           Result := False;
         end;
+      't' : AExecTestSuite := True;
     end;
   until (c = EndOfOptions);
   idx := 0;
@@ -97,24 +101,25 @@ end;
 var
   orderedChars : TOrderedCharacters;
   ucaBook : TUCA_DataBook;
-  stream, endianStream : TMemoryStream;
+  stream, streamNE, streamOE : TMemoryStream;
   s, collationFileName, collationTypeName : string;
   i , c: Integer;
   collation : TCldrCollation;
   dataPath, outputPath : string;
   collationItem : TCldrCollationItem;
+  testSuiteFlag : Boolean;
 begin
-{$ifdef test_suite}
-  exec_tests();
-{$endif test_suite}
-
-{$ifdef actual_parsing}
   dataPath := '';
   outputPath := '';
   collationFileName := '';
   collationTypeName := '';
-  if not ParseOptions(dataPath,outputPath,collationFileName,collationTypeName) then
+  testSuiteFlag := False;
+  if not ParseOptions(dataPath,outputPath,collationFileName,collationTypeName,testSuiteFlag) then
     Halt(1);
+  if testSuiteFlag then begin
+    exec_tests();
+    Halt;
+  end;
   if (dataPath <> '') and not(DirectoryExists(dataPath)) then begin
     WriteLn('This directory does not exist : ',dataPath);
     Halt(1);
@@ -148,7 +153,8 @@ begin

   WriteLn(sLineBreak,'Collation Parsing ',QuotedStr(collationFileName),'  ...');
   stream := nil;
-  endianStream := nil;
+  streamNE := nil;
+  streamOE := nil;
   collation := TCldrCollation.Create();
   try
     ParseCollationDocument(collationFileName,collation,TCldrParserMode.HeaderParsing);
@@ -186,24 +192,26 @@ begin

       WriteLn('Start generation ...');
       stream.Clear();
-      endianStream := TMemoryStream.Create();
+      streamNE := TMemoryStream.Create();
+      streamOE := TMemoryStream.Create();
       s := COLLATION_FILE_PREFIX + ChangeFileExt(LowerCase(ExtractFileName(collationFileName)),'.pas');
       GenerateCdlrCollation(
-        collation,collationTypeName,s,stream,endianStream,
+        collation,collationTypeName,s,stream,streamNE,streamOE,
         orderedChars,ucaBook.Lines
       );
       stream.SaveToFile(ExtractFilePath(collationFileName)+s);
-      if (endianStream.Size > 0) then
-        endianStream.SaveToFile(ExtractFilePath(collationFileName)+GenerateEndianIncludeFileName(s));
+      if (streamNE.Size > 0) then begin
+        streamNE.SaveToFile(ExtractFilePath(collationFileName)+GenerateEndianIncludeFileName(s,ENDIAN_NATIVE));
+        streamOE.SaveToFile(ExtractFilePath(collationFileName)+GenerateEndianIncludeFileName(s,ENDIAN_NON_NATIVE));
+      end;
     end;
   finally
-    endianStream.Free();
+    streamOE.Free();
+    streamNE.Free();
     stream.Free();
     collation.Free();
   end;

-{$endif actual_parsing}
-
   WriteLn(sLineBreak,'Finished.');
 end.
 

utils/unicode/cldrtest.pas  +1358 -4

@@ -22,6 +22,8 @@
 unit cldrtest;

 {$mode objfpc}{$H+}
+{$typedaddress on}
+{$warn 4056 off}  //Conversion between ordinals and pointers is not portable

 interface
 
@@ -31,7 +33,8 @@ uses

   function ToAnsiChars(const AValue : array of TUnicodeCodePoint) : string;
   function DumpSequenceAnsi(const ASequence : TOrderedCharacters) : string;
-  function DumpWeigth(const AItem : TUCA_WeightRec) : string;
+  function DumpWeigth(const AItem : TUCA_WeightRec) : string;overload;
+  function DumpWeigth(const AItems : array of TUCA_WeightRec) : string;overload;
   function DumpLine(ALine : TUCA_LineRec) : string;
   function DumpLines(ALines : TUCA_LineRecArray) : string;
   function CodePointToArray(const ACodePoint : TUnicodeCodePoint) : TUnicodeCodePointArray;overload;
@@ -52,6 +55,10 @@ uses
   procedure test9();
   procedure test10();
   procedure test11();
+  procedure test12();
+  procedure test13();
+  procedure test14();
+  procedure test15();

 implementation
 
@@ -79,6 +86,14 @@ begin
   test10();
   WriteLn('***************************** TEST 11 ******************');
   test11();
+  WriteLn('***************************** TEST 12 ******************');
+  test12();
+  WriteLn('***************************** TEST 13 ******************');
+  test13();
+  WriteLn('***************************** TEST 14 ******************');
+  test14();
+  WriteLn('***************************** TEST 15 ******************');
+  test15();
 end;

 function ToAnsiChars(const AValue : array of TUnicodeCodePoint) : string;
@@ -132,7 +147,7 @@ begin
   Result := s;
 end;

-function DumpWeigth(const AItem : TUCA_WeightRec) : string;
+function DumpWeigth(const AItem : TUCA_WeightRec) : string;overload;
 var
   r : string;
 begin
@@ -146,6 +161,17 @@ begin
   Result := r;
 end;
 
+function DumpWeigth(const AItems : array of TUCA_WeightRec) : string;
+var
+  r : string;
+  i : Integer;
+begin
+  r := '';
+  for i := 0 to Length(AItems) - 1 do
+    r := r + ' ' +DumpWeigth(AItems[i]);
+  Result := Trim(r);
+end;
+
 function DumpKey(const AItem : TUCASortKey) : string;
 var
   i : Integer;
@@ -161,10 +187,20 @@ function DumpLine(ALine : TUCA_LineRec) : string;
 var
   i : Integer;
   r : string;
+  ctxItem : TUCA_LineContextItemRec;
 begin
   r := '';
+  if ALine.HasContext() then begin
+    r := r + '*';
+    for i := 0 to Length(ALine.Context.Data) - 1 do begin
+      ctxItem := ALine.Context.Data[i];
+      r := r + sLineBreak +
+           '        ' + ToAnsiChars(ctxItem.CodePoints) + ' => ' + DumpWeigth(ctxItem.Weights);
+    end;
+    r := r + sLineBreak + '    ';
+  end;
   if (Length(ALine.Weights) = 0) then begin
-    r := '[]';
+    r := r + '[]';
   end else begin
     for i := Low(ALine.Weights) to High(ALine.Weights) do
       r := r + DumpWeigth(ALine.Weights[i]);
@@ -1654,7 +1690,7 @@ var
   sequence, sequenceClean : TOrderedCharacters;
   statement : TReorderSequence;
   wfirst, wresult : TUCA_LineRecArray;
-  unicodeBook1, unicodeBook2, unicodeBook3 : unicodedata.TUCA_DataBook;
+  unicodeBook1, unicodeBook2 : unicodedata.TUCA_DataBook;
 begin
   statement.Clear();
   test10_prepareWeigth(wfirst);
@@ -1910,4 +1946,1322 @@ begin

 end;
 
+//------------------------------------------------------
+
+procedure test12_prepareWeigth(var AData : TUCA_LineRecArray);
+var
+  p : PUCA_LineRec;
+begin
+  SetLength(AData,1);
+  p := @AData[Low(AData)];
+    p^.CodePoints := CodePointToArray(Ord('a'));
+    p^.Weights := ToWeight($15EF,$0120,$0002);
+end;
+
+procedure test12_PopulateSequence(var ASequence : TOrderedCharacters);
+var
+  i : Integer;
+begin
+  ASequence := TOrderedCharacters.Create();
+  ASequence.Append(TReorderUnit.From(Ord('a'),TReorderWeigthKind.Primary,1));
+  for i := 0 to ASequence.ActualLength - 1 do
+    ASequence.Data[i].Changed := False;
+end;
+
+procedure Check(const ACondition : Boolean; const AMsg : string);overload;
+begin
+  if not ACondition then
+    raise Exception.Create(AMsg);
+end;
+
+procedure Check(
+  const ACondition : Boolean;
+  const AFormatMsg : string;
+  const AArgs      : array of const
+);overload;
+begin
+  Check(ACondition,Format(AFormatMsg,AArgs));
+end;
+
+procedure Check(const ACondition : Boolean);overload;
+begin
+  Check(ACondition,'Check failed.')
+end;
+
+procedure CheckSimpleProps(
+  AItem          : PUCA_PropItemRec;
+  AHasCodePoint,
+  AIsValid       : Boolean;
+  AChildCount    : Byte;
+  AContextual    : Boolean
+);overload;
+var
+  p : PUCA_PropItemRec;
+begin
+  p := AItem;
+  Check(p<>nil,'p = nil');
+  Check(p^.HasCodePoint()=AHasCodePoint,'HasCodePoint');
+  Check(p^.IsValid()=AIsValid,'IsValid');
+  Check(p^.ChildCount=AChildCount,'ChildCount');
+  Check(p^.Contextual=AContextual,'Contextual');
+end;
+
+procedure CheckSimpleProps(
+  AItem          : PUCA_PropItemRec;
+  AHasCodePoint,
+  AIsValid       : Boolean;
+  AChildCount    : Byte;
+  AContextual,
+  AIsDeleted     : Boolean
+);overload;
+begin
+  CheckSimpleProps(AItem,AHasCodePoint,AIsValid,AChildCount,AContextual);
+  Check(AItem^.IsDeleted=AIsDeleted,'IsDeleted');
+end;
+
+procedure CheckWeigths(AItem : PUCA_PropItemRec; const AWeigths : array of Word);overload;
+var
+  p : PUCA_PropItemRec;
+  c, i : Integer;
+  pb : PByte;
+  pw : PWord;
+begin
+  p := AItem;
+  c := Length(AWeigths);
+  if ((c mod 3) > 0) then
+    Check(False,'Invalid Weigth Array.');
+  c := c div 3;
+  Check(c=p^.WeightLength,'WeightLength');
+  if (c = 0) then
+    exit;
+  pb := PByte(PtrUInt(p)+SizeOf(TUCA_PropItemRec));
+  pw := @AWeigths[Low(AWeigths)];
+//First Item
+  Check(PWord(pb)^ = pw^, 'First Item[0]');
+    pw := pw + 1;
+    pb := pb + 2;
+  if (pw^ > High(Byte)) then begin
+    Check(PWord(pb)^ = pw^, 'First Item[1]');
+    pb := pb + 2;
+  end else begin
+    Check(pb^ = pw^, 'First Item[1]');
+    pb := pb + 1;
+  end;
+  pw := pw + 1;
+  if (pw^ > High(Byte)) then begin
+    Check(PWord(pb)^ = pw^, 'First Item[2]');
+    pb := pb + 2;
+  end else begin
+    Check(pb^ = pw^, 'First Item[2]');
+    pb := pb + 1;
+  end;
+  pw := pw + 1;
+// Others
+  for i := 1 to c-1 do begin
+    Check(PWord(pb)^ = pw^, 'Item[0],i=%d',[i]);
+      Inc(pw);
+      pb := pb + 2;
+    Check(PWord(pb)^ = pw^, 'Item[1],i=%d',[i]);
+      Inc(pw);
+      pb := pb + 2;
+    Check(PWord(pb)^ = pw^, 'Item[2],i=%d',[i]);
+      Inc(pw);
+      pb := pb + 2;
+  end;
+end;
+
+procedure CheckWeigths(
+        AData      : PUCA_PropWeights;
+  const ADataCount : Integer;
+  const AWeigths   : array of Word
+);overload;
+var
+  c: Integer;
+begin
+  c := Length(AWeigths);
+  if ((c mod 3) > 0) then
+    Check(False,'Invalid Weigth Array.');
+  c := c div 3;
+  Check(c=ADataCount,'WeightLength');
+  if (c = 0) then
+    exit;
+  if not CompareMem(AData,@AWeigths[0],(ADataCount*3*SizeOf(Word))) then
+    Check(False,'Weight');
+end;
+
+function CalcWeigthSize(const AWeigths : array of Word) : Integer;
+var
+  c : Integer;
+begin
+  c := Length(AWeigths);
+  if ((c mod 3) > 0) then
+    Check(False,'Invalid Weigth Array.');
+  Result := c * SizeOf(Word);
+  if (c>0) then begin
+    if (AWeigths[1] <= High(Byte)) then
+      Result := Result - 1;
+    if (AWeigths[2] <= High(Byte)) then
+      Result := Result - 1;
+  end;
+end;
+
+procedure test12_check_1(const ABook : unicodedata.TUCA_DataBook);
+var
+  p, px : PUCA_PropItemRec;
+  size, sizeTotal, t: Cardinal;
+begin
+  sizeTotal := 0;
+// for 'b'
+  p := ABook.Props;
+  CheckSimpleProps(p,False,True,0,False);
+  CheckWeigths(p,[$15F0,0,0, $15F0,0,0]);
+  size := SizeOf(TUCA_PropItemRec) + CalcWeigthSize([$15F0,0,0, $15F0,0,0]);
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  Check(p^.Size = size,'size');
+  sizeTotal:= sizeTotal+size;
+
+// for 'c'
+  p := PUCA_PropItemRec(PtrUInt(p)+size);
+  CheckSimpleProps(p,False,True,0,False);
+  CheckWeigths(p,[$15F0,0,0, $15F1,0,0]);
+  size := SizeOf(TUCA_PropItemRec) + CalcWeigthSize([$15F0,0,0, $15F0,0,0]);
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  Check(p^.Size = size,'size');
+  sizeTotal:= sizeTotal+size;
+
+// for 'x'
+  p := PUCA_PropItemRec(PtrUInt(p)+size);
+  px := p;
+  CheckSimpleProps(p,False,False,1,False);
+  CheckWeigths(p,[]);
+  size := SizeOf(TUCA_PropItemRec);
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  t := size;
+  sizeTotal:= sizeTotal+size;
+// for 'y'
+  p := PUCA_PropItemRec(PtrUInt(p)+size);
+  CheckSimpleProps(p,True,True,0,False);
+  CheckWeigths(p,[$15F0,0,0, $15F2,0,0]);
+  size := SizeOf(TUCA_PropItemRec) +
+          SizeOf(UInt24) +
+          CalcWeigthSize([$15F0,0,0, $15F2,0,0]);
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  Check(p^.Size = size,'size(y)');
+
+  Check(px^.Size = (t+size),'size(x)');
+
+  sizeTotal:= sizeTotal+size;
+  Check(ABook.PropCount = sizeTotal,'size(total)');
+end;
+
+procedure test12_check_2(const ABook : unicodedata.TUCA_DataBook);
+var
+  p, ph : PUCA_PropItemRec;
+  size, sizeTotal, t: Integer;
+begin
+  sizeTotal := 0;
+// for 'b'
+  p := ABook.Props;
+  CheckSimpleProps(p,False,True,0,False);
+  CheckWeigths(p,[$15EF,$121,0, $15EF,0,0]);
+  size := SizeOf(TUCA_PropItemRec) + CalcWeigthSize([$15EF,$121,0, $15EF,0,0]);
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  Check(p^.Size = size,'size');
+  sizeTotal:= sizeTotal+size;
+
+// for 'c'
+  p := PUCA_PropItemRec(PtrUInt(p)+size);
+  CheckSimpleProps(p,False,True,0,False);
+  CheckWeigths(p,[$15EF,$122,0, $15EF,0,0]);
+  size := SizeOf(TUCA_PropItemRec) + CalcWeigthSize([$15EF,$122,0, $15EF,0,0]);
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  Check(p^.Size = size,'size');
+  sizeTotal:= sizeTotal+size;
+
+// for 'h'
+  p := PUCA_PropItemRec(PtrUInt(p)+size);
+  ph := p;
+  CheckSimpleProps(p,False,False,1,False);
+  CheckWeigths(p,[]);
+  size := SizeOf(TUCA_PropItemRec);
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  t := size;
+  sizeTotal:= sizeTotal+size;
+// for 'i'
+  p := PUCA_PropItemRec(PtrUInt(p)+size);
+  CheckSimpleProps(p,True,True,0,False);
+  CheckWeigths(p,[$15EF,$123,0, $15EF,0,0]);
+  size := SizeOf(TUCA_PropItemRec) +
+          SizeOf(UInt24) +
+          CalcWeigthSize([$15EF,$123,0, $15EF,0,0]);
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  Check(p^.Size = size,'size(i)');
+  sizeTotal:= sizeTotal+size;
+
+  Check(ph^.Size = (t+size),'size(h)');
+
+// for 'k'
+  p := PUCA_PropItemRec(PtrUInt(p)+size);
+  CheckSimpleProps(p,False,True,0,False);
+  CheckWeigths(p,[$15EF,$123,1, $15EF,0,0]);
+  size := SizeOf(TUCA_PropItemRec) + CalcWeigthSize([$15EF,$123,1, $15EF,0,0]);
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  Check(p^.Size = size,'size(k)');
+  sizeTotal:= sizeTotal+size;
+
+  Check(ABook.PropCount = sizeTotal,'size(total)');
+end;
+
+procedure test12_check_3(const ABook : unicodedata.TUCA_DataBook);
+var
+  p, pc : PUCA_PropItemRec;
+  size, sizeTotal, t: Integer;
+begin
+  sizeTotal := 0;
+// for 'b'
+  p := ABook.Props;
+  CheckSimpleProps(p,False,True,0,False);
+  CheckWeigths(p,[$15EF,$121,0, $15EF,0,0]);
+  size := SizeOf(TUCA_PropItemRec) + CalcWeigthSize([$15EF,$121,0, $15EF,0,0]);
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  Check(p^.Size = size,'size');
+  sizeTotal:= sizeTotal+size;
+
+// for 'c'
+  p := PUCA_PropItemRec(PtrUInt(p)+size);
+  pc := p;
+  CheckSimpleProps(p,False,True,1,False);
+  CheckWeigths(p,[$15EF,$122,0, $15EF,0,0]);
+  size := SizeOf(TUCA_PropItemRec) + CalcWeigthSize([$15EF,$122,0, $15EF,0,0]);
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  t := size;
+  sizeTotal:= sizeTotal+size;
+
+// for 'i'
+  p := PUCA_PropItemRec(PtrUInt(p)+size);
+  CheckSimpleProps(p,True,True,0,False);
+  CheckWeigths(p,[$15EF,$123,0, $15EF,0,0]);
+  size := SizeOf(TUCA_PropItemRec) +
+          SizeOf(UInt24) +
+          CalcWeigthSize([$15EF,$123,0, $15EF,0,0]);
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  Check(p^.Size = size,'size(i)');
+  sizeTotal:= sizeTotal+size;
+
+  Check(pc^.Size = (t+size),'size(c)');
+
+// for 'k'
+  p := PUCA_PropItemRec(PtrUInt(p)+size);
+  CheckSimpleProps(p,False,True,0,False);
+  CheckWeigths(p,[$15EF,$123,1, $15EF,0,0]);
+  size := SizeOf(TUCA_PropItemRec) + CalcWeigthSize([$15EF,$123,1, $15EF,0,0]);
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  Check(p^.Size = size,'size(k)');
+  sizeTotal:= sizeTotal+size;
+
+  Check(ABook.PropCount = sizeTotal,'size(total)');
+end;
+
+procedure test12_check_4(const ABook : unicodedata.TUCA_DataBook);
+var
+  p, pc : PUCA_PropItemRec;
+  size, sizeTotal, t: Integer;
+begin
+  sizeTotal := 0;
+// for 'b'
+  p := ABook.Props;
+  CheckSimpleProps(p,False,True,0,False);
+  CheckWeigths(p,[$15EF,$121,0, $15EF,0,0]);
+  size := SizeOf(TUCA_PropItemRec) + CalcWeigthSize([$15EF,$121,0, $15EF,0,0]);
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  Check(p^.Size = size,'size');
+  sizeTotal:= sizeTotal+size;
+
+// for 'c'
+  p := PUCA_PropItemRec(PtrUInt(p)+size);
+  pc := p;
+  CheckSimpleProps(p,False,True,2,False);
+  CheckWeigths(p,[$15EF,$122,0, $15EF,0,0]);
+  size := SizeOf(TUCA_PropItemRec) + CalcWeigthSize([$15EF,$122,0, $15EF,0,0]);
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  t := size;
+  sizeTotal:= sizeTotal+size;
+
+// for 'i' as in 'ci'
+  p := PUCA_PropItemRec(PtrUInt(p)+size);
+  CheckSimpleProps(p,True,True,0,False);
+  CheckWeigths(p,[$15EF,$123,0, $15EF,0,0]);
+  size := SizeOf(TUCA_PropItemRec) +
+          SizeOf(UInt24) +
+          CalcWeigthSize([$15EF,$123,0, $15EF,0,0]);
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  Check(p^.Size = size,'size(i)');
+  t := t+size;
+  sizeTotal:= sizeTotal+size;
+
+// for 's' as in 'cs'
+  p := PUCA_PropItemRec(PtrUInt(p)+size);
+  CheckSimpleProps(p,True,True,0,False);
+  CheckWeigths(p,[$15EF,$123,1, $15F0,0,0]);
+  size := SizeOf(TUCA_PropItemRec) +
+          SizeOf(UInt24) +
+          CalcWeigthSize([$15EF,$123,1, $15F0,0,0]);
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  Check(p^.Size = size,'size(s)');
+  t := t+size;
+  sizeTotal:= sizeTotal+size;
+
+  Check(pc^.Size = t,'size(c)');
+
+// for 'k'
+  p := PUCA_PropItemRec(PtrUInt(p)+size);
+  CheckSimpleProps(p,False,True,0,False);
+  CheckWeigths(p,[$15EF,$123,1, $15EF,0,0]);
+  size := SizeOf(TUCA_PropItemRec) + CalcWeigthSize([$15EF,$123,1, $15EF,0,0]);
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  Check(p^.Size = size,'size(k)');
+  sizeTotal:= sizeTotal+size;
+
+  Check(ABook.PropCount = sizeTotal,'size(total)');
+end;
+
+procedure test12();
+var
+  sequence, sequenceClean : TOrderedCharacters;
+  statement : TReorderSequence;
+  wfirst, wresult : TUCA_LineRecArray;
+  unicodeBook1, unicodeBook2 : unicodedata.TUCA_DataBook;
+begin
+  statement.Clear();
+  test12_prepareWeigth(wfirst);
+  test12_PopulateSequence(sequenceClean);
+
+  WriteLn('  Initial = ',sLineBreak,'    ',DumpSequenceAnsi(sequenceClean),sLineBreak);
+  WriteLn(DumpLines(wfirst),sLineBreak+sLineBreak);
+  //Generate the original tables
+  ConstructUnicodeBook(wfirst,'test','first',nil,unicodeBook1);
+
+  // --- test 1
+  sequence := sequenceClean.Clone();
+  SetLength(statement.Reset,1);
+  statement.Reset[0] := Ord('a');
+  SetLength(statement.Elements,3);
+  statement.Elements[0] := TReorderUnit.From(Ord('b'),TReorderWeigthKind.Primary,0);
+  statement.Elements[1] := TReorderUnit.From(Ord('c'),TReorderWeigthKind.Primary,0);
+  statement.Elements[2] := TReorderUnit.From([Ord('x'),Ord('y')],TReorderWeigthKind.Primary,0);
+  sequence.ApplyStatement(@statement);
+  WriteLn('    Statement #1 = ',sLineBreak,'  ',DumpSequenceAnsi(sequence),sLineBreak);
+  wresult := nil;
+  ComputeWeigths(@sequence.Data[0],sequence.ActualLength,wfirst,wresult);
+  WriteLn(DumpLines(wresult),sLineBreak+sLineBreak);
+  //Generate updatet tables
+  ConstructUnicodeBook(wresult,'test','second',@unicodeBook1,unicodeBook2);
+    CheckInf(['a','b','c','xy'],@unicodeBook2);
+    test12_check_1(unicodeBook2);
+    WriteLn('    -- test 1 - ok');
+
+  // --- test 2
+  sequence := sequenceClean.Clone();
+  statement.Clear();
+  SetLength(statement.Reset,1);
+  statement.Reset[0] := Ord('a');
+  SetLength(statement.Elements,4);
+  statement.Elements[0] := TReorderUnit.From(Ord('b'),TReorderWeigthKind.Secondary,0);
+  statement.Elements[1] := TReorderUnit.From(Ord('c'),TReorderWeigthKind.Secondary,0);
+  statement.Elements[2] := TReorderUnit.From([Ord('h'),Ord('i')],TReorderWeigthKind.Secondary,0);
+  statement.Elements[3] := TReorderUnit.From(Ord('k'),TReorderWeigthKind.Tertiary,0);
+  sequence.ApplyStatement(@statement);
+  WriteLn('    Statement #2 = ',sLineBreak,'  ',DumpSequenceAnsi(sequence),sLineBreak);
+  wresult := nil;
+  ComputeWeigths(@sequence.Data[0],sequence.ActualLength,wfirst,wresult);
+  WriteLn(DumpLines(wresult),sLineBreak+sLineBreak);
+  //Generate updatet tables
+  ConstructUnicodeBook(wresult,'test','second',@unicodeBook1,unicodeBook2);
+    CheckInf(['a','b','c','hi','k'],@unicodeBook2);
+    test12_check_2(unicodeBook2);
+    WriteLn('    -- test 2 - ok');
+
+  // --- test 3
+  sequence := sequenceClean.Clone();
+  statement.Clear();
+  SetLength(statement.Reset,1);
+  statement.Reset[0] := Ord('a');
+  SetLength(statement.Elements,4);
+  statement.Elements[0] := TReorderUnit.From(Ord('b'),TReorderWeigthKind.Secondary,0);
+  statement.Elements[1] := TReorderUnit.From(Ord('c'),TReorderWeigthKind.Secondary,0);
+  statement.Elements[2] := TReorderUnit.From([Ord('c'),Ord('i')],TReorderWeigthKind.Secondary,0);
+  statement.Elements[3] := TReorderUnit.From(Ord('k'),TReorderWeigthKind.Tertiary,0);
+  sequence.ApplyStatement(@statement);
+  WriteLn('    Statement #3 = ',sLineBreak,'  ',DumpSequenceAnsi(sequence),sLineBreak);
+  wresult := nil;
+  ComputeWeigths(@sequence.Data[0],sequence.ActualLength,wfirst,wresult);
+  WriteLn(DumpLines(wresult),sLineBreak+sLineBreak);
+  //Generate updatet tables
+  ConstructUnicodeBook(wresult,'test','second',@unicodeBook1,unicodeBook2);
+    CheckInf(['a','b','c','ci','k'],@unicodeBook2);
+    test12_check_3(unicodeBook2);
+    WriteLn('    -- test 3 - ok');
+
+  // --- test 4
+  sequence := sequenceClean.Clone();
+  statement.Clear();
+  SetLength(statement.Reset,1);
+  statement.Reset[0] := Ord('a');
+  SetLength(statement.Elements,5);
+  statement.Elements[0] := TReorderUnit.From(Ord('b'),TReorderWeigthKind.Secondary,0);
+  statement.Elements[1] := TReorderUnit.From(Ord('c'),TReorderWeigthKind.Secondary,0);
+  statement.Elements[2] := TReorderUnit.From([Ord('c'),Ord('i')],TReorderWeigthKind.Secondary,0);
+  statement.Elements[3] := TReorderUnit.From(Ord('k'),TReorderWeigthKind.Tertiary,0);
+  statement.Elements[4] := TReorderUnit.From([Ord('c'),Ord('s')],TReorderWeigthKind.Primary,0);
+  sequence.ApplyStatement(@statement);
+  WriteLn('    Statement #4 = ',sLineBreak,'  ',DumpSequenceAnsi(sequence),sLineBreak);
+  wresult := nil;
+  ComputeWeigths(@sequence.Data[0],sequence.ActualLength,wfirst,wresult);
+  WriteLn(DumpLines(wresult),sLineBreak+sLineBreak);
+  //Generate updatet tables
+  ConstructUnicodeBook(wresult,'test','second',@unicodeBook1,unicodeBook2);
+    CheckInf(['a','b','c','ci','k','cs'],@unicodeBook2);
+    test12_check_4(unicodeBook2);
+    WriteLn('    -- test 4 - ok');
+end;
+
+//------------------------------------------------------
+
+procedure test13_prepareWeigth(var AData : TUCA_LineRecArray);
+var
+  p : PUCA_LineRec;
+begin
+  SetLength(AData,2);
+  p := @AData[Low(AData)];
+    p^.CodePoints := CodePointToArray(Ord('a'));
+    p^.Weights := ToWeight($15EF,$0120,$0002);
+  Inc(p);
+    p^.CodePoints := CodePointToArray([Ord('b')]);
+    p^.Weights := ToWeight($15F0,$0120,$0002);
+end;
+
+procedure test13_PopulateSequence(var ASequence : TOrderedCharacters);
+var
+  i : Integer;
+begin
+  ASequence := TOrderedCharacters.Create();
+  ASequence.Append(TReorderUnit.From(Ord('a'),TReorderWeigthKind.Primary,1));
+  ASequence.Append(TReorderUnit.From(Ord('b'),TReorderWeigthKind.Primary,2));
+  for i := 0 to ASequence.ActualLength - 1 do
+    ASequence.Data[i].Changed := False;
+end;
+
+procedure test13_check_1(const ABook : unicodedata.TUCA_DataBook);
+var
+  p, pb : PUCA_PropItemRec;
+  size, sizeTotal, t: Integer;
+begin
+  sizeTotal := 0;
+// for 'b'
+  p := ABook.Props;
+  pb := p;
+  CheckSimpleProps(p,False,True,1,False,True);
+  CheckWeigths(p,[]);
+  size := SizeOf(TUCA_PropItemRec);
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  t := size;
+  sizeTotal:= sizeTotal+size;
+
+// for 'u' as in 'bu'
+  p := PUCA_PropItemRec(PtrUInt(p)+size);
+  CheckSimpleProps(p,True,True,0,False,False);
+  CheckWeigths(p,[$15F0,0,0, $15F0,0,0]);
+  size := SizeOf(TUCA_PropItemRec) +
+          SizeOf(UInt24) +
+          CalcWeigthSize([$15F0,0,0, $15F0,0,0]);
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  Check(p^.Size = size,'size(u)');
+  t := t+size;
+  sizeTotal:= sizeTotal+size;
+
+  Check(pb^.Size = t,'size(c)');
+
+  Check(ABook.PropCount = sizeTotal,'size(total)');
+end;
+
+procedure test13_check_2(const ABook : unicodedata.TUCA_DataBook);
+var
+  p, pb : PUCA_PropItemRec;
+  size, sizeTotal, t: Integer;
+begin
+  sizeTotal := 0;
+// for 'b'
+  p := ABook.Props;
+  pb := p;
+  CheckSimpleProps(p,False,True,1,False,True);
+  CheckWeigths(p,[]);
+  size := SizeOf(TUCA_PropItemRec);
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  t := size;
+  sizeTotal:= sizeTotal+size;
+
+// for 'u' as in 'bu'
+  p := PUCA_PropItemRec(PtrUInt(p)+size);
+  CheckSimpleProps(p,True,True,0,False,False);
+  CheckWeigths(p,[$15F0,0,0, $15F0,0,0]);
+  size := SizeOf(TUCA_PropItemRec) +
+          SizeOf(UInt24) +
+          CalcWeigthSize([$15F0,0,0, $15F0,0,0]);
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  Check(p^.Size = size,'size(u)');
+  t := t+size;
+  sizeTotal:= sizeTotal+size;
+
+  Check(pb^.Size = t,'size(c)');
+
+// for 'c'
+  p := PUCA_PropItemRec(PtrUInt(p)+size);
+  CheckSimpleProps(p,False,True,0,False,False);
+  CheckWeigths(p,[$15F0,0,0, $15F1,0,0]);
+  size := SizeOf(TUCA_PropItemRec) +
+          CalcWeigthSize([$15F0,0,0, $15F1,0,0]);
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  Check(p^.Size = size,'size(c)');
+  sizeTotal:= sizeTotal+size;
+
+  Check(ABook.PropCount = sizeTotal,'size(total)');
+end;
+
+procedure test13();
+var
+  sequence, sequenceClean : TOrderedCharacters;
+  statement : TReorderSequence;
+  wfirst, wresult : TUCA_LineRecArray;
+  unicodeBook1, unicodeBook2 : unicodedata.TUCA_DataBook;
+begin
+  statement.Clear();
+  test12_prepareWeigth(wfirst);
+  test12_PopulateSequence(sequenceClean);
+
+  WriteLn('  Initial = ',sLineBreak,'    ',DumpSequenceAnsi(sequenceClean),sLineBreak);
+  WriteLn(DumpLines(wfirst),sLineBreak+sLineBreak);
+  //Generate the original tables
+  ConstructUnicodeBook(wfirst,'test','first',nil,unicodeBook1);
+
+  // --- test 1
+  sequence := sequenceClean.Clone();
+  statement.Clear();
+  SetLength(statement.Elements,1);
+  statement.Elements[0] := TReorderUnit.From(Ord('b'),TReorderWeigthKind.Deletion,0);
+  sequence.ApplyStatement(@statement);
+  statement.Clear();
+  SetLength(statement.Reset,1);
+  statement.Reset[0] := Ord('a');
+  SetLength(statement.Elements,1);
+  statement.Elements[0] := TReorderUnit.From([Ord('b'),Ord('u')],TReorderWeigthKind.Primary,0);
+  sequence.ApplyStatement(@statement);
+  WriteLn('    Statement #1 = ',sLineBreak,'  ',DumpSequenceAnsi(sequence),sLineBreak);
+  wresult := nil;
+  ComputeWeigths(@sequence.Data[0],sequence.ActualLength,wfirst,wresult);
+  WriteLn(DumpLines(wresult),sLineBreak+sLineBreak);
+  //Generate updatet tables
+  ConstructUnicodeBook(wresult,'test','second',@unicodeBook1,unicodeBook2);
+    CheckInf(['a','bu','b'{because b's weigth is now computed!}],@unicodeBook2);
+    test13_check_1(unicodeBook2);
+    WriteLn('    -- test 1 - ok');
+
+  // --- test 2
+  sequence := sequenceClean.Clone();
+  statement.Clear();
+  SetLength(statement.Elements,1);
+  statement.Elements[0] := TReorderUnit.From(Ord('b'),TReorderWeigthKind.Deletion,0);
+  sequence.ApplyStatement(@statement);
+  statement.Clear();
+  SetLength(statement.Reset,2);
+  statement.Reset[0] := Ord('a');
+  SetLength(statement.Elements,2);
+  statement.Elements[0] := TReorderUnit.From([Ord('b'),Ord('u')],TReorderWeigthKind.Primary,0);
+  statement.Elements[1] := TReorderUnit.From(Ord('c'),TReorderWeigthKind.Primary,0);
+  sequence.ApplyStatement(@statement);
+  WriteLn('    Statement #2 = ',sLineBreak,'  ',DumpSequenceAnsi(sequence),sLineBreak);
+  wresult := nil;
+  ComputeWeigths(@sequence.Data[0],sequence.ActualLength,wfirst,wresult);
+  WriteLn(DumpLines(wresult),sLineBreak+sLineBreak);
+  //Generate updatet tables
+  ConstructUnicodeBook(wresult,'test','second',@unicodeBook1,unicodeBook2);
+    CheckInf(['a','bu','c','b'{because b's weigth is now computed!}],@unicodeBook2);
+    test13_check_2(unicodeBook2);
+    WriteLn('    -- test 2 - ok');
+end;
+
+//------------------------------------------------------
+
+procedure test14_prepareWeigth(var AData : TUCA_LineRecArray);
+var
+  p : PUCA_LineRec;
+begin
+  SetLength(AData,1);
+  p := @AData[Low(AData)];
+    p^.CodePoints := CodePointToArray(Ord('a'));
+    p^.Weights := ToWeight($15EF,$0120,$0002);
+end;
+
+procedure test14_PopulateSequence(var ASequence : TOrderedCharacters);
+var
+  i : Integer;
+begin
+  ASequence := TOrderedCharacters.Create();
+  ASequence.Append(TReorderUnit.From(Ord('a'),TReorderWeigthKind.Primary,1));
+  for i := 0 to ASequence.ActualLength - 1 do
+    ASequence.Data[i].Changed := False;
+end;
+
+procedure test14_check_1(const ABook : unicodedata.TUCA_DataBook);
+var
+  p : PUCA_PropItemRec;
+  size, sizeTotal: Integer;
+  ctx : PUCA_PropItemContextTreeRec;
+  ctxItem : PUCA_PropItemContextTreeNodeRec;
+  pb : PByte;
+begin
+  sizeTotal := 0;
+// for 'b'
+  p := ABook.Props;
+  pb := PByte(p);
+  size := 0;
+  CheckSimpleProps(p,True,True,0,True,False);
+  size := SizeOf(TUCA_PropItemRec)+SizeOf(UInt24){codepoint};
+  CheckWeigths(p,[]);
+  ctx := PUCA_PropItemContextTreeRec(PtrUInt(p)+SizeOf(TUCA_PropItemRec)+SizeOf(UInt24));
+  Check(ctx^.Size>0,'ctx^.Size');
+  ctxItem := PUCA_PropItemContextTreeNodeRec(PtrUInt(ctx)+SizeOf(ctx^.Size));
+  Check(ctxItem<>nil,'ctxItem');
+  Check(ctxItem^.Left=0,'ctxItem^.Left');
+  Check(ctxItem^.Right=0,'ctxItem^.Right');
+  Check(ctxItem^.Data.CodePointCount=1,'ctxItem^.Data.CodePointCount');
+  Check(ctxItem^.Data.WeightCount=2,'ctxItem^.Data.WeightCount');
+  pb := PByte(PtrUInt(@ctxItem^.Data)+SizeOf(ctxItem^.Data));
+  Check(Cardinal(PUInt24(pb)^)=Ord('a'),'Context CodePoint');
+  pb := pb + (ctxItem^.Data.CodePointCount*SizeOf(UInt24));
+  CheckWeigths(PUCA_PropWeights(pb),ctxItem^.Data.WeightCount,[$15EF,$120,$3, $15EF,0,0]);
+
+  size := SizeOf(TUCA_PropItemRec)+
+          SizeOf(UInt24){codepoint}+
+          SizeOf(TUCA_PropItemContextTreeRec.Size)+
+          SizeOf(TUCA_PropItemContextTreeNodeRec) +
+          (ctxItem^.Data.CodePointCount*SizeOf(UInt24))+
+          (ctxItem^.Data.WeightCount*SizeOf(TUCA_PropWeights));
+
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  sizeTotal:= sizeTotal+size;
+
+  Check(ABook.PropCount = sizeTotal,'size(total)');
+end;
+
+procedure test14_check_2(const ABook : unicodedata.TUCA_DataBook);
+var
+  p : PUCA_PropItemRec;
+  size, sizeTotal : Integer;
+  ctx : PUCA_PropItemContextTreeRec;
+  ctxItem : PUCA_PropItemContextTreeNodeRec;
+  pb : PByte;
+begin
+  sizeTotal := 0;
+// for 'b'
+  p := ABook.Props;
+  pb := PByte(p);
+  size := 0;
+  CheckSimpleProps(p,True,True,0,True,False);
+  CheckWeigths(p,[]);
+  ctx := PUCA_PropItemContextTreeRec(PtrUInt(p)+SizeOf(TUCA_PropItemRec)+SizeOf(UInt24));
+  Check(ctx^.Size>0,'ctx^.Size');
+  ctxItem := PUCA_PropItemContextTreeNodeRec(PtrUInt(ctx)+SizeOf(ctx^.Size));
+  Check(ctxItem<>nil,'ctxItem');
+  Check(ctxItem^.Left=0,'ctxItem^.Left');
+  Check(ctxItem^.Right=0,'ctxItem^.Right');
+  Check(ctxItem^.Data.CodePointCount=1,'ctxItem^.Data.CodePointCount');
+  Check(ctxItem^.Data.WeightCount=2,'ctxItem^.Data.WeightCount');
+  pb := PByte(PtrUInt(@ctxItem^.Data)+SizeOf(ctxItem^.Data));
+  Check(Cardinal(PUInt24(pb)^)=Ord('a'),'Context CodePoint');
+  pb := pb + (ctxItem^.Data.CodePointCount*SizeOf(UInt24));
+  CheckWeigths(PUCA_PropWeights(pb),ctxItem^.Data.WeightCount,[$15EF,$120,$3, $15EF,0,0]);
+  size := SizeOf(TUCA_PropItemRec)+
+          SizeOf(UInt24){codepoint}+
+          SizeOf(TUCA_PropItemContextTreeRec.Size)+
+          SizeOf(TUCA_PropItemContextTreeNodeRec) +
+          (ctxItem^.Data.CodePointCount*SizeOf(UInt24))+
+          (ctxItem^.Data.WeightCount*SizeOf(TUCA_PropWeights));
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  sizeTotal:= sizeTotal+size;
+
+// for 'c'
+  p := PUCA_PropItemRec(PtrUInt(p)+size);
+  CheckSimpleProps(p,False,True,0,False,False);
+  CheckWeigths(p,[$15EF,$120,$3, $15F0,0,0]);
+  size := SizeOf(TUCA_PropItemRec) +
+          CalcWeigthSize([$15EF,$120,$3, $15F0,0,0]);
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  Check(p^.Size = size,'size(u)');
+  sizeTotal:= sizeTotal+size;
+
+  Check(ABook.PropCount = sizeTotal,'size(total)');
+end;
+
+procedure test14_check_3(const ABook : unicodedata.TUCA_DataBook);
+var
+  p : PUCA_PropItemRec;
+  size, sizeTotal, t: Integer;
+  ctx : PUCA_PropItemContextTreeRec;
+  ctxItem : PUCA_PropItemContextTreeNodeRec;
+  pb : PByte;
+begin
+  sizeTotal := 0;
+// for 'b'
+  p := ABook.Props;
+  pb := PByte(p);
+  size := 0;
+  CheckSimpleProps(p,True,True,0,True,False);
+  CheckWeigths(p,[]);
+  ctx := PUCA_PropItemContextTreeRec(PtrUInt(p)+SizeOf(TUCA_PropItemRec)+SizeOf(UInt24));
+  Check(ctx^.Size>0,'ctx^.Size');
+  ctxItem := PUCA_PropItemContextTreeNodeRec(PtrUInt(ctx)+SizeOf(ctx^.Size));
+  Check(ctxItem<>nil,'ctxItem');
+  Check(ctxItem^.Left=0,'ctxItem^.Left');
+  Check(ctxItem^.Right<>0,'ctxItem^.Right');
+  Check(ctxItem^.Data.CodePointCount=1,'ctxItem^.Data.CodePointCount');
+  Check(ctxItem^.Data.WeightCount=2,'ctxItem^.Data.WeightCount');
+  pb := PByte(PtrUInt(@ctxItem^.Data)+SizeOf(ctxItem^.Data));
+  Check(Cardinal(PUInt24(pb)^)=Ord('a'),'Context CodePoint');
+  pb := pb + (ctxItem^.Data.CodePointCount*SizeOf(UInt24));
+  CheckWeigths(PUCA_PropWeights(pb),ctxItem^.Data.WeightCount,[$15EF,$120,$3, $15EF,0,0]);
+
+  t := SizeOf(TUCA_PropItemContextTreeNodeRec) +
+       (ctxItem^.Data.CodePointCount*SizeOf(UInt24))+
+       (ctxItem^.Data.WeightCount*SizeOf(TUCA_PropWeights));
+  Check(ctxItem^.Right = t,'ctxItem^.Right');
+  ctxItem := PUCA_PropItemContextTreeNodeRec(PtrUInt(ctxItem)+t);
+  Check(ctxItem^.Left=0,'ctxItem^.Left');
+  Check(ctxItem^.Right=0,'ctxItem^.Right');
+  Check(ctxItem^.Data.CodePointCount=1,'ctxItem^.Data.CodePointCount');
+  Check(ctxItem^.Data.WeightCount=2,'ctxItem^.Data.WeightCount');
+  pb := PByte(PtrUInt(@ctxItem^.Data)+SizeOf(ctxItem^.Data));
+  Check(Cardinal(PUInt24(pb)^)=Ord('f'),'Context CodePoint');
+  pb := pb + (ctxItem^.Data.CodePointCount*SizeOf(UInt24));
+  CheckWeigths(PUCA_PropWeights(pb),ctxItem^.Data.WeightCount,[$15EF,$120,$4, $15F1,0,0]);
+
+  size := SizeOf(TUCA_PropItemRec)+
+          SizeOf(UInt24){codepoint}+
+          SizeOf(TUCA_PropItemContextTreeRec.Size)+
+          t+
+          SizeOf(TUCA_PropItemContextTreeNodeRec) +
+          (ctxItem^.Data.CodePointCount*SizeOf(UInt24))+
+          (ctxItem^.Data.WeightCount*SizeOf(TUCA_PropWeights));
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  sizeTotal:= sizeTotal+size;
+
+// for 'e'
+  p := PUCA_PropItemRec(PtrUInt(p)+size);
+  CheckSimpleProps(p,False,True,0,False,False);
+  CheckWeigths(p,[$15EF,$120,$3, $15F0,0,0]);
+  size := SizeOf(TUCA_PropItemRec) +
+          CalcWeigthSize([$15EF,$120,$3, $15F0,0,0]);
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  Check(p^.Size = size,'size(e)');
+  sizeTotal:= sizeTotal+size;
+
+// for 'f'
+  p := PUCA_PropItemRec(PtrUInt(p)+size);
+  CheckSimpleProps(p,False,True,0,False,False);
+  CheckWeigths(p,[$15EF,$120,$3, $15F1,0,0]);
+  size := SizeOf(TUCA_PropItemRec) +
+          CalcWeigthSize([$15EF,$120,$3, $15F1,0,0]);
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  Check(p^.Size = size,'size(f)');
+  sizeTotal:= sizeTotal+size;
+
+  Check(ABook.PropCount = sizeTotal,'size(total)');
+end;
+
+procedure test14_check_4(const ABook : unicodedata.TUCA_DataBook);
+var
+  p : PUCA_PropItemRec;
+  size, sizeTotal, t, ctxSize: Integer;
+  ctx : PUCA_PropItemContextTreeRec;
+  ctxItem, ctxItemParent : PUCA_PropItemContextTreeNodeRec;
+  pb : PByte;
+begin
+  sizeTotal := 0;
+  ctxSize := 0;
+// for 'b'
+  p := ABook.Props;
+  pb := PByte(p);
+  size := 0;
+  CheckSimpleProps(p,True,True,0,True,False);
+  CheckWeigths(p,[]);
+  ctx := PUCA_PropItemContextTreeRec(PtrUInt(p)+SizeOf(TUCA_PropItemRec)+SizeOf(UInt24));
+  Check(ctx^.Size>0,'ctx^.Size');
+  ctxItem := PUCA_PropItemContextTreeNodeRec(PtrUInt(ctx)+SizeOf(ctx^.Size));
+  ctxItemParent := ctxItem;
+  Check(ctxItem<>nil,'ctxItem');
+  Check(ctxItem^.Left<>0,'ctxItem^.Left');
+  Check(ctxItem^.Right<>0,'ctxItem^.Right');
+  Check(ctxItem^.Data.CodePointCount=1,'ctxItem^.Data.CodePointCount');
+  Check(ctxItem^.Data.WeightCount=2,'ctxItem^.Data.WeightCount');
+  pb := PByte(PtrUInt(@ctxItem^.Data)+SizeOf(ctxItem^.Data));
+  Check(Cardinal(PUInt24(pb)^)=Ord('f'),'Context CodePoint');
+  pb := pb + (ctxItem^.Data.CodePointCount*SizeOf(UInt24));
+  CheckWeigths(PUCA_PropWeights(pb),ctxItem^.Data.WeightCount,[$15EF,$120,$4, $15F1,0,0]);
+  t := SizeOf(TUCA_PropItemContextTreeNodeRec) +
+       (ctxItem^.Data.CodePointCount*SizeOf(UInt24))+
+       (ctxItem^.Data.WeightCount*SizeOf(TUCA_PropWeights));
+  ctxSize := ctxSize+t;
+
+  Check(ctxItem^.Left = t,'ctxItem^.Left');
+  ctxItem := PUCA_PropItemContextTreeNodeRec(PtrUInt(ctxItem)+t);
+  Check(ctxItem^.Left=0,'ctxItem^.Left');
+  Check(ctxItem^.Right=0,'ctxItem^.Right');
+  Check(ctxItem^.Data.CodePointCount=1,'ctxItem^.Data.CodePointCount');
+  Check(ctxItem^.Data.WeightCount=2,'ctxItem^.Data.WeightCount');
+  pb := PByte(PtrUInt(@ctxItem^.Data)+SizeOf(ctxItem^.Data));
+  Check(Cardinal(PUInt24(pb)^)=Ord('a'),'Context CodePoint');
+  pb := pb + (ctxItem^.Data.CodePointCount*SizeOf(UInt24));
+  CheckWeigths(PUCA_PropWeights(pb),ctxItem^.Data.WeightCount,[$15EF,$120,$3, $15EF,0,0]);
+  t := SizeOf(TUCA_PropItemContextTreeNodeRec) +
+       (ctxItem^.Data.CodePointCount*SizeOf(UInt24))+
+       (ctxItem^.Data.WeightCount*SizeOf(TUCA_PropWeights));
+  ctxSize := ctxSize+t;
+
+  ctxItem := PUCA_PropItemContextTreeNodeRec(PtrUInt(ctxItemParent)+ctxSize);
+  Check(ctxItem^.Left=0,'ctxItem^.Left');
+  Check(ctxItem^.Right=0,'ctxItem^.Right');
+  Check(ctxItem^.Data.CodePointCount=1,'ctxItem^.Data.CodePointCount');
+  Check(ctxItem^.Data.WeightCount=2,'ctxItem^.Data.WeightCount');
+  pb := PByte(PtrUInt(@ctxItem^.Data)+SizeOf(ctxItem^.Data));
+  Check(Cardinal(PUInt24(pb)^)=Ord('h'),'Context CodePoint');
+  pb := pb + (ctxItem^.Data.CodePointCount*SizeOf(UInt24));
+  CheckWeigths(PUCA_PropWeights(pb),ctxItem^.Data.WeightCount,[$15EF,$121,$6, $15F1,0,0]);
+  t := SizeOf(TUCA_PropItemContextTreeNodeRec) +
+       (ctxItem^.Data.CodePointCount*SizeOf(UInt24))+
+       (ctxItem^.Data.WeightCount*SizeOf(TUCA_PropWeights));
+  ctxSize := ctxSize+t;
+
+  ctxSize := ctxSize + SizeOf(TUCA_PropItemContextTreeRec.Size);
+  Check(ctx^.Size = ctxSize,'ctx^.Size');
+  size := SizeOf(TUCA_PropItemRec)+
+          SizeOf(UInt24){codepoint}+
+          ctxSize;
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  sizeTotal:= sizeTotal+size;
+
+// for 'e'
+  p := PUCA_PropItemRec(PtrUInt(p)+size);
+  CheckSimpleProps(p,False,True,0,False,False);
+  CheckWeigths(p,[$15EF,$120,$3, $15F0,0,0]);
+  size := SizeOf(TUCA_PropItemRec) +
+          CalcWeigthSize([$15EF,$120,$3, $15F0,0,0]);
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  Check(p^.Size = size,'size(e)');
+  sizeTotal:= sizeTotal+size;
+
+// for 'f'
+  p := PUCA_PropItemRec(PtrUInt(p)+size);
+  CheckSimpleProps(p,False,True,0,False,False);
+  CheckWeigths(p,[$15EF,$120,$3, $15F1,0,0]);
+  size := SizeOf(TUCA_PropItemRec) +
+          CalcWeigthSize([$15EF,$120,$3, $15F1,0,0]);
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  Check(p^.Size = size,'size(f)');
+  sizeTotal:= sizeTotal+size;
+
+// for 'g'
+  p := PUCA_PropItemRec(PtrUInt(p)+size);
+  CheckSimpleProps(p,False,True,0,False,False);
+  CheckWeigths(p,[$15EF,$120,$5, $15F1,0,0]);
+  size := SizeOf(TUCA_PropItemRec) +
+          CalcWeigthSize([$15EF,$120,$5, $15F1,0,0]);
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  Check(p^.Size = size,'size(g)');
+  sizeTotal:= sizeTotal+size;
+
+// for 'h'
+  p := PUCA_PropItemRec(PtrUInt(p)+size);
+  CheckSimpleProps(p,False,True,0,False,False);
+  CheckWeigths(p,[$15EF,$121,$5, $15F1,0,0]);
+  size := SizeOf(TUCA_PropItemRec) +
+          CalcWeigthSize([$15EF,$121,$5, $15F1,0,0]);
+  Check(p^.GetSelfOnlySize() = size,'GetSelfOnlySize');
+  Check(p^.Size = size,'size(h)');
+  sizeTotal:= sizeTotal+size;
+
+  Check(ABook.PropCount = sizeTotal,'size(total)');
+end;
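+
+{ Note on the checks above: sizeTotal is accumulated from GetSelfOnlySize()
+  of every item, and the final Check(ABook.PropCount = sizeTotal,...) proves
+  that walking the serialized property table item by item lands exactly on
+  its declared byte length, i.e. every code point, context tree and possibly
+  compressed weight was written with the size it announces. }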
+
+procedure test14();
+var
+  sequence, sequenceClean : TOrderedCharacters;
+  statement : TReorderSequence;
+  wfirst, wresult : TUCA_LineRecArray;
+  unicodeBook1, unicodeBook2 : unicodedata.TUCA_DataBook;
+begin
+  statement.Clear();
+  test12_prepareWeigth(wfirst);
+  test12_PopulateSequence(sequenceClean);
+
+  WriteLn('  Initial = ',sLineBreak,'    ',DumpSequenceAnsi(sequenceClean),sLineBreak);
+  WriteLn(DumpLines(wfirst),sLineBreak+sLineBreak);
+  //Generate the original tables
+  ConstructUnicodeBook(wfirst,'test','first',nil,unicodeBook1);
+
+  // --- test 1
+  sequence := sequenceClean.Clone();
+  statement.Clear();
+  SetLength(statement.Reset,1);
+  statement.Reset[0] := Ord('a');
+  SetLength(statement.Elements,1);
+  statement.Elements[0] := TReorderUnit.From(Ord('b'),[Ord('a')],TReorderWeigthKind.Tertiary,0);
+  sequence.ApplyStatement(@statement);
+  WriteLn('    Statement #1 = ',sLineBreak,'  ',DumpSequenceAnsi(sequence),sLineBreak);
+  wresult := nil;
+  ComputeWeigths(@sequence.Data[0],sequence.ActualLength,wfirst,wresult);
+  WriteLn(DumpLines(wresult),sLineBreak+sLineBreak);
+  //Generate updated tables
+  ConstructUnicodeBook(wresult,'test','second',@unicodeBook1,unicodeBook2);
+    test14_check_1(unicodeBook2);
+    WriteLn('    -- test 1 - ok');
+    WriteLn;
+
+  // --- test 2
+  sequence := sequenceClean.Clone();
+  statement.Clear();
+  SetLength(statement.Reset,1);
+  statement.Reset[0] := Ord('a');
+  SetLength(statement.Elements,2);
+  statement.Elements[0] := TReorderUnit.From(Ord('b'),[Ord('a')],TReorderWeigthKind.Tertiary,0);
+  statement.Elements[1] := TReorderUnit.From(Ord('c'),TReorderWeigthKind.Primary,0);
+  sequence.ApplyStatement(@statement);
+  WriteLn('    Statement #2 = ',sLineBreak,'  ',DumpSequenceAnsi(sequence),sLineBreak);
+  wresult := nil;
+  ComputeWeigths(@sequence.Data[0],sequence.ActualLength,wfirst,wresult);
+  WriteLn(DumpLines(wresult),sLineBreak+sLineBreak);
+  //Generate updated tables
+  ConstructUnicodeBook(wresult,'test','second',@unicodeBook1,unicodeBook2);
+    test14_check_2(unicodeBook2);
+    WriteLn('    -- test 2 - ok');
+    WriteLn;
+
+  // --- test 3
+  sequence := sequenceClean.Clone();
+  statement.Clear();
+  SetLength(statement.Reset,1);
+  statement.Reset[0] := Ord('a');
+  SetLength(statement.Elements,2);
+  statement.Elements[0] := TReorderUnit.From(Ord('e'),TReorderWeigthKind.Primary,0);
+  statement.Elements[1] := TReorderUnit.From(Ord('f'),TReorderWeigthKind.Primary,0);
+  sequence.ApplyStatement(@statement);
+  statement.Clear();
+  SetLength(statement.Reset,1);
+  statement.Reset[0] := Ord('a');
+  SetLength(statement.Elements,1);
+  statement.Elements[0] := TReorderUnit.From(Ord('b'),[Ord('a')],TReorderWeigthKind.Tertiary,0);
+  sequence.ApplyStatement(@statement);
+  statement.Clear();
+  SetLength(statement.Reset,1);
+  statement.Reset[0] := Ord('f');
+  SetLength(statement.Elements,1);
+  statement.Elements[0] := TReorderUnit.From(Ord('b'),[Ord('f')],TReorderWeigthKind.Tertiary,0);
+  sequence.ApplyStatement(@statement);
+
+  WriteLn('    Statement #3 = ',sLineBreak,'  ',DumpSequenceAnsi(sequence),sLineBreak);
+  wresult := nil;
+  ComputeWeigths(@sequence.Data[0],sequence.ActualLength,wfirst,wresult);
+  WriteLn(DumpLines(wresult),sLineBreak+sLineBreak);
+  //Generate updated tables
+  ConstructUnicodeBook(wresult,'test','second',@unicodeBook1,unicodeBook2);
+    test14_check_3(unicodeBook2);
+    WriteLn('    -- test 3 - ok');
+    WriteLn;
+
+  // --- test 4
+  sequence := sequenceClean.Clone();
+  statement.Clear();
+  SetLength(statement.Reset,1);
+  statement.Reset[0] := Ord('a');
+  SetLength(statement.Elements,4);
+  statement.Elements[0] := TReorderUnit.From(Ord('e'),TReorderWeigthKind.Primary,0);
+  statement.Elements[1] := TReorderUnit.From(Ord('f'),TReorderWeigthKind.Primary,0);
+  statement.Elements[2] := TReorderUnit.From(Ord('g'),TReorderWeigthKind.Tertiary,0);
+  statement.Elements[3] := TReorderUnit.From(Ord('h'),TReorderWeigthKind.Secondary,0);
+  sequence.ApplyStatement(@statement);
+  statement.Clear();
+  SetLength(statement.Reset,1);
+  statement.Reset[0] := Ord('f');
+  SetLength(statement.Elements,1);
+  statement.Elements[0] := TReorderUnit.From(Ord('b'),[Ord('f')],TReorderWeigthKind.Tertiary,0);
+  sequence.ApplyStatement(@statement);
+  statement.Clear();
+  SetLength(statement.Reset,1);
+  statement.Reset[0] := Ord('a');
+  SetLength(statement.Elements,1);
+  statement.Elements[0] := TReorderUnit.From(Ord('b'),[Ord('a')],TReorderWeigthKind.Tertiary,0);
+  sequence.ApplyStatement(@statement);
+  statement.Clear();
+  SetLength(statement.Reset,1);
+  statement.Reset[0] := Ord('h');
+  SetLength(statement.Elements,1);
+  statement.Elements[0] := TReorderUnit.From(Ord('b'),[Ord('h')],TReorderWeigthKind.Tertiary,0);
+  sequence.ApplyStatement(@statement);
+
+  WriteLn('    Statement #4 = ',sLineBreak,'  ',DumpSequenceAnsi(sequence),sLineBreak);
+  wresult := nil;
+  ComputeWeigths(@sequence.Data[0],sequence.ActualLength,wfirst,wresult);
+  WriteLn(DumpLines(wresult),sLineBreak+sLineBreak);
+  //Generate updated tables
+  ConstructUnicodeBook(wresult,'test','second',@unicodeBook1,unicodeBook2);
+    test14_check_4(unicodeBook2);
+    WriteLn('    -- test 4 - ok');
+    WriteLn;
+end;
+
+//------------------------------------------------------
+
+procedure test15_prepareWeigth(var AData : TUCA_LineRecArray);
+var
+  p : PUCA_LineRec;
+begin
+  SetLength(AData,1);
+  p := @AData[Low(AData)];
+    p^.CodePoints := CodePointToArray(Ord('a'));
+    p^.Weights := ToWeight($15EF,$0120,$0002);
+end;
+
+procedure test15_PopulateSequence(var ASequence : TOrderedCharacters);
+var
+  i : Integer;
+begin
+  ASequence := TOrderedCharacters.Create();
+  ASequence.Append(TReorderUnit.From(Ord('a'),TReorderWeigthKind.Primary,1));
+  for i := 0 to ASequence.ActualLength - 1 do
+    ASequence.Data[i].Changed := False;
+end;
+
+function ConvertEndianFromNative(
+  const AData    : Pointer;
+  const ADataLen : Integer
+) : Boolean;
+type
+  PUCA_PropItemRec = helper.PUCA_PropItemRec;
+var
+  s : PUCA_PropItemRec;
+  x, y : array of Byte;
+  px, py : PUCA_PropItemRec;
+begin
+  if (ADataLen <= 0) then
+    exit(True);
+  s := PUCA_PropItemRec(AData);
+  SetLength(x,ADataLen);
+  px := PUCA_PropItemRec(@x[0]);
+  ReverseFromNativeEndian(s,ADataLen,px);
+
+  SetLength(y,ADataLen);
+  py := PUCA_PropItemRec(@y[0]);
+  ReverseToNativeEndian(px,ADataLen,py);
+  Result := CompareMem(AData,@y[0],Length(x));
+end;
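+
+{ Minimal usage sketch (illustrative; "book" stands for any generated
+  unicodedata.TUCA_DataBook, e.g. the output of ConstructUnicodeBook):
+
+    if not ConvertEndianFromNative(book.Props,book.PropCount) then
+      WriteLn('endian round trip mismatch');
+
+  ReverseFromNativeEndian and ReverseToNativeEndian must be exact inverses,
+  otherwise the generated little endian and big endian include files would
+  not describe the same data. }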
+
+procedure test15();
+var
+  sequence, sequenceClean : TOrderedCharacters;
+  statement : TReorderSequence;
+  wfirst, wresult : TUCA_LineRecArray;
+  unicodeBook1, unicodeBook2 : unicodedata.TUCA_DataBook;
+begin
+  statement.Clear();
+  test12_prepareWeigth(wfirst);
+  test12_PopulateSequence(sequenceClean);
+
+  WriteLn('  Initial = ',sLineBreak,'    ',DumpSequenceAnsi(sequenceClean),sLineBreak);
+  WriteLn(DumpLines(wfirst),sLineBreak+sLineBreak);
+  //Generate the original tables
+  ConstructUnicodeBook(wfirst,'test','first',nil,unicodeBook1);
+
+  // --- test 1
+  sequence := sequenceClean.Clone();
+  SetLength(statement.Reset,1);
+  statement.Reset[0] := Ord('a');
+  SetLength(statement.Elements,1);
+  statement.Elements[0] := TReorderUnit.From(Ord('b'),TReorderWeigthKind.Primary,0);
+  sequence.ApplyStatement(@statement);
+  WriteLn('    Statement #1 = ',sLineBreak,'  ',DumpSequenceAnsi(sequence),sLineBreak);
+  wresult := nil;
+  ComputeWeigths(@sequence.Data[0],sequence.ActualLength,wfirst,wresult);
+  WriteLn(DumpLines(wresult),sLineBreak+sLineBreak);
+  //Generate updated tables
+  ConstructUnicodeBook(wresult,'test','second',@unicodeBook1,unicodeBook2);
+    Check(ConvertEndianFromNative(unicodeBook2.Props,unicodeBook2.PropCount),'Endian conversion failed.');
+    WriteLn('    -- test 1 - ok');
+
+  // --- test 2
+  sequence := sequenceClean.Clone();
+  statement.Clear();
+  SetLength(statement.Reset,1);
+  statement.Reset[0] := Ord('a');
+  SetLength(statement.Elements,1);
+  statement.Elements[0] := TReorderUnit.From(Ord('b'),[Ord('a')],TReorderWeigthKind.Tertiary,0);
+  sequence.ApplyStatement(@statement);
+  WriteLn('    Statement #2 = ',sLineBreak,'  ',DumpSequenceAnsi(sequence),sLineBreak);
+  wresult := nil;
+  ComputeWeigths(@sequence.Data[0],sequence.ActualLength,wfirst,wresult);
+  WriteLn(DumpLines(wresult),sLineBreak+sLineBreak);
+  //Generate updated tables
+  ConstructUnicodeBook(wresult,'test','second',@unicodeBook1,unicodeBook2);
+    Check(ConvertEndianFromNative(unicodeBook2.Props,unicodeBook2.PropCount),'Endian conversion failed.');
+    WriteLn('    -- test 2 - ok');
+    WriteLn;
+
+  // --- test 3
+  sequence := sequenceClean.Clone();
+  statement.Clear();
+  SetLength(statement.Reset,1);
+  statement.Reset[0] := Ord('a');
+  SetLength(statement.Elements,2);
+  statement.Elements[0] := TReorderUnit.From(Ord('b'),[Ord('a')],TReorderWeigthKind.Tertiary,0);
+  statement.Elements[1] := TReorderUnit.From(Ord('c'),TReorderWeigthKind.Primary,0);
+  sequence.ApplyStatement(@statement);
+  WriteLn('    Statement #3 = ',sLineBreak,'  ',DumpSequenceAnsi(sequence),sLineBreak);
+  wresult := nil;
+  ComputeWeigths(@sequence.Data[0],sequence.ActualLength,wfirst,wresult);
+  WriteLn(DumpLines(wresult),sLineBreak+sLineBreak);
+  //Generate updated tables
+  ConstructUnicodeBook(wresult,'test','second',@unicodeBook1,unicodeBook2);
+    Check(ConvertEndianFromNative(unicodeBook2.Props,unicodeBook2.PropCount),'Endian conversion failed.');
+    WriteLn('    -- test 3 - ok');
+    WriteLn;
+
+  // --- test 4
+  sequence := sequenceClean.Clone();
+  statement.Clear();
+  SetLength(statement.Reset,1);
+  statement.Reset[0] := Ord('a');
+  SetLength(statement.Elements,2);
+  statement.Elements[0] := TReorderUnit.From(Ord('e'),TReorderWeigthKind.Primary,0);
+  statement.Elements[1] := TReorderUnit.From(Ord('f'),TReorderWeigthKind.Primary,0);
+  sequence.ApplyStatement(@statement);
+  statement.Clear();
+  SetLength(statement.Reset,1);
+  statement.Reset[0] := Ord('a');
+  SetLength(statement.Elements,1);
+  statement.Elements[0] := TReorderUnit.From(Ord('b'),[Ord('a')],TReorderWeigthKind.Tertiary,0);
+  sequence.ApplyStatement(@statement);
+  statement.Clear();
+  SetLength(statement.Reset,1);
+  statement.Reset[0] := Ord('f');
+  SetLength(statement.Elements,1);
+  statement.Elements[0] := TReorderUnit.From(Ord('b'),[Ord('f')],TReorderWeigthKind.Tertiary,0);
+  sequence.ApplyStatement(@statement);
+  WriteLn('    Statement #4 = ',sLineBreak,'  ',DumpSequenceAnsi(sequence),sLineBreak);
+  wresult := nil;
+  ComputeWeigths(@sequence.Data[0],sequence.ActualLength,wfirst,wresult);
+  WriteLn(DumpLines(wresult),sLineBreak+sLineBreak);
+  //Generate updated tables
+  ConstructUnicodeBook(wresult,'test','second',@unicodeBook1,unicodeBook2);
+    Check(ConvertEndianFromNative(unicodeBook2.Props,unicodeBook2.PropCount),'Endian conversion failed.');
+    WriteLn('    -- test 4 - ok');
+    WriteLn;
+
+  // --- test 5
+  sequence := sequenceClean.Clone();
+  statement.Clear();
+  SetLength(statement.Reset,1);
+  statement.Reset[0] := Ord('a');
+  SetLength(statement.Elements,4);
+  statement.Elements[0] := TReorderUnit.From(Ord('e'),TReorderWeigthKind.Primary,0);
+  statement.Elements[1] := TReorderUnit.From(Ord('f'),TReorderWeigthKind.Primary,0);
+  statement.Elements[2] := TReorderUnit.From(Ord('g'),TReorderWeigthKind.Tertiary,0);
+  statement.Elements[3] := TReorderUnit.From(Ord('h'),TReorderWeigthKind.Secondary,0);
+  sequence.ApplyStatement(@statement);
+  statement.Clear();
+  SetLength(statement.Reset,1);
+  statement.Reset[0] := Ord('f');
+  SetLength(statement.Elements,1);
+  statement.Elements[0] := TReorderUnit.From(Ord('b'),[Ord('f')],TReorderWeigthKind.Tertiary,0);
+  sequence.ApplyStatement(@statement);
+  statement.Clear();
+  SetLength(statement.Reset,1);
+  statement.Reset[0] := Ord('a');
+  SetLength(statement.Elements,1);
+  statement.Elements[0] := TReorderUnit.From(Ord('b'),[Ord('a')],TReorderWeigthKind.Tertiary,0);
+  sequence.ApplyStatement(@statement);
+  statement.Clear();
+  SetLength(statement.Reset,1);
+  statement.Reset[0] := Ord('h');
+  SetLength(statement.Elements,1);
+  statement.Elements[0] := TReorderUnit.From(Ord('b'),[Ord('h')],TReorderWeigthKind.Tertiary,0);
+  sequence.ApplyStatement(@statement);
+
+  WriteLn('    Statement #5 = ',sLineBreak,'  ',DumpSequenceAnsi(sequence),sLineBreak);
+  wresult := nil;
+  ComputeWeigths(@sequence.Data[0],sequence.ActualLength,wfirst,wresult);
+  WriteLn(DumpLines(wresult),sLineBreak+sLineBreak);
+  //Generate updated tables
+  ConstructUnicodeBook(wresult,'test','second',@unicodeBook1,unicodeBook2);
+    Check(ConvertEndianFromNative(unicodeBook2.Props,unicodeBook2.PropCount),'Endian conversion failed.');
+    WriteLn('    -- test 5 - ok');
+    WriteLn;
+
+  // --- test 6
+  sequence := sequenceClean.Clone();
+  statement.Clear();
+  SetLength(statement.Reset,1);
+  statement.Reset[0] := Ord('a');
+  SetLength(statement.Elements,2);
+  statement.Elements[0] := TReorderUnit.From([Ord('a'),Ord('d')],[Ord('a')],TReorderWeigthKind.Tertiary,0);
+  statement.Elements[1] := TReorderUnit.From([Ord('a'),Ord('d')],TReorderWeigthKind.Primary,0);
+  sequence.ApplyStatement(@statement);
+  WriteLn('    Statement #6 = ',sLineBreak,'  ',DumpSequenceAnsi(sequence),sLineBreak);
+  wresult := nil;
+  ComputeWeigths(@sequence.Data[0],sequence.ActualLength,wfirst,wresult);
+  WriteLn(DumpLines(wresult),sLineBreak+sLineBreak);
+  //Generate updated tables
+  ConstructUnicodeBook(wresult,'test','second',@unicodeBook1,unicodeBook2);
+    Check(ConvertEndianFromNative(unicodeBook2.Props,unicodeBook2.PropCount),'Endian conversion failed.');
+    WriteLn('    -- test 6 - ok');
+    WriteLn;
+
+  // --- test 7
+  sequence := sequenceClean.Clone();
+  statement.Clear();
+  SetLength(statement.Reset,1);
+  statement.Reset[0] := Ord('a');
+  SetLength(statement.Elements,3);
+  statement.Elements[0] := TReorderUnit.From([Ord('a'),Ord('d')],[Ord('a')],TReorderWeigthKind.Tertiary,0);
+  statement.Elements[1] := TReorderUnit.From([Ord('a'),Ord('d')],TReorderWeigthKind.Primary,0);
+  statement.Elements[2] := TReorderUnit.From(Ord('e'),[Ord('a'),Ord('d')],TReorderWeigthKind.Identity,0);
+  sequence.ApplyStatement(@statement);
+  WriteLn('    Statement #7 = ',sLineBreak,'  ',DumpSequenceAnsi(sequence),sLineBreak);
+  wresult := nil;
+  ComputeWeigths(@sequence.Data[0],sequence.ActualLength,wfirst,wresult);
+  WriteLn(DumpLines(wresult),sLineBreak+sLineBreak);
+  //Generate updated tables
+  ConstructUnicodeBook(wresult,'test','second',@unicodeBook1,unicodeBook2);
+    Check(ConvertEndianFromNative(unicodeBook2.Props,unicodeBook2.PropCount),'Endian conversion failed.');
+    WriteLn('    -- test 7 - ok');
+    WriteLn;
+
+  // --- test 8
+  sequence := sequenceClean.Clone();
+  statement.Clear();
+  SetLength(statement.Reset,1);
+  statement.Reset[0] := Ord('a');
+  SetLength(statement.Elements,3);
+  statement.Elements[0] := TReorderUnit.From([Ord('a'),Ord('d')],[Ord('a')],TReorderWeigthKind.Tertiary,0);
+  statement.Elements[1] := TReorderUnit.From([Ord('a'),Ord('x')],TReorderWeigthKind.Primary,0);
+  statement.Elements[2] := TReorderUnit.From([Ord('e'),Ord('a'),Ord('r')],[Ord('a'),Ord('d')],TReorderWeigthKind.Primary,0);
+  sequence.ApplyStatement(@statement);
+  WriteLn('    Statement #8 = ',sLineBreak,'  ',DumpSequenceAnsi(sequence),sLineBreak);
+  wresult := nil;
+  ComputeWeigths(@sequence.Data[0],sequence.ActualLength,wfirst,wresult);
+  WriteLn(DumpLines(wresult),sLineBreak+sLineBreak);
+  //Generate updated tables
+  ConstructUnicodeBook(wresult,'test','second',@unicodeBook1,unicodeBook2);
+    Check(ConvertEndianFromNative(unicodeBook2.Props,unicodeBook2.PropCount),'Endian conversion failed.');
+    WriteLn('    -- test 8 - ok');
+    WriteLn;
+
+end;
+
 end.
 end.

+ 3 - 20
utils/unicode/cldrxml.pas

@@ -73,18 +73,18 @@ uses
 
 
 const
 const
   s_AT     = 'at';
   s_AT     = 'at';
-  s_BEFORE = 'before';
+  //s_BEFORE = 'before';
   s_CODEPOINT = 'codepoint';
   s_CODEPOINT = 'codepoint';
   s_COLLATION = 'collation';
   s_COLLATION = 'collation';
   s_COLLATIONS = 'collations';
   s_COLLATIONS = 'collations';
   s_CONTEXT = 'context';
   s_CONTEXT = 'context';
-  s_DEFAULT    = 'default';
+  //s_DEFAULT    = 'default';
   s_EXTEND = 'extend';
   s_EXTEND = 'extend';
   s_HEX       = 'hex';
   s_HEX       = 'hex';
   s_POSITION = 'position';
   s_POSITION = 'position';
   s_RESET = 'reset';
   s_RESET = 'reset';
   s_RULES = 'rules';
   s_RULES = 'rules';
-  s_STANDART = 'standard';
+  //s_STANDART = 'standard';
   s_TYPE     = 'type';
   s_TYPE     = 'type';
 
 
 procedure CheckNodeName(ANode : TDOMNode; const AExpectedName : DOMString);
 procedure CheckNodeName(ANode : TDOMNode; const AExpectedName : DOMString);
@@ -226,23 +226,6 @@ var
     end;
     end;
   end;
   end;
 
 
-  {procedure AddElement(AText : DOMString; AWeigthKind : TReorderWeigthKind);overload;
-  var
-    u4str : UCS4String;
-    k : Integer;
-    kp : PReorderUnit;
-  begin
-    u4str := WideStringToUCS4String(AText);
-    EnsureElementLength(elementActualCount+1);
-    kp := @statement^.Elements[elementActualCount];
-    k := Length(u4str) - 1{null terminated};
-   SetLength(kp^.Characters,k);
-    for k := 0 to k - 1 do
-      kp^.Characters[k] := u4str[k];
-    kp^.WeigthKind:= AWeigthKind;
-    elementActualCount := elementActualCount + 1;
-  end;}
-
   procedure AddElement(
   procedure AddElement(
     const AChars      : array of UCS4Char;
     const AChars      : array of UCS4Char;
     const AWeigthKind : TReorderWeigthKind;
     const AWeigthKind : TReorderWeigthKind;

+ 673 - 132
utils/unicode/helper.pas

@@ -23,6 +23,8 @@ unit helper;
 {$H+}
 {$H+}
 {$PACKENUM 1}
 {$PACKENUM 1}
 {$pointermath on}
 {$pointermath on}
+{$typedaddress on}
+{$warn 4056 off}  //Conversion between ordinals and pointers is not portable
 
 
 interface
 interface
 
 
@@ -113,6 +115,9 @@ type
     class operator Implicit(a : TUInt24Rec) : Word;{$ifdef USE_INLINE}inline;{$ENDIF}
     class operator Implicit(a : TUInt24Rec) : Word;{$ifdef USE_INLINE}inline;{$ENDIF}
     class operator Implicit(a : TUInt24Rec) : Byte;{$ifdef USE_INLINE}inline;{$ENDIF}
     class operator Implicit(a : TUInt24Rec) : Byte;{$ifdef USE_INLINE}inline;{$ENDIF}
     class operator Implicit(a : Cardinal) : TUInt24Rec;{$ifdef USE_INLINE}inline;{$ENDIF}
     class operator Implicit(a : Cardinal) : TUInt24Rec;{$ifdef USE_INLINE}inline;{$ENDIF}
+
+    class operator Explicit(a : TUInt24Rec) : Cardinal;{$ifdef USE_INLINE}inline;{$ENDIF}
+
     class operator Equal(a, b: TUInt24Rec): Boolean;{$ifdef USE_INLINE}inline;{$ENDIF}
     class operator Equal(a, b: TUInt24Rec): Boolean;{$ifdef USE_INLINE}inline;{$ENDIF}
 
 
     class operator Equal(a : TUInt24Rec; b : Cardinal): Boolean;{$ifdef USE_INLINE}inline;{$ENDIF}
     class operator Equal(a : TUInt24Rec; b : Cardinal): Boolean;{$ifdef USE_INLINE}inline;{$ENDIF}
@@ -276,12 +281,7 @@ type
   PUCA_DataBook = ^TUCA_DataBook;
   PUCA_DataBook = ^TUCA_DataBook;
   TUCA_DataBookIndex = array of Integer;
   TUCA_DataBookIndex = array of Integer;
 
 
-const
-  BIT_POS_VALIDE = 0;
-  BIT_POS_COMPRESS_WEIGHT_1 = BIT_POS_VALIDE + 1;
-  BIT_POS_COMPRESS_WEIGHT_2 = BIT_POS_COMPRESS_WEIGHT_1 + 1;
 type
 type
-  TWeightLength = 0..24;
   TUCA_PropWeights = packed record
   TUCA_PropWeights = packed record
     Weights  : array[0..2] of Word;
     Weights  : array[0..2] of Word;
     //Variable : Byte;
     //Variable : Byte;
@@ -315,27 +315,26 @@ type
 
 
   TUCA_PropItemRec = packed record
   TUCA_PropItemRec = packed record
   private
   private
+    const FLAG_VALID      = 0;
     const FLAG_CODEPOINT  = 1;
     const FLAG_CODEPOINT  = 1;
     const FLAG_CONTEXTUAL = 2;
     const FLAG_CONTEXTUAL = 2;
     const FLAG_DELETION   = 3;
     const FLAG_DELETION   = 3;
+    const FLAG_COMPRESS_WEIGHT_1 = 6;
+    const FLAG_COMPRESS_WEIGHT_2 = 7;
   private
   private
-    function GetWeightLength: TWeightLength;inline;
-    procedure SetWeightLength(AValue: TWeightLength);inline;
     function GetWeightSize : Word;inline;
     function GetWeightSize : Word;inline;
   public
   public
-    //CodePoint    : UInt24;
-    Valid        : Byte;// On First Bit
+    WeightLength : Byte;
     ChildCount   : Byte;
     ChildCount   : Byte;
     Size         : Word;
     Size         : Word;
     Flags        : Byte;
     Flags        : Byte;
   public
   public
+    function HasCodePoint() : Boolean;inline;
     function GetCodePoint() : UInt24;//inline;
     function GetCodePoint() : UInt24;//inline;
     property CodePoint : UInt24 read GetCodePoint;
     property CodePoint : UInt24 read GetCodePoint;
-    //WeightLength is stored in the 5 last bits of "Valid"
-    property WeightLength : TWeightLength read GetWeightLength write SetWeightLength;
     //Weights    : array[0..WeightLength] of TUCA_PropWeights;
     //Weights    : array[0..WeightLength] of TUCA_PropWeights;
     procedure GetWeightArray(ADest : PUCA_PropWeights);
     procedure GetWeightArray(ADest : PUCA_PropWeights);
-    function GetSelfOnlySize() : Word;inline;
+    function GetSelfOnlySize() : Cardinal;inline;
 
 
     procedure SetContextual(AValue : Boolean);inline;
     procedure SetContextual(AValue : Boolean);inline;
     function GetContextual() : Boolean;inline;
     function GetContextual() : Boolean;inline;
@@ -343,6 +342,9 @@ type
     function GetContext() : PUCA_PropItemContextTreeRec;
     function GetContext() : PUCA_PropItemContextTreeRec;
     procedure SetDeleted(AValue : Boolean);inline;
     procedure SetDeleted(AValue : Boolean);inline;
     function IsDeleted() : Boolean;inline;
     function IsDeleted() : Boolean;inline;
+    function IsValid() : Boolean;inline;
+    function IsWeightCompress_1() : Boolean;inline;
+    function IsWeightCompress_2() : Boolean;inline;
   end;
   end;
   PUCA_PropItemRec = ^TUCA_PropItemRec;
   PUCA_PropItemRec = ^TUCA_PropItemRec;
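+  { Flag bit layout after this change (bit positions in the Flags byte):
+      0 FLAG_VALID, 1 FLAG_CODEPOINT, 2 FLAG_CONTEXTUAL, 3 FLAG_DELETION,
+      6 FLAG_COMPRESS_WEIGHT_1, 7 FLAG_COMPRESS_WEIGHT_2.
+    The former Valid byte, which packed the valid bit and the weight length
+    together, is replaced by the plain WeightLength counter. }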
   TUCA_PropIndexItem = packed record
   TUCA_PropIndexItem = packed record
@@ -353,7 +355,8 @@ type
   TUCA_PropBook = packed record
   TUCA_PropBook = packed record
     ItemSize      : Integer;
     ItemSize      : Integer;
     Index         : array of TUCA_PropIndexItem;
     Index         : array of TUCA_PropIndexItem;
-    Items         : PUCA_PropItemRec;
+    Items         : PUCA_PropItemRec; //Native Endian
+    ItemsOtherEndian  : PUCA_PropItemRec;//Non Native Endian
     VariableLowLimit  : Word;
     VariableLowLimit  : Word;
     VariableHighLimit : Word;
     VariableHighLimit : Word;
   end;
   end;
@@ -403,12 +406,14 @@ type
 type
 type
   TEndianKind = (ekLittle, ekBig);
   TEndianKind = (ekLittle, ekBig);
 const
 const
-  THIS_ENDIAN =
+  ENDIAN_SUFFIX : array[TEndianKind] of string[2] = ('le','be');
 {$IFDEF ENDIAN_LITTLE}
 {$IFDEF ENDIAN_LITTLE}
-    ekLittle;
+  ENDIAN_NATIVE     = ekLittle;
+  ENDIAN_NON_NATIVE = ekBig;
 {$ENDIF ENDIAN_LITTLE}
 {$ENDIF ENDIAN_LITTLE}
 {$IFDEF ENDIAN_BIG}
 {$IFDEF ENDIAN_BIG}
-    ekBig;
+  ENDIAN_NATIVE = ekBig;
+  ENDIAN_NON_NATIVE = ekLittle;
 {$ENDIF ENDIAN_BIG}
 {$ENDIF ENDIAN_BIG}
 
 
   procedure GenerateLicenceText(ADest : TStream);
   procedure GenerateLicenceText(ADest : TStream);
@@ -470,22 +475,22 @@ const
   );
   );
   procedure GenerateUCA_BmpTables(
   procedure GenerateUCA_BmpTables(
           AStream,
           AStream,
-          ABinStream    : TStream;
-    var   AFirstTable   : TucaBmpFirstTable;
-    var   ASecondTable  : TucaBmpSecondTable;
-    const AEndian       : TEndianKind
+          ANativeEndianStream,
+          ANonNativeEndianStream : TStream;
+    var   AFirstTable            : TucaBmpFirstTable;
+    var   ASecondTable           : TucaBmpSecondTable
   );
   );
   procedure GenerateUCA_PropTable(
   procedure GenerateUCA_PropTable(
-  // WARNING : files must be generated for each endianess (Little / Big)
           ADest     : TStream;
           ADest     : TStream;
-    const APropBook : PUCA_PropBook
+    const APropBook : PUCA_PropBook;
+    const AEndian   : TEndianKind
   );
   );
   procedure GenerateUCA_OBmpTables(
   procedure GenerateUCA_OBmpTables(
           AStream,
           AStream,
-          ABinStream    : TStream;
-    var   AFirstTable   : TucaOBmpFirstTable;
-    var   ASecondTable  : TucaOBmpSecondTable;
-    const AEndian       : TEndianKind
+          ANativeEndianStream,
+          ANonNativeEndianStream : TStream;
+    var   AFirstTable            : TucaOBmpFirstTable;
+    var   ASecondTable           : TucaOBmpSecondTable
   );
   );
 
 
   procedure Parse_UnicodeData(
   procedure Parse_UnicodeData(
@@ -505,7 +510,6 @@ const
   procedure MakeBmpTables(
   procedure MakeBmpTables(
     var   AFirstTable   : TBmpFirstTable;
     var   AFirstTable   : TBmpFirstTable;
     var   ASecondTable  : TBmpSecondTable;
     var   ASecondTable  : TBmpSecondTable;
-    const APropList     : TPropRecArray;
     const ADataLineList : TDataLineRecArray
     const ADataLineList : TDataLineRecArray
   );
   );
   procedure MakeBmpTables3Levels(
   procedure MakeBmpTables3Levels(
@@ -608,7 +612,26 @@ type
   function IsBitON(const AData : Byte; const ABit : TBitOrder) : Boolean ;{$IFDEF USE_INLINE}inline;{$ENDIF}
   function IsBitON(const AData : Byte; const ABit : TBitOrder) : Boolean ;{$IFDEF USE_INLINE}inline;{$ENDIF}
   procedure SetBit(var AData : Byte; const ABit : TBitOrder; const AValue : Boolean);{$IFDEF USE_INLINE}inline;{$ENDIF}
   procedure SetBit(var AData : Byte; const ABit : TBitOrder; const AValue : Boolean);{$IFDEF USE_INLINE}inline;{$ENDIF}
 
 
-  function GenerateEndianIncludeFileName(const AStoreName : string): string;inline;
+  function GenerateEndianIncludeFileName(
+    const AStoreName : string;
+    const AEndian    : TEndianKind
+  ): string;inline;
+
+  procedure ReverseFromNativeEndian(
+    const AData    : PUCA_PropItemRec;
+    const ADataLen : Cardinal;
+    const ADest    : PUCA_PropItemRec
+  );
+  procedure ReverseToNativeEndian(
+    const AData    : PUCA_PropItemRec;
+    const ADataLen : Cardinal;
+    const ADest    : PUCA_PropItemRec
+  );
+  procedure CompareProps(
+    const AProp1,
+          AProp2   : PUCA_PropItemRec;
+    const ADataLen : Integer
+  );
 
 
 resourcestring
 resourcestring
   SInsufficientMemoryBuffer = 'Insufficient Memory Buffer';
   SInsufficientMemoryBuffer = 'Insufficient Memory Buffer';
@@ -639,6 +662,14 @@ type
 
 
 { TUInt24Rec }
 { TUInt24Rec }
 
 
+class operator TUInt24Rec.Explicit(a : TUInt24Rec) : Cardinal;
+begin
+  TCardinalRec(Result).byte0 := a.byte0;
+  TCardinalRec(Result).byte1 := a.byte1;
+  TCardinalRec(Result).byte2 := a.byte2;
+  TCardinalRec(Result).byte3 := 0;
+end;
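+
+{ The explicit cast is what the new test code relies on, e.g.
+    Check(Cardinal(PUInt24(pb)^) = Ord('f'),'Context CodePoint');
+  it widens the three stored bytes into a Cardinal with the high byte set
+  to zero, same as the Implicit operator below. }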
+
 class operator TUInt24Rec.Implicit(a : TUInt24Rec) : Cardinal;
 class operator TUInt24Rec.Implicit(a : TUInt24Rec) : Cardinal;
 begin
 begin
   TCardinalRec(Result).byte0 := a.byte0;
   TCardinalRec(Result).byte0 := a.byte0;
@@ -816,18 +847,13 @@ begin
   Result := a <= Cardinal(b);
   Result := a <= Cardinal(b);
 end;
 end;
 
 
-function GenerateEndianIncludeFileName(const AStoreName : string): string;inline;
-const
-  ENDIAN_SUFFIX =
-{$IFDEF ENDIAN_LITTLE}
-    'le';
-{$ENDIF ENDIAN_LITTLE}
-{$IFDEF ENDIAN_BIG}
-    'be';
-{$ENDIF ENDIAN_BIG}
+function GenerateEndianIncludeFileName(
+  const AStoreName : string;
+  const AEndian    : TEndianKind
+): string;inline;
 begin
 begin
   Result := ExtractFilePath(AStoreName) +
   Result := ExtractFilePath(AStoreName) +
-            ChangeFileExt(ExtractFileName(AStoreName),Format('_%s.inc',[ENDIAN_SUFFIX]));
+            ChangeFileExt(ExtractFileName(AStoreName),Format('_%s.inc',[ENDIAN_SUFFIX[AEndian]]));
 end;
 end;
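+
+{ For example, assuming AStoreName = 'data/collation_root.pas':
+    GenerateEndianIncludeFileName(AStoreName,ekLittle) = 'data/collation_root_le.inc'
+    GenerateEndianIncludeFileName(AStoreName,ekBig)    = 'data/collation_root_be.inc' }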
 
 
 function IsBitON(const AData : Byte; const ABit : TBitOrder) : Boolean ;
 function IsBitON(const AData : Byte; const ABit : TBitOrder) : Boolean ;
@@ -1771,7 +1797,6 @@ end;
 procedure MakeBmpTables(
 procedure MakeBmpTables(
   var   AFirstTable   : TBmpFirstTable;
   var   AFirstTable   : TBmpFirstTable;
   var   ASecondTable  : TBmpSecondTable;
   var   ASecondTable  : TBmpSecondTable;
-  const APropList     : TPropRecArray;
   const ADataLineList : TDataLineRecArray
   const ADataLineList : TDataLineRecArray
 );
 );
 var
 var
@@ -2058,8 +2083,7 @@ begin
   AddLine('  UC_PROP_ARRAY : array[0..(UC_PROP_REC_COUNT-1)] of TUC_Prop = (');
   AddLine('  UC_PROP_ARRAY : array[0..(UC_PROP_REC_COUNT-1)] of TUC_Prop = (');
   p := @APropList[0];
   p := @APropList[0];
   for i := Low(APropList) to High(APropList) - 1 do begin
   for i := Low(APropList) to High(APropList) - 1 do begin
-    locLine := //'    (Category : TUnicodeCategory.' + GetEnumName(pti,Ord(p^.Category)) + ';' +
-               '    (CategoryData : ' + IntToStr(p^.CategoryData) + ';' +
+    locLine := '    (CategoryData : ' + IntToStr(p^.CategoryData) + ';' +
                ' CCC : ' + IntToStr(p^.CCC) + ';' +
                ' CCC : ' + IntToStr(p^.CCC) + ';' +
                ' NumericIndex : ' + IntToStr(p^.NumericIndex) + ';' +
                ' NumericIndex : ' + IntToStr(p^.NumericIndex) + ';' +
                ' SimpleUpperCase : ' + UInt24ToStr(p^.SimpleUpperCase,AEndian) + ';' +
                ' SimpleUpperCase : ' + UInt24ToStr(p^.SimpleUpperCase,AEndian) + ';' +
@@ -2625,7 +2649,7 @@ end;
 function ConstructContextTree(
 function ConstructContextTree(
   const AContext : PUCA_LineContextRec;
   const AContext : PUCA_LineContextRec;
   var   ADestBuffer;
   var   ADestBuffer;
-  const ADestBufferLength : Integer
+  const ADestBufferLength : Cardinal
 ) : PUCA_PropItemContextTreeRec;forward;
 ) : PUCA_PropItemContextTreeRec;forward;
 function ConstructItem(
 function ConstructItem(
         AItem         : PUCA_PropItemRec;
         AItem         : PUCA_PropItemRec;
@@ -2636,56 +2660,57 @@ function ConstructItem(
   const AStoreCP      : Boolean;
   const AStoreCP      : Boolean;
   const AContext      : PUCA_LineContextRec;
   const AContext      : PUCA_LineContextRec;
   const ADeleted      : Boolean
   const ADeleted      : Boolean
-) : Integer;
+) : Cardinal;
 var
 var
-  i, c : Integer;
+  i : Integer;
   p : PUCA_PropItemRec;
   p : PUCA_PropItemRec;
   pw : PUCA_PropWeights;
   pw : PUCA_PropWeights;
   pb : PByte;
   pb : PByte;
   hasContext : Boolean;
   hasContext : Boolean;
   contextTree : PUCA_PropItemContextTreeRec;
   contextTree : PUCA_PropItemContextTreeRec;
+  wl : Integer;
 begin
 begin
   p := AItem;
   p := AItem;
-  {if AStoreCP then begin
-    PUInt24(p)^ := ACodePoint;
-    p := PUCA_PropItemRec(PtrUInt(p) + SizeOf(UInt24));
-  end;  }
+  p^.Size := 0;
   p^.Flags := 0;
   p^.Flags := 0;
-  p^.Valid := 0;
-  SetBit(p^.Valid,BIT_POS_VALIDE,(AValid <> 0));
+  p^.WeightLength := 0;
+  SetBit(p^.Flags,AItem^.FLAG_VALID,(AValid <> 0));
   p^.ChildCount := AChildCount;
   p^.ChildCount := AChildCount;
-  c := Length(AWeights);
-  p^.WeightLength := c;
-  if (c = 0) then begin
+  hasContext := (AContext <> nil) and (Length(AContext^.Data) > 0);
+  if hasContext then
+    wl := 0
+  else
+    wl := Length(AWeights);
+  p^.WeightLength := wl;
+  if (wl = 0) then begin
     Result := SizeOf(TUCA_PropItemRec);
     Result := SizeOf(TUCA_PropItemRec);
     if ADeleted then
     if ADeleted then
       SetBit(AItem^.Flags,AItem^.FLAG_DELETION,True);
       SetBit(AItem^.Flags,AItem^.FLAG_DELETION,True);
   end else begin
   end else begin
-    Result := SizeOf(TUCA_PropItemRec) + (c*SizeOf(TUCA_PropWeights));//PtrUInt(pw) - PtrUInt(AItem);
-    //pw := PUCA_PropWeights(PtrUInt(p) + SizeOf(TUCA_PropItemRec));
+    Result := SizeOf(TUCA_PropItemRec) + (wl*SizeOf(TUCA_PropWeights));
     pb := PByte(PtrUInt(p) + SizeOf(TUCA_PropItemRec));
     pb := PByte(PtrUInt(p) + SizeOf(TUCA_PropItemRec));
     PWord(pb)^ := AWeights[0].Weights[0];
     PWord(pb)^ := AWeights[0].Weights[0];
     pb := pb + 2;
     pb := pb + 2;
     if (AWeights[0].Weights[1] > High(Byte)) then begin
     if (AWeights[0].Weights[1] > High(Byte)) then begin
-      SetBit(p^.Valid,(BIT_POS_COMPRESS_WEIGHT_1),True);
       PWord(pb)^ := AWeights[0].Weights[1];
       PWord(pb)^ := AWeights[0].Weights[1];
       pb := pb + 2;
       pb := pb + 2;
     end else begin
     end else begin
+      SetBit(p^.Flags,p^.FLAG_COMPRESS_WEIGHT_1,True);
       pb^ := AWeights[0].Weights[1];
       pb^ := AWeights[0].Weights[1];
       pb := pb + 1;
       pb := pb + 1;
       Result := Result - 1;
       Result := Result - 1;
     end;
     end;
     if (AWeights[0].Weights[2] > High(Byte)) then begin
     if (AWeights[0].Weights[2] > High(Byte)) then begin
-      SetBit(p^.Valid,(BIT_POS_COMPRESS_WEIGHT_2),True);
       PWord(pb)^ := AWeights[0].Weights[2];
       PWord(pb)^ := AWeights[0].Weights[2];
       pb := pb + 2;
       pb := pb + 2;
     end else begin
     end else begin
+      SetBit(p^.Flags,p^.FLAG_COMPRESS_WEIGHT_2,True);
       pb^ := AWeights[0].Weights[2];
       pb^ := AWeights[0].Weights[2];
       pb := pb + 1;
       pb := pb + 1;
       Result := Result - 1;
       Result := Result - 1;
     end;
     end;
     pw := PUCA_PropWeights(pb);
     pw := PUCA_PropWeights(pb);
-    for i := 1 to c - 1 do begin
+    for i := 1 to wl - 1 do begin
       pw^.Weights[0] := AWeights[i].Weights[0];
       pw^.Weights[0] := AWeights[i].Weights[0];
       pw^.Weights[1] := AWeights[i].Weights[1];
       pw^.Weights[1] := AWeights[i].Weights[1];
       pw^.Weights[2] := AWeights[i].Weights[2];
       pw^.Weights[2] := AWeights[i].Weights[2];
@@ -2700,7 +2725,7 @@ begin
     SetBit(AItem^.Flags,AItem^.FLAG_CODEPOINT,True);
     SetBit(AItem^.Flags,AItem^.FLAG_CODEPOINT,True);
   end;
   end;
   if hasContext then begin
   if hasContext then begin
-    contextTree := ConstructContextTree(AContext,Pointer(PtrUInt(AItem)+Result)^,-1);
+    contextTree := ConstructContextTree(AContext,Pointer(PtrUInt(AItem)+Result)^,MaxInt);
     Result := Result + Cardinal(contextTree^.Size);
     Result := Result + Cardinal(contextTree^.Size);
     SetBit(AItem^.Flags,AItem^.FLAG_CONTEXTUAL,True);
     SetBit(AItem^.Flags,AItem^.FLAG_CONTEXTUAL,True);
   end;
   end;
@@ -2708,13 +2733,12 @@ begin
 end;
 end;
 
 
 function CalcCharChildCount(
 function CalcCharChildCount(
-  const AChar           : Cardinal;
   const ASearchStartPos : Integer;
   const ASearchStartPos : Integer;
   const ALinePos        : Integer;
   const ALinePos        : Integer;
   const ABookLines      : PUCA_LineRec;
   const ABookLines      : PUCA_LineRec;
   const AMaxLength      : Integer;
   const AMaxLength      : Integer;
   const ABookIndex      : TUCA_DataBookIndex;
   const ABookIndex      : TUCA_DataBookIndex;
-  out   ALineCount      : Integer
+  out   ALineCount      : Word
 ) : Byte;
 ) : Byte;
 var
 var
   locLinePos : Integer;
   locLinePos : Integer;
@@ -2726,21 +2750,6 @@ var
     p := @ABookLines[ABookIndex[locLinePos]];
     p := @ABookLines[ABookIndex[locLinePos]];
   end;
   end;
 
 
-  procedure DoDump();
-  var
-    px : PUCA_LineRec;
-    k, ki : Integer;
-  begin
-    WriteLn;
-    WriteLn('Dump');
-    for k := ALinePos to ALinePos + 15 do begin
-      px := @ABookLines[ABookIndex[k]];
-      for ki := 0 to Length(px^.CodePoints) -1 do
-        Write(px^.CodePoints[ki],' ');
-      WriteLn;
-    end;
-  end;
-
 var
 var
   i, locTargetLen, locTargetBufferSize, r : Integer;
   i, locTargetLen, locTargetBufferSize, r : Integer;
   locTarget : array[0..127] of Cardinal;
   locTarget : array[0..127] of Cardinal;
@@ -2758,9 +2767,9 @@ begin
     r := 1;
     r := 1;
     locLastChar := p^.CodePoints[ASearchStartPos];
     locLastChar := p^.CodePoints[ASearchStartPos];
   end;
   end;
-  IncP();
   i := 1;
   i := 1;
   while (i < AMaxLength) do begin
   while (i < AMaxLength) do begin
+    IncP();
     if (Length(p^.CodePoints) < locTargetLen) then
     if (Length(p^.CodePoints) < locTargetLen) then
       Break;
       Break;
     if not CompareMem(@locTarget[0],@p^.CodePoints[0],locTargetBufferSize) then
     if not CompareMem(@locTarget[0],@p^.CodePoints[0],locTargetBufferSize) then
@@ -2769,7 +2778,6 @@ begin
       Inc(r);
       Inc(r);
       locLastChar := p^.CodePoints[ASearchStartPos];
       locLastChar := p^.CodePoints[ASearchStartPos];
     end;
     end;
-    IncP();
     Inc(i);
     Inc(i);
   end;
   end;
   ALineCount := i;
   ALineCount := i;
@@ -2823,9 +2831,10 @@ function InternalConstructFromTrie(
   const AItem  : PUCA_PropItemRec;
   const AItem  : PUCA_PropItemRec;
   const ALines : PUCA_LineRec;
   const ALines : PUCA_LineRec;
   const AStoreCp : Boolean
   const AStoreCp : Boolean
-) : Integer;
+) : Cardinal;
 var
 var
-  i, size : Integer;
+  i : Integer;
+  size : Cardinal;
   p : PUCA_PropItemRec;
   p : PUCA_PropItemRec;
   n : PTrieNode;
   n : PTrieNode;
 begin
 begin
@@ -2876,16 +2885,18 @@ var
   i, c, k, kc : Integer;
   i, c, k, kc : Integer;
   p, p1, p2 : PUCA_PropItemRec;
   p, p1, p2 : PUCA_PropItemRec;
   lines, pl1, pl2 : PUCA_LineRec;
   lines, pl1, pl2 : PUCA_LineRec;
-  childCount, lineCount, size : Integer;
+  childCount, lineCount : Word;
+  size : Cardinal;
   trieRoot : PTrieNode;
   trieRoot : PTrieNode;
-  MaxChildCount, MaxSize : Integer;
+  MaxChildCount, MaxSize : Cardinal;
+  childList : array of PUCA_PropItemRec;
 begin
 begin
   locIndex := CreateIndex(ABook);
   locIndex := CreateIndex(ABook);
   i := Length(ABook^.Lines);
   i := Length(ABook^.Lines);
   i := 30 * i * (SizeOf(TUCA_PropItemRec) + SizeOf(TUCA_PropWeights));
   i := 30 * i * (SizeOf(TUCA_PropItemRec) + SizeOf(TUCA_PropWeights));
-  GetMem(AProps,SizeOf(TUCA_DataBook));
+  AProps := AllocMem(SizeOf(TUCA_DataBook));
   AProps^.ItemSize := i;
   AProps^.ItemSize := i;
-  GetMem(AProps^.Items,i);
+  AProps^.Items := AllocMem(i);
   propIndexCount := 0;
   propIndexCount := 0;
   SetLength(AProps^.Index,Length(ABook^.Lines));
   SetLength(AProps^.Index,Length(ABook^.Lines));
   p := AProps^.Items;
   p := AProps^.Items;
@@ -2909,26 +2920,28 @@ begin
           MaxSize := size;
           MaxSize := size;
       end else begin
       end else begin
         kc := Length(pl1^.CodePoints);
         kc := Length(pl1^.CodePoints);
+        SetLength(childList,kc);
         for k := 0 to kc - 2 do begin
         for k := 0 to kc - 2 do begin
-          size := ConstructItem(p,pl1^.CodePoints[k],0,1,[],(k>0),@pl1^.Context,pl1^.Deleted);
+          size := ConstructItem(p,pl1^.CodePoints[k],0,1,[],(k>0),nil,False);
           if (k = 0) then
           if (k = 0) then
             CapturePropIndex(p,pl1^.CodePoints[k]);
             CapturePropIndex(p,pl1^.CodePoints[k]);
+          childList[k] := p;
           p := PUCA_PropItemRec(PtrUInt(p) + size);
           p := PUCA_PropItemRec(PtrUInt(p) + size);
         end;
         end;
         size := ConstructItem(p,pl1^.CodePoints[kc-1],1,0,pl1^.Weights,True,@pl1^.Context,pl1^.Deleted);
         size := ConstructItem(p,pl1^.CodePoints[kc-1],1,0,pl1^.Weights,True,@pl1^.Context,pl1^.Deleted);
+        childList[kc-1] := p;
         p := PUCA_PropItemRec(PtrUInt(p) + size);
         p := PUCA_PropItemRec(PtrUInt(p) + size);
-        p2 := p;
         for k := kc - 2 downto 0 do begin
         for k := kc - 2 downto 0 do begin
-          p1 := PUCA_PropItemRec(PtrUInt(p2) - p2^.Size);
+          p1 := childList[k];
+          p2 := childList[k+1];
           p1^.Size := p1^.Size + p2^.Size;
           p1^.Size := p1^.Size + p2^.Size;
-          p2 := p1;
         end;
         end;
         if (p1^.Size > MaxSize) then
         if (p1^.Size > MaxSize) then
           MaxSize := p1^.Size;
           MaxSize := p1^.Size;
       end;
       end;
       lineCount := 1;
       lineCount := 1;
     end else begin
     end else begin
-      childCount := CalcCharChildCount(pl1^.CodePoints[0],1,i,lines,c,locIndex,lineCount);
+      childCount := CalcCharChildCount(1,i,lines,c,locIndex,lineCount);
       if (childCount < 1) then
       if (childCount < 1) then
         raise Exception.CreateFmt('Expected "child count > 1" but found %d.',[childCount]);
         raise Exception.CreateFmt('Expected "child count > 1" but found %d.',[childCount]);
       if (lineCount < 2) then
       if (lineCount < 2) then
@@ -2955,19 +2968,21 @@ begin
         MaxSize := size;
         MaxSize := size;
     end else begin
     end else begin
       kc := Length(pl1^.CodePoints);
       kc := Length(pl1^.CodePoints);
+      SetLength(childList,kc);
       for k := 0 to kc - 2 do begin
       for k := 0 to kc - 2 do begin
         size := ConstructItem(p,pl1^.CodePoints[k],0,1,[],(k>0),@pl1^.Context,pl1^.Deleted);
         size := ConstructItem(p,pl1^.CodePoints[k],0,1,[],(k>0),@pl1^.Context,pl1^.Deleted);
         if (k = 0) then
         if (k = 0) then
           CapturePropIndex(p,pl1^.CodePoints[0]);
           CapturePropIndex(p,pl1^.CodePoints[0]);
+        childList[k] := p;
         p := PUCA_PropItemRec(PtrUInt(p) + size);
         p := PUCA_PropItemRec(PtrUInt(p) + size);
       end;
       end;
       size := ConstructItem(p,pl1^.CodePoints[kc-1],1,0,pl1^.Weights,True,@pl1^.Context,pl1^.Deleted);
       size := ConstructItem(p,pl1^.CodePoints[kc-1],1,0,pl1^.Weights,True,@pl1^.Context,pl1^.Deleted);
+      childList[kc-1] := p;
       p := PUCA_PropItemRec(PtrUInt(p) + size);
       p := PUCA_PropItemRec(PtrUInt(p) + size);
-      p2 := p;
-      for k := kc - 2 downto 0 do begin
-        p1 := PUCA_PropItemRec(PtrUInt(p2) - p2^.Size);
+      for i := kc - 2 downto 0 do begin
+        p1 := childList[i];
+        p2 := childList[i+1];
         p1^.Size := p1^.Size + p2^.Size;
         p1^.Size := p1^.Size + p2^.Size;
-        p2 := p1;
       end;
       end;
       if (size > MaxSize) then
       if (size > MaxSize) then
         MaxSize := size;
         MaxSize := size;
@@ -2977,6 +2992,8 @@ begin
   ReAllocMem(AProps^.Items,c);
   ReAllocMem(AProps^.Items,c);
   AProps^.ItemSize := c;
   AProps^.ItemSize := c;
   SetLength(AProps^.Index,propIndexCount);
   SetLength(AProps^.Index,propIndexCount);
+  AProps^.ItemsOtherEndian := AllocMem(AProps^.ItemSize);
+  ReverseFromNativeEndian(AProps^.Items,AProps^.ItemSize,AProps^.ItemsOtherEndian);
 
 
   k := 0;
   k := 0;
   c := High(Word);
   c := High(Word);
@@ -3196,10 +3213,10 @@ end;
 
 
 procedure GenerateUCA_BmpTables(
 procedure GenerateUCA_BmpTables(
         AStream,
         AStream,
-        ABinStream    : TStream;
-  var   AFirstTable   : TucaBmpFirstTable;
-  var   ASecondTable  : TucaBmpSecondTable;
-  const AEndian       : TEndianKind
+        ANativeEndianStream,
+        ANonNativeEndianStream : TStream;
+  var   AFirstTable            : TucaBmpFirstTable;
+  var   ASecondTable           : TucaBmpSecondTable
 );
 );
 
 
   procedure AddLine(AOut : TStream; const ALine : ansistring);
   procedure AddLine(AOut : TStream; const ALine : ansistring);
@@ -3231,31 +3248,52 @@ begin
   AddLine(AStream,locLine);
   AddLine(AStream,locLine);
   AddLine(AStream,'  );' + sLineBreak);
   AddLine(AStream,'  );' + sLineBreak);
 
 
-  AddLine(ABinStream,'const');
-  AddLine(ABinStream,'  UCA_TABLE_2 : array[0..(256*' + IntToStr(Length(ASecondTable)) +'-1)] of UInt24 =(');
+  AddLine(ANativeEndianStream,'const');
+  AddLine(ANativeEndianStream,'  UCA_TABLE_2 : array[0..(256*' + IntToStr(Length(ASecondTable)) +'-1)] of UInt24 =(');
   c := High(ASecondTable);
   c := High(ASecondTable);
   for i := Low(ASecondTable) to c do begin
   for i := Low(ASecondTable) to c do begin
     locLine := '';
     locLine := '';
     for j := Low(TucaBmpSecondTableItem) to High(TucaBmpSecondTableItem) do begin
     for j := Low(TucaBmpSecondTableItem) to High(TucaBmpSecondTableItem) do begin
       value := ASecondTable[i][j];
       value := ASecondTable[i][j];
-      locLine := locLine + UInt24ToStr(value,AEndian) + ',';
+      locLine := locLine + UInt24ToStr(value,ENDIAN_NATIVE) + ',';
       if (((j+1) mod 2) = 0) then begin
       if (((j+1) mod 2) = 0) then begin
         if (i = c) and (j = 255) then
         if (i = c) and (j = 255) then
           Delete(locLine,Length(locLine),1);
           Delete(locLine,Length(locLine),1);
         locLine := '    ' + locLine;
         locLine := '    ' + locLine;
-        AddLine(ABinStream,locLine);
+        AddLine(ANativeEndianStream,locLine);
         locLine := '';
         locLine := '';
       end;
       end;
     end;
     end;
   end;
   end;
-  AddLine(ABinStream,'  );' + sLineBreak);
+  AddLine(ANativeEndianStream,'  );' + sLineBreak);
+
+  AddLine(ANonNativeEndianStream,'const');
+  AddLine(ANonNativeEndianStream,'  UCA_TABLE_2 : array[0..(256*' + IntToStr(Length(ASecondTable)) +'-1)] of UInt24 =(');
+  c := High(ASecondTable);
+  for i := Low(ASecondTable) to c do begin
+    locLine := '';
+    for j := Low(TucaBmpSecondTableItem) to High(TucaBmpSecondTableItem) do begin
+      value := ASecondTable[i][j];
+      locLine := locLine + UInt24ToStr(value,ENDIAN_NON_NATIVE) + ',';
+      if (((j+1) mod 2) = 0) then begin
+        if (i = c) and (j = 255) then
+          Delete(locLine,Length(locLine),1);
+        locLine := '    ' + locLine;
+        AddLine(ANonNativeEndianStream,locLine);
+        locLine := '';
+      end;
+    end;
+  end;
+  AddLine(ANonNativeEndianStream,'  );' + sLineBreak);
 end;
 end;
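+
+{ Call site sketch for the new signature (stream names are illustrative):
+    GenerateUCA_BmpTables(srcStream,nativeInc,otherInc,firstTable,secondTable);
+  The second stream always receives the host's native byte order and the
+  third the swapped order, so one run now produces both the _le and _be
+  include files regardless of the host endianness; GenerateUCA_OBmpTables
+  below follows the same pattern. }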
 
 
 procedure GenerateUCA_PropTable(
 procedure GenerateUCA_PropTable(
 // WARNING : files must be generated for each endianess (Little / Big)
 // WARNING : files must be generated for each endianess (Little / Big)
         ADest     : TStream;
         ADest     : TStream;
-  const APropBook : PUCA_PropBook
+  const APropBook : PUCA_PropBook;
+  const AEndian   : TEndianKind
 );
 );
+
   procedure AddLine(const ALine : ansistring);
   procedure AddLine(const ALine : ansistring);
   var
   var
     buffer : ansistring;
     buffer : ansistring;
@@ -3273,7 +3311,10 @@ begin
   AddLine('const');
   AddLine('const');
   AddLine('  UCA_PROPS : array[0..' + IntToStr(c-1) + '] of Byte = (');
   AddLine('  UCA_PROPS : array[0..' + IntToStr(c-1) + '] of Byte = (');
   locLine := '';
   locLine := '';
-  p := PByte(APropBook^.Items);
+  if (AEndian = ENDIAN_NATIVE) then
+    p := PByte(APropBook^.Items)
+  else
+    p := PByte(APropBook^.ItemsOtherEndian);
   for i := 0 to c - 2 do begin
   for i := 0 to c - 2 do begin
     locLine := locLine + IntToStr(p[i]) + ',';
     locLine := locLine + IntToStr(p[i]) + ',';
     if (((i+1) mod 60) = 0) then begin
     if (((i+1) mod 60) = 0) then begin
@@ -3290,10 +3331,10 @@ end;
 
 
 procedure GenerateUCA_OBmpTables(
 procedure GenerateUCA_OBmpTables(
         AStream,
         AStream,
-        ABinStream    : TStream;
-  var   AFirstTable   : TucaOBmpFirstTable;
-  var   ASecondTable  : TucaOBmpSecondTable;
-  const AEndian       : TEndianKind
+        ANativeEndianStream,
+        ANonNativeEndianStream : TStream;
+  var   AFirstTable            : TucaOBmpFirstTable;
+  var   ASecondTable           : TucaOBmpSecondTable
 );
 );
 
 
   procedure AddLine(AOut : TStream; const ALine : ansistring);
   procedure AddLine(AOut : TStream; const ALine : ansistring);
@@ -3325,23 +3366,41 @@ begin
   AddLine(AStream,locLine);
   AddLine(AStream,locLine);
   AddLine(AStream,'  );' + sLineBreak);
   AddLine(AStream,'  );' + sLineBreak);
 
 
-  AddLine(ABinStream,'  UCAO_TABLE_2 : array[0..('+IntToStr(LOW_SURROGATE_COUNT)+'*' + IntToStr(Length(ASecondTable)) +'-1)] of UInt24 =(');
+  AddLine(ANativeEndianStream,'  UCAO_TABLE_2 : array[0..('+IntToStr(LOW_SURROGATE_COUNT)+'*' + IntToStr(Length(ASecondTable)) +'-1)] of UInt24 =(');
+  c := High(ASecondTable);
+  for i := Low(ASecondTable) to c do begin
+    locLine := '';
+    for j := Low(TucaOBmpSecondTableItem) to High(TucaOBmpSecondTableItem) do begin
+      value := ASecondTable[i][j];
+      locLine := locLine + UInt24ToStr(value,ENDIAN_NATIVE) + ',';
+      if (((j+1) mod 2) = 0) then begin
+        if (i = c) and (j = High(TucaOBmpSecondTableItem)) then
+          Delete(locLine,Length(locLine),1);
+        locLine := '    ' + locLine;
+        AddLine(ANativeEndianStream,locLine);
+        locLine := '';
+      end;
+    end;
+  end;
+  AddLine(ANativeEndianStream,'  );' + sLineBreak);
+
+  AddLine(ANonNativeEndianStream,'  UCAO_TABLE_2 : array[0..('+IntToStr(LOW_SURROGATE_COUNT)+'*' + IntToStr(Length(ASecondTable)) +'-1)] of UInt24 =(');
   c := High(ASecondTable);
   c := High(ASecondTable);
   for i := Low(ASecondTable) to c do begin
   for i := Low(ASecondTable) to c do begin
     locLine := '';
     locLine := '';
     for j := Low(TucaOBmpSecondTableItem) to High(TucaOBmpSecondTableItem) do begin
     for j := Low(TucaOBmpSecondTableItem) to High(TucaOBmpSecondTableItem) do begin
       value := ASecondTable[i][j];
       value := ASecondTable[i][j];
-      locLine := locLine + UInt24ToStr(value,AEndian) + ',';
+      locLine := locLine + UInt24ToStr(value,ENDIAN_NON_NATIVE) + ',';
       if (((j+1) mod 2) = 0) then begin
       if (((j+1) mod 2) = 0) then begin
         if (i = c) and (j = High(TucaOBmpSecondTableItem)) then
         if (i = c) and (j = High(TucaOBmpSecondTableItem)) then
           Delete(locLine,Length(locLine),1);
           Delete(locLine,Length(locLine),1);
         locLine := '    ' + locLine;
         locLine := '    ' + locLine;
-        AddLine(ABinStream,locLine);
+        AddLine(ANonNativeEndianStream,locLine);
         locLine := '';
         locLine := '';
       end;
       end;
     end;
     end;
   end;
   end;
-  AddLine(ABinStream,'  );' + sLineBreak);
+  AddLine(ANonNativeEndianStream,'  );' + sLineBreak);
 end;
 end;
 
 
 //-------------------------------------------
 //-------------------------------------------
@@ -3760,17 +3819,6 @@ end;
 
 
 { TUCA_PropItemRec }
 { TUCA_PropItemRec }
 
 
-function TUCA_PropItemRec.GetWeightLength: TWeightLength;
-begin
-  //Result := TWeightLength(Valid and Byte($FC) shr 3);
-  Result := TWeightLength((Valid and Byte($F8)) shr 3);
-end;
-
-procedure TUCA_PropItemRec.SetWeightLength(AValue: TWeightLength);
-begin
-  Valid := Valid or Byte(Byte(AValue) shl 3);
-end;
-
 function TUCA_PropItemRec.GetWeightSize : Word;
 function TUCA_PropItemRec.GetWeightSize : Word;
 var
 var
   c : Integer;
   c : Integer;
@@ -3779,12 +3827,17 @@ begin
   if (c = 0) then
   if (c = 0) then
     exit(0);
     exit(0);
   Result := c*SizeOf(TUCA_PropWeights);
   Result := c*SizeOf(TUCA_PropWeights);
-  if IsBitON(Self.Valid,BIT_POS_COMPRESS_WEIGHT_1) then
+  if IsWeightCompress_1() then
     Result := Result - 1;
     Result := Result - 1;
-  if IsBitON(Self.Valid,BIT_POS_COMPRESS_WEIGHT_2) then
+  if IsWeightCompress_2() then
     Result := Result - 1;
     Result := Result - 1;
 end;
 end;
 
 
+function TUCA_PropItemRec.HasCodePoint(): Boolean;
+begin
+  Result := IsBitON(Flags,FLAG_CODEPOINT);
+end;
+
 procedure TUCA_PropItemRec.GetWeightArray(ADest: PUCA_PropWeights);
 procedure TUCA_PropItemRec.GetWeightArray(ADest: PUCA_PropWeights);
 var
 var
   c : Integer;
   c : Integer;
@@ -3796,14 +3849,14 @@ begin
   pd := ADest;
   pd := ADest;
   pd^.Weights[0] := PWord(p)^;
   pd^.Weights[0] := PWord(p)^;
   p := p + 2;
   p := p + 2;
-  if IsBitON(Self.Valid,BIT_POS_COMPRESS_WEIGHT_1) then begin
+  if not IsWeightCompress_1() then begin
     pd^.Weights[1] := PWord(p)^;
     pd^.Weights[1] := PWord(p)^;
     p := p + 2;
     p := p + 2;
   end else begin
   end else begin
     pd^.Weights[1] := p^;
     pd^.Weights[1] := p^;
     p := p + 1;
     p := p + 1;
   end;
   end;
-  if IsBitON(Self.Valid,BIT_POS_COMPRESS_WEIGHT_2) then begin
+  if not IsWeightCompress_2() then begin
     pd^.Weights[2] := PWord(p)^;
     pd^.Weights[2] := PWord(p)^;
     p := p + 2;
     p := p + 2;
   end else begin
   end else begin
@@ -3814,16 +3867,20 @@ begin
     Move(p^, (pd+1)^, ((c-1)*SizeOf(TUCA_PropWeights)));
     Move(p^, (pd+1)^, ((c-1)*SizeOf(TUCA_PropWeights)));
 end;
 end;
 
 
-function TUCA_PropItemRec.GetSelfOnlySize: Word;
+function TUCA_PropItemRec.GetSelfOnlySize() : Cardinal;
 begin
 begin
   Result := SizeOf(TUCA_PropItemRec);
   Result := SizeOf(TUCA_PropItemRec);
   if (WeightLength > 0) then begin
   if (WeightLength > 0) then begin
     Result := Result + (WeightLength * Sizeof(TUCA_PropWeights));
     Result := Result + (WeightLength * Sizeof(TUCA_PropWeights));
-    if not IsBitON(Self.Valid,BIT_POS_COMPRESS_WEIGHT_1) then
+    if IsWeightCompress_1() then
       Result := Result - 1;
       Result := Result - 1;
-    if not IsBitON(Self.Valid,BIT_POS_COMPRESS_WEIGHT_2) then
+    if IsWeightCompress_2() then
       Result := Result - 1;
       Result := Result - 1;
   end;
   end;
+  if HasCodePoint() then
+    Result := Result + SizeOf(UInt24);
+  if Contextual then
+    Result := Result + Cardinal(GetContext()^.Size);
 end;
 end;
 
 
 procedure TUCA_PropItemRec.SetContextual(AValue : Boolean);
 procedure TUCA_PropItemRec.SetContextual(AValue : Boolean);
@@ -3858,9 +3915,34 @@ begin
   Result := IsBitON(Flags,FLAG_DELETION);
   Result := IsBitON(Flags,FLAG_DELETION);
 end;
 end;
 
 
+function TUCA_PropItemRec.IsValid() : Boolean;
+begin
+  Result := IsBitON(Flags,FLAG_VALID);
+end;
+
+function TUCA_PropItemRec.IsWeightCompress_1 : Boolean;
+begin
+  Result := IsBitON(Flags,FLAG_COMPRESS_WEIGHT_1);
+end;
+
+function TUCA_PropItemRec.IsWeightCompress_2 : Boolean;
+begin
+  Result := IsBitON(Flags,FLAG_COMPRESS_WEIGHT_2);
+end;
+
 function TUCA_PropItemRec.GetCodePoint: UInt24;
 function TUCA_PropItemRec.GetCodePoint: UInt24;
 begin
 begin
-  Result := PUInt24(PtrUInt(@Self) + Self.GetSelfOnlySize())^;
+  if HasCodePoint() then begin
+    if Contextual then
+      Result := PUInt24(
+                  PtrUInt(@Self) + Self.GetSelfOnlySize()- SizeOf(UInt24) -
+                  Cardinal(GetContext()^.Size)
+                )^
+    else
+      Result := PUInt24(PtrUInt(@Self) + Self.GetSelfOnlySize() - SizeOf(UInt24))^
+  end else begin
+    raise Exception.Create('TUCA_PropItemRec.GetCodePoint : "No code point available."');
+  end
 end;
 end;
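+
+{ Item layout assumed by GetCodePoint (all parts after the fixed header are
+  optional):
+    TUCA_PropItemRec header | weights (possibly compressed)
+                            | code point (UInt24, only if HasCodePoint())
+                            | context tree (only if Contextual)
+  Since GetSelfOnlySize() spans the whole item, the code point is found
+  SizeOf(UInt24) bytes, plus the context tree size when present, before
+  the end of the item. }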
 
 
 function avl_CompareCodePoints(Item1, Item2: Pointer): Integer;
 function avl_CompareCodePoints(Item1, Item2: Pointer): Integer;
@@ -3912,7 +3994,7 @@ end;
 function ConstructContextTree(
 function ConstructContextTree(
   const AContext : PUCA_LineContextRec;
   const AContext : PUCA_LineContextRec;
   var   ADestBuffer;
   var   ADestBuffer;
-  const ADestBufferLength : Integer
+  const ADestBufferLength : Cardinal
 ) : PUCA_PropItemContextTreeRec;
 ) : PUCA_PropItemContextTreeRec;
 
 
   function CalcItemOnlySize(AItem : TAVLTreeNode) : Cardinal;
   function CalcItemOnlySize(AItem : TAVLTreeNode) : Cardinal;
@@ -4014,6 +4096,465 @@ begin
   Result := r;
   Result := r;
 end;
 end;
 
 
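+{ In-place byte-order helpers: ReverseBytes swaps the ALength bytes at AData;
+  ReverseArray applies ReverseBytes to each of the AArrayLength consecutive
+  items of AItemSize bytes stored in AValue. }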
+procedure ReverseBytes(var AData; const ALength : Integer);
+var
+  i,j : PtrInt;
+  c : Byte;
+  p : PByte;
+begin
+  if (ALength = 1) then
+    exit;
+  p := @AData;
+  j := ALength div 2;
+  for i := 0 to Pred(j) do begin
+    c := p[i];
+    p[i] := p[(ALength - 1 ) - i];
+    p[(ALength - 1 ) - i] := c;
+  end;
+end;
+
+procedure ReverseArray(var AValue; const AArrayLength, AItemSize : PtrInt);
+var
+  p : PByte;
+  i : PtrInt;
+begin
+  if ( AArrayLength > 0 ) and ( AItemSize > 1 ) then begin
+    p := @AValue;
+    for i := 0 to Pred(AArrayLength) do begin
+      ReverseBytes(p^,AItemSize);
+      Inc(p,AItemSize);
+    end;
+  end;
+end;
+
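+{ Byte-swaps one context-tree node from s into d: the Left/Right offsets, the
+  code point and weight counts, then the code points and weights that follow
+  the node record, recursing into the left and right subtrees. }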
+procedure ReverseContextNodeFromNativeEndian(s, d : PUCA_PropItemContextTreeNodeRec);
+var
+  k : PtrUInt;
+  p_s, p_d : PByte;
+begin
+  d^.Left := s^.Left;
+    ReverseBytes(d^.Left,SizeOf(d^.Left));
+  d^.Right := s^.Right;
+    ReverseBytes(d^.Right,SizeOf(d^.Right));
+  d^.Data.CodePointCount := s^.Data.CodePointCount;
+    ReverseBytes(d^.Data.CodePointCount,SizeOf(d^.Data.CodePointCount));
+  d^.Data.WeightCount := s^.Data.WeightCount;
+    ReverseBytes(d^.Data.WeightCount,SizeOf(d^.Data.WeightCount));
+
+  k := SizeOf(TUCA_PropItemContextTreeNodeRec);
+  p_s := PByte(PtrUInt(s) + k);
+  p_d := PByte(PtrUInt(d) + k);
+  k := (s^.Data.CodePointCount*SizeOf(UInt24));
+  Move(p_s^,p_d^, k);
+    ReverseArray(p_d^,s^.Data.CodePointCount,SizeOf(UInt24));
+  p_s := PByte(PtrUInt(p_s) + k);
+  p_d := PByte(PtrUInt(p_d) + k);
+  k := (s^.Data.WeightCount*SizeOf(TUCA_PropWeights));
+  Move(p_s^,p_d^,k);
+    ReverseArray(p_d^,s^.Data.WeightCount,SizeOf(TUCA_PropWeights));
+  if (s^.Left > 0) then
+    ReverseContextNodeFromNativeEndian(
+      PUCA_PropItemContextTreeNodeRec(PtrUInt(s) + s^.Left),
+      PUCA_PropItemContextTreeNodeRec(PtrUInt(d) + s^.Left)
+    );
+  if (s^.Right > 0) then
+    ReverseContextNodeFromNativeEndian(
+      PUCA_PropItemContextTreeNodeRec(PtrUInt(s) + s^.Right),
+      PUCA_PropItemContextTreeNodeRec(PtrUInt(d) + s^.Right)
+    );
+end;
+
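+{ Byte-swaps a whole context tree: the Size header first, then (unless the
+  tree is empty) the root node located right after the Size field. }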
+procedure ReverseContextFromNativeEndian(s, d : PUCA_PropItemContextTreeRec);
+var
+  k : PtrUInt;
+begin
+  d^.Size := s^.Size;
+    ReverseBytes(d^.Size,SizeOf(d^.Size));
+  if (s^.Size = 0) then
+    exit;
+  k := SizeOf(s^.Size);
+  ReverseContextNodeFromNativeEndian(
+    PUCA_PropItemContextTreeNodeRec(PtrUInt(s)+k),
+    PUCA_PropItemContextTreeNodeRec(PtrUInt(d)+k)
+  );
+end;
+
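+{ Walks the serialized TUCA_PropItemRec sequence at AData (ADataLen bytes) and
+  writes a byte-swapped copy to ADest: the item header, the optional inline
+  code point, the (possibly compressed) weights and any context tree. A final
+  sanity check raises if the read and written lengths differ. }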
+procedure ReverseFromNativeEndian(
+  const AData    : PUCA_PropItemRec;
+  const ADataLen : Cardinal;
+  const ADest    : PUCA_PropItemRec
+);
+var
+  s, d : PUCA_PropItemRec;
+  sCtx, dCtx : PUCA_PropItemContextTreeRec;
+  dataEnd : PtrUInt;
+  k, i : PtrUInt;
+  p_s, p_d : PByte;
+  pw_s, pw_d : PUCA_PropWeights;
+begin
+  dataEnd := PtrUInt(AData) + ADataLen;
+  s := AData;
+  d := ADest;
+  while True do begin
+    d^.WeightLength := s^.WeightLength;
+      ReverseBytes(d^.WeightLength,SizeOf(d^.WeightLength));
+    d^.ChildCount := s^.ChildCount;
+      ReverseBytes(d^.ChildCount,SizeOf(d^.ChildCount));
+    d^.Size := s^.Size;
+      ReverseBytes(d^.Size,SizeOf(d^.Size));
+    d^.Flags := s^.Flags;
+      ReverseBytes(d^.Flags,SizeOf(d^.Flags));
+    if s^.Contextual then begin
+      k := SizeOf(TUCA_PropItemRec);
+      if s^.HasCodePoint() then
+        k := k + SizeOf(UInt24);
+      sCtx := PUCA_PropItemContextTreeRec(PtrUInt(s) + k);
+      dCtx := PUCA_PropItemContextTreeRec(PtrUInt(d) + k);
+      ReverseContextFromNativeEndian(sCtx,dCtx);
+    end;
+    if s^.HasCodePoint() then begin
+      if s^.Contextual then
+        k := s^.GetSelfOnlySize()- SizeOf(UInt24) - Cardinal(s^.GetContext()^.Size)
+      else
+        k := s^.GetSelfOnlySize() - SizeOf(UInt24);
+      p_s := PByte(PtrUInt(s) + k);
+      p_d := PByte(PtrUInt(d) + k);
+      PUInt24(p_d)^ := PUInt24(p_s)^;
+        ReverseBytes(p_d^,SizeOf(UInt24));
+    end;
+    if (s^.WeightLength > 0) then begin
+      k := SizeOf(TUCA_PropItemRec);
+      p_s := PByte(PtrUInt(s) + k);
+      p_d := PByte(PtrUInt(d) + k);
+      k := SizeOf(Word);
+      PWord(p_d)^ := PWord(p_s)^;
+        ReverseBytes(p_d^,k);
+      p_s := PByte(PtrUInt(p_s) + k);
+      p_d := PByte(PtrUInt(p_d) + k);
+      if s^.IsWeightCompress_1() then begin
+        k := SizeOf(Byte);
+        PByte(p_d)^ := PByte(p_s)^;
+      end else begin
+        k := SizeOf(Word);
+        PWord(p_d)^ := PWord(p_s)^;
+      end;
+      ReverseBytes(p_d^,k);
+      p_s := PByte(PtrUInt(p_s) + k);
+      p_d := PByte(PtrUInt(p_d) + k);
+      if s^.IsWeightCompress_2() then begin
+        k := SizeOf(Byte);
+        PByte(p_d)^ := PByte(p_s)^;
+      end else begin
+        k := SizeOf(Word);
+        PWord(p_d)^ := PWord(p_s)^;
+      end;
+      ReverseBytes(p_d^,k);
+      if (s^.WeightLength > 1) then begin
+        pw_s := PUCA_PropWeights(PtrUInt(p_s) + k);
+        pw_d := PUCA_PropWeights(PtrUInt(p_d) + k);
+        for i := 1 to s^.WeightLength - 1 do begin
+          pw_d^.Weights[0] := pw_s^.Weights[0];
+          pw_d^.Weights[1] := pw_s^.Weights[1];
+          pw_d^.Weights[2] := pw_s^.Weights[2];
+          ReverseArray(pw_d^,3,SizeOf(pw_s^.Weights[0]));
+          Inc(pw_s);
+          Inc(pw_d);
+        end;
+      end;
+    end;
+    k := s^.GetSelfOnlySize();
+    s := PUCA_PropItemRec(PtrUInt(s)+k);
+    d := PUCA_PropItemRec(PtrUInt(d)+k);
+    if (PtrUInt(s) >= dataEnd) then
+      Break;
+  end;
+  if ( (PtrUInt(s)-PtrUInt(AData)) <> (PtrUInt(d)-PtrUInt(ADest)) ) then
+    raise Exception.CreateFmt('Read data length(%d) differs from written data length(%d).',[(PtrUInt(s)-PtrUInt(AData)), (PtrUInt(d)-PtrUInt(ADest))]);
+end;
+//------------------------------------------------------------------------------
+
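+{ The *ToNativeEndian routines below mirror the *FromNativeEndian ones above,
+  but because the source bytes are still in foreign order they take counts,
+  sizes and tree offsets from the already-converted destination fields (d^). }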
+procedure ReverseContextNodeToNativeEndian(s, d : PUCA_PropItemContextTreeNodeRec);
+var
+  k : PtrUInt;
+  p_s, p_d : PByte;
+begin
+  d^.Left := s^.Left;
+    ReverseBytes(d^.Left,SizeOf(d^.Left));
+  d^.Right := s^.Right;
+    ReverseBytes(d^.Right,SizeOf(d^.Right));
+  d^.Data.CodePointCount := s^.Data.CodePointCount;
+    ReverseBytes(d^.Data.CodePointCount,SizeOf(d^.Data.CodePointCount));
+  d^.Data.WeightCount := s^.Data.WeightCount;
+    ReverseBytes(d^.Data.WeightCount,SizeOf(d^.Data.WeightCount));
+
+  k := SizeOf(TUCA_PropItemContextTreeNodeRec);
+  p_s := PByte(PtrUInt(s) + k);
+  p_d := PByte(PtrUInt(d) + k);
+  k := (d^.Data.CodePointCount*SizeOf(UInt24));
+  Move(p_s^,p_d^, k);
+    ReverseArray(p_d^,d^.Data.CodePointCount,SizeOf(UInt24));
+  p_s := PByte(PtrUInt(p_s) + k);
+  p_d := PByte(PtrUInt(p_d) + k);
+  k := (d^.Data.WeightCount*SizeOf(TUCA_PropWeights));
+  Move(p_s^,p_d^,k);
+    ReverseArray(p_d^,d^.Data.WeightCount,SizeOf(TUCA_PropWeights));
+  if (d^.Left > 0) then
+    ReverseContextNodeToNativeEndian(
+      PUCA_PropItemContextTreeNodeRec(PtrUInt(s) + d^.Left),
+      PUCA_PropItemContextTreeNodeRec(PtrUInt(d) + d^.Left)
+    );
+  if (d^.Right > 0) then
+    ReverseContextNodeToNativeEndian(
+      PUCA_PropItemContextTreeNodeRec(PtrUInt(s) + d^.Right),
+      PUCA_PropItemContextTreeNodeRec(PtrUInt(d) + d^.Right)
+    );
+end;
+
+procedure ReverseContextToNativeEndian(s, d : PUCA_PropItemContextTreeRec);
+var
+  k : PtrUInt;
+begin
+  d^.Size := s^.Size;
+    ReverseBytes(d^.Size,SizeOf(d^.Size));
+  if (s^.Size = 0) then
+    exit;
+  k := SizeOf(s^.Size);
+  ReverseContextNodeToNativeEndian(
+    PUCA_PropItemContextTreeNodeRec(PtrUInt(s)+k),
+    PUCA_PropItemContextTreeNodeRec(PtrUInt(d)+k)
+  );
+end;
+
+procedure ReverseToNativeEndian(
+  const AData    : PUCA_PropItemRec;
+  const ADataLen : Cardinal;
+  const ADest    : PUCA_PropItemRec
+);
+var
+  s, d : PUCA_PropItemRec;
+  sCtx, dCtx : PUCA_PropItemContextTreeRec;
+  dataEnd : PtrUInt;
+  k, i : PtrUInt;
+  p_s, p_d : PByte;
+  pw_s, pw_d : PUCA_PropWeights;
+begin
+  dataEnd := PtrUInt(AData) + ADataLen;
+  s := AData;
+  d := ADest;
+  while True do begin
+    d^.WeightLength := s^.WeightLength;
+      ReverseBytes(d^.WeightLength,SizeOf(d^.WeightLength));
+    d^.ChildCount := s^.ChildCount;
+      ReverseBytes(d^.ChildCount,SizeOf(d^.ChildCount));
+    d^.Size := s^.Size;
+      ReverseBytes(d^.Size,SizeOf(d^.Size));
+    d^.Flags := s^.Flags;
+      ReverseBytes(d^.Flags,SizeOf(d^.Flags));
+    if d^.Contextual then begin
+      k := SizeOf(TUCA_PropItemRec);
+      if d^.HasCodePoint() then
+        k := k + SizeOf(UInt24);
+      sCtx := PUCA_PropItemContextTreeRec(PtrUInt(s) + k);
+      dCtx := PUCA_PropItemContextTreeRec(PtrUInt(d) + k);
+      ReverseContextToNativeEndian(sCtx,dCtx);
+    end;
+    if d^.HasCodePoint() then begin
+      if d^.Contextual then
+        k := d^.GetSelfOnlySize()- SizeOf(UInt24) - Cardinal(d^.GetContext()^.Size)
+      else
+        k := d^.GetSelfOnlySize() - SizeOf(UInt24);
+      p_s := PByte(PtrUInt(s) + k);
+      p_d := PByte(PtrUInt(d) + k);
+      PUInt24(p_d)^ := PUInt24(p_s)^;
+        ReverseBytes(p_d^,SizeOf(UInt24));
+    end;
+    if (d^.WeightLength > 0) then begin
+      k := SizeOf(TUCA_PropItemRec);
+      p_s := PByte(PtrUInt(s) + k);
+      p_d := PByte(PtrUInt(d) + k);
+      k := SizeOf(Word);
+      PWord(p_d)^ := PWord(p_s)^;
+        ReverseBytes(p_d^,k);
+      p_s := PByte(PtrUInt(p_s) + k);
+      p_d := PByte(PtrUInt(p_d) + k);
+      if d^.IsWeightCompress_1() then begin
+        k := SizeOf(Byte);
+        PByte(p_d)^ := PByte(p_s)^;
+      end else begin
+        k := SizeOf(Word);
+        PWord(p_d)^ := PWord(p_s)^;
+      end;
+      ReverseBytes(p_d^,k);
+      p_s := PByte(PtrUInt(p_s) + k);
+      p_d := PByte(PtrUInt(p_d) + k);
+      if d^.IsWeightCompress_2() then begin
+        k := SizeOf(Byte);
+        PByte(p_d)^ := PByte(p_s)^;
+      end else begin
+        k := SizeOf(Word);
+        PWord(p_d)^ := PWord(p_s)^;
+      end;
+      ReverseBytes(p_d^,k);
+      if (d^.WeightLength > 1) then begin
+        pw_s := PUCA_PropWeights(PtrUInt(p_s) + k);
+        pw_d := PUCA_PropWeights(PtrUInt(p_d) + k);
+        for i := 1 to d^.WeightLength - 1 do begin
+          pw_d^.Weights[0] := pw_s^.Weights[0];
+          pw_d^.Weights[1] := pw_s^.Weights[1];
+          pw_d^.Weights[2] := pw_s^.Weights[2];
+          ReverseArray(pw_d^,3,SizeOf(pw_s^.Weights[0]));
+          Inc(pw_s);
+          Inc(pw_d);
+        end;
+      end;
+    end;
+    k := d^.GetSelfOnlySize();
+    s := PUCA_PropItemRec(PtrUInt(s)+k);
+    d := PUCA_PropItemRec(PtrUInt(d)+k);
+    if (PtrUInt(s) >= dataEnd) then
+      Break;
+  end;
+  if ( (PtrUInt(s)-PtrUInt(AData)) <> (PtrUInt(d)-PtrUInt(ADest)) ) then
+    raise Exception.CreateFmt('Read data length(%d) differs from written data length(%d).',[(PtrUInt(s)-PtrUInt(AData)), (PtrUInt(d)-PtrUInt(ADest))]);
+end;
+
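+{ Minimal assertion helpers for the comparison routines below: Check raises an
+  Exception with the given (optionally Format()-ed) message when ACondition is
+  False. }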
+procedure Check(const ACondition : Boolean; const AMsg : string);overload;
+begin
+  if not ACondition then
+    raise Exception.Create(AMsg);
+end;
+
+procedure Check(
+  const ACondition : Boolean;
+  const AFormatMsg : string;
+  const AArgs      : array of const
+);overload;
+begin
+  Check(ACondition,Format(AFormatMsg,AArgs));
+end;
+
+procedure Check(const ACondition : Boolean);overload;
+begin
+  Check(ACondition,'Check failed.')
+end;
+
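+{ CompareWeights, CompareCodePoints, CompareContext(Node) and CompareProps walk
+  two serialized property buffers in parallel and Check that every item header,
+  weight, inline code point and context tree matches. }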
+procedure CompareWeights(a, b : PUCA_PropWeights; const ALength : Integer);
+var
+  i : Integer;
+begin
+  if (ALength > 0) then begin
+    for i := 0 to ALength - 1 do begin
+      Check(a[i].Weights[0]=b[i].Weights[0]);
+      Check(a[i].Weights[1]=b[i].Weights[1]);
+      Check(a[i].Weights[2]=b[i].Weights[2]);
+    end;
+  end;
+end;
+
+procedure CompareCodePoints(a, b : PUInt24; const ALength : Integer);
+var
+  i : Integer;
+begin
+  if (ALength > 0) then begin
+    for i := 0 to ALength - 1 do
+      Check(a[i]=b[i]);
+  end;
+end;
+
+procedure CompareContextNode(AProp1, AProp2 : PUCA_PropItemContextTreeNodeRec);
+var
+  a, b : PUCA_PropItemContextTreeNodeRec;
+  k : Cardinal;
+begin
+  if (AProp1=nil) then begin
+    Check(AProp2=nil);
+    exit;
+  end;
+  a := AProp1;
+  b := AProp2;
+  Check(a^.Left=b^.Left);
+  Check(a^.Right=b^.Right);
+  Check(a^.Data.CodePointCount=b^.Data.CodePointCount);
+  Check(a^.Data.WeightCount=b^.Data.WeightCount);
+  k := SizeOf(TUCA_PropItemContextTreeNodeRec);
+  CompareCodePoints(
+    PUInt24(PtrUInt(a)+k),
+    PUInt24(PtrUInt(b)+k),
+    a^.Data.CodePointCount
+  );
+  k := SizeOf(TUCA_PropItemContextTreeNodeRec) + (a^.Data.CodePointCount*SizeOf(UInt24));
+  CompareWeights(
+    PUCA_PropWeights(PtrUInt(a)+k),
+    PUCA_PropWeights(PtrUInt(b)+k),
+    a^.Data.WeightCount
+  );
+  if (a^.Left > 0) then begin
+    k := a^.Left;
+    CompareContextNode(
+      PUCA_PropItemContextTreeNodeRec(PtrUInt(a)+k),
+      PUCA_PropItemContextTreeNodeRec(PtrUInt(b)+k)
+    );
+  end;
+  if (a^.Right > 0) then begin
+    k := a^.Right;
+    CompareContextNode(
+      PUCA_PropItemContextTreeNodeRec(PtrUInt(a)+k),
+      PUCA_PropItemContextTreeNodeRec(PtrUInt(b)+k)
+    );
+  end;
+end;
+
+procedure CompareContext(AProp1, AProp2 : PUCA_PropItemContextTreeRec);
+var
+  a, b : PUCA_PropItemContextTreeNodeRec;
+  k : Integer;
+begin
+  if (AProp1=nil) then begin
+    Check(AProp2=nil);
+    exit;
+  end;
+  Check(AProp1^.Size=AProp2^.Size);
+  k := SizeOf(AProp1^.Size);
+  a := PUCA_PropItemContextTreeNodeRec(PtrUInt(AProp1)+k);
+  b := PUCA_PropItemContextTreeNodeRec(PtrUInt(AProp2)+k);
+  CompareContextNode(a,b);
+end;
+
+procedure CompareProps(const AProp1, AProp2 : PUCA_PropItemRec; const ADataLen : Integer);
+var
+  a, b, pend : PUCA_PropItemRec;
+  wa, wb : array of TUCA_PropWeights;
+  k : Integer;
+begin
+  if (ADataLen <= 0) then
+    exit;
+  a := PUCA_PropItemRec(AProp1);
+  b := PUCA_PropItemRec(AProp2);
+  pend := PUCA_PropItemRec(PtrUInt(AProp1)+ADataLen);
+  while (a<pend) do begin
+    Check(a^.WeightLength=b^.WeightLength);
+    Check(a^.ChildCount=b^.ChildCount);
+    Check(a^.Size=b^.Size);
+    Check(a^.Flags=b^.Flags);
+    if a^.HasCodePoint() then
+      Check(a^.CodePoint = b^.CodePoint);
+    if (a^.WeightLength > 0) then begin
+      k := a^.WeightLength;
+      SetLength(wa,k);
+      SetLength(wb,k);
+      a^.GetWeightArray(@wa[0]);
+      b^.GetWeightArray(@wb[0]);
+      CompareWeights(@wa[0],@wb[0],k);
+    end;
+    if a^.Contextual then
+      CompareContext(a^.GetContext(),b^.GetContext());
+    Check(a^.GetSelfOnlySize()=b^.GetSelfOnlySize());
+    k := a^.GetSelfOnlySize();
+    a := PUCA_PropItemRec(PtrUInt(a)+k);
+    b := PUCA_PropItemRec(PtrUInt(b)+k);
+  end;
+end;
+
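+{ Usage sketch only (buffer names are hypothetical, not part of this patch):
+    ReverseFromNativeEndian(pNative,len,pOther); // native -> other byte order
+    ReverseToNativeEndian(pOther,len,pBack);     // other  -> native byte order
+    CompareProps(pNative,pBack,len);             // raises if the round trip changed anything
+  with pNative/pOther/pBack of type PUCA_PropItemRec and len the buffer length. }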
 initialization
 initialization
   FS := DefaultFormatSettings;
   FS := DefaultFormatSettings;
   FS.DecimalSeparator := '.';
   FS.DecimalSeparator := '.';

+ 0 - 1
utils/unicode/trie.pas

@@ -145,7 +145,6 @@ begin
     k := p^.ChildCount;
     k := p^.ChildCount;
     p^.Children[k] := CreateNode(AWord[i],AValue);
     p^.Children[k] := CreateNode(AWord[i],AValue);
     p^.ChildCount := k + 1;
     p^.ChildCount := k + 1;
-    p := p^.Children[k];
     Result := True;
     Result := True;
   end;
   end;
 end;
 end;

+ 0 - 2
utils/unicode/uca_test.pas

@@ -135,8 +135,6 @@ begin
     ok := False;
     ok := False;
     kc := p^.ChildCount - 1;
     kc := p^.ChildCount - 1;
     p := PUCA_PropItemRec(PtrUInt(p) + p^.GetSelfOnlySize());
     p := PUCA_PropItemRec(PtrUInt(p) + p^.GetSelfOnlySize());
-    if (i > 1) then
-      p := PUCA_PropItemRec(PtrUInt(p) + SizeOf(UInt24));
     for k := 0 to kc do begin
     for k := 0 to kc do begin
       if (AWord[i] = p^.CodePoint) then begin
       if (AWord[i] = p^.CodePoint) then begin
         ok := True;
         ok := True;

+ 37 - 28
utils/unicode/unihelper.lpr

@@ -31,6 +31,7 @@
 program unihelper;
 program unihelper;
 
 
 {$mode objfpc}{$H+}
 {$mode objfpc}{$H+}
+{$typedaddress on}
 
 
 uses
 uses
   SysUtils, Classes,
   SysUtils, Classes,
@@ -64,7 +65,7 @@ end;
 
 
 var
 var
   dataPath, outputPath : string;
   dataPath, outputPath : string;
-  stream, binStream, binStream2 : TMemoryStream;
+  stream, binStreamNE, binStreamOE, tmpStream : TMemoryStream;
   hangulSyllables : TCodePointRecArray;
   hangulSyllables : TCodePointRecArray;
   ucaBook : TUCA_DataBook;
   ucaBook : TUCA_DataBook;
   ucaPropBook : PUCA_PropBook;
   ucaPropBook : PUCA_PropBook;
@@ -124,12 +125,14 @@ begin
     Halt(1);
     Halt(1);
   end;
   end;
 
 
-  binStream2 := nil;
-  binStream := nil;
+  binStreamOE := nil;
+  binStreamNE := nil;
+  tmpStream := nil;
   stream := TMemoryStream.Create();
   stream := TMemoryStream.Create();
   try
   try
-    binStream := TMemoryStream.Create();
-    binStream2 := TMemoryStream.Create();
+    binStreamNE := TMemoryStream.Create();
+    binStreamOE := TMemoryStream.Create();
+    tmpStream := TMemoryStream.Create();
     WriteLn('Load file HangulSyllableType.txt ...', DateTimeToStr(Now));
     WriteLn('Load file HangulSyllableType.txt ...', DateTimeToStr(Now));
     stream.LoadFromFile(dataPath + 'HangulSyllableType.txt');
     stream.LoadFromFile(dataPath + 'HangulSyllableType.txt');
     stream.Position := 0;
     stream.Position := 0;
@@ -204,21 +207,25 @@ begin
     uca_CheckProp_2y(ucaBook,ucaPropBook,@ucaoFirstTable,@ucaoSecondTable);
     uca_CheckProp_2y(ucaBook,ucaPropBook,@ucaoFirstTable,@ucaoSecondTable);
 {$ENDIF UCA_TEST}
 {$ENDIF UCA_TEST}
     WriteLn('Generate UCA Props tables ...');
     WriteLn('Generate UCA Props tables ...');
-    binStream.Clear();
-    GenerateLicenceText(binStream);
-    GenerateUCA_PropTable(binStream,ucaPropBook);
+    binStreamNE.Clear();
+    binStreamOE.Clear();
+    GenerateLicenceText(binStreamNE);
+      GenerateLicenceText(binStreamOE);
+    GenerateUCA_PropTable(binStreamNE,ucaPropBook,ENDIAN_NATIVE);
+      GenerateUCA_PropTable(binStreamOE,ucaPropBook,ENDIAN_NON_NATIVE);
     WriteLn('Generate UCA BMP tables ...');
     WriteLn('Generate UCA BMP tables ...');
     stream.Clear();
     stream.Clear();
     GenerateLicenceText(stream);
     GenerateLicenceText(stream);
     GenerateUCA_Head(stream,@ucaBook,ucaPropBook);
     GenerateUCA_Head(stream,@ucaBook,ucaPropBook);
-    GenerateUCA_BmpTables(stream,binStream,ucaFirstTable,ucaSecondTable,THIS_ENDIAN);
+    GenerateUCA_BmpTables(stream,binStreamNE,binStreamOE,ucaFirstTable,ucaSecondTable);
     WriteLn('Generate UCA OBMP tables ...');
     WriteLn('Generate UCA OBMP tables ...');
-    GenerateUCA_OBmpTables(stream,binStream,ucaoFirstTable,ucaoSecondTable,THIS_ENDIAN);
+    GenerateUCA_OBmpTables(stream,binStreamNE,binStreamOE,ucaoFirstTable,ucaoSecondTable);
     stream.SaveToFile(outputPath + 'ucadata.inc');
     stream.SaveToFile(outputPath + 'ucadata.inc');
     s := outputPath + 'ucadata.inc';
     s := outputPath + 'ucadata.inc';
-    s := GenerateEndianIncludeFileName(s);
-    binStream.SaveToFile(s);
-    binStream.Clear();
+    binStreamNE.SaveToFile(GenerateEndianIncludeFileName(s,ENDIAN_NATIVE));
+      binStreamOE.SaveToFile(GenerateEndianIncludeFileName(s,ENDIAN_NON_NATIVE));
+    binStreamNE.Clear();
+    binStreamOE.Clear();
 
 
 
 
     stream.Clear();
     stream.Clear();
@@ -250,17 +257,18 @@ begin
       end;
       end;
     end;
     end;
 
 
-    binStream2.Clear();
+    binStreamNE.Clear();
+    binStreamOE.Clear();
     WriteLn('Source generation ...', DateTimeToStr(Now));
     WriteLn('Source generation ...', DateTimeToStr(Now));
     WriteLn('BMP Tables sources ...', DateTimeToStr(Now));
     WriteLn('BMP Tables sources ...', DateTimeToStr(Now));
       Generate3lvlBmpTables(stream,lvl3table1,lvl3table2,lvl3table3);
       Generate3lvlBmpTables(stream,lvl3table1,lvl3table2,lvl3table3);
     WriteLn('Properties Table sources ...', DateTimeToStr(Now));
     WriteLn('Properties Table sources ...', DateTimeToStr(Now));
-      binStream.Clear();
-      GenerateNumericTable(binStream,numericTable,True);
-      binStream.SaveToFile(outputPath + 'unicodenumtable.pas');
-      binStream.Clear();
-      GeneratePropTable(binStream,props,ekLittle);
-      GeneratePropTable(binStream2,props,ekBig);
+      tmpStream.Clear();
+      GenerateNumericTable(tmpStream,numericTable,True);
+      tmpStream.SaveToFile(outputPath + 'unicodenumtable.pas');
+      tmpStream.Clear();
+      GeneratePropTable(binStreamNE,props,ENDIAN_NATIVE);
+      GeneratePropTable(binStreamOE,props,ENDIAN_NON_NATIVE);
 //-------------------------------------------
 //-------------------------------------------
 
 
    r := Compress(data);
    r := Compress(data);
@@ -288,13 +296,13 @@ begin
 
 
   //---------------------
   //---------------------
     WriteLn('Decomposition  Table sources ...', DateTimeToStr(Now));
     WriteLn('Decomposition  Table sources ...', DateTimeToStr(Now));
-    GenerateDecompositionBookTable(binStream,decompositionBook,ekLittle);
-    GenerateDecompositionBookTable(binStream2,decompositionBook,ekBig);
+    GenerateDecompositionBookTable(binStreamNE,decompositionBook,ENDIAN_NATIVE);
+    GenerateDecompositionBookTable(binStreamOE,decompositionBook,ENDIAN_NON_NATIVE);
     stream.SaveToFile(outputPath + 'unicodedata.inc');
     stream.SaveToFile(outputPath + 'unicodedata.inc');
-    binStream.SaveToFile(outputPath + 'unicodedata_le.inc');
-    binStream2.SaveToFile(outputPath + 'unicodedata_be.inc');
-    binStream.Clear();
-    binStream2.Clear();
+    binStreamNE.SaveToFile(outputPath + 'unicodedata_'+ENDIAN_SUFFIX[ENDIAN_NATIVE]+'.inc');
+    binStreamOE.SaveToFile(outputPath + 'unicodedata_'+ENDIAN_SUFFIX[ENDIAN_NON_NATIVE]+'.inc');
+    binStreamNE.Clear();
+    binStreamOE.Clear();
 
 
 
 
     h := -1;
     h := -1;
@@ -378,8 +386,9 @@ begin
     end;
     end;
     stream.SaveToFile(outputPath + 'diff2.txt');
     stream.SaveToFile(outputPath + 'diff2.txt');
   finally
   finally
-    binStream2.Free();
-    binStream.Free();
+    tmpStream.Free();
+    binStreamOE.Free();
+    binStreamNE.Free();
     stream.Free();
     stream.Free();
   end;
   end;
 end.
 end.