helpers.odin
package math_big

/*
    Copyright 2021 Jeroen van Rijn <[email protected]>.
    Made available under Odin's BSD-3 license.

    An arbitrary precision mathematics implementation in Odin.
    For the theoretical underpinnings, see Knuth's The Art of Computer Programming, Volume 2, section 4.3.
    The code started out as an idiomatic source port of libTomMath, which is in the public domain, with thanks.
*/

import "core:intrinsics"
import rnd "core:math/rand"
// import "core:fmt"

/*
    TODO: `Int.flags` and constants like `ONE`, `NAN`, etc., are not yet properly handled everywhere.
*/

/*
    Deallocates the backing memory of one or more `Int`s.
*/
int_destroy :: proc(integers: ..^Int) {
    integers := integers;
    for a in &integers {
        assert_if_nil(a);
    }
    #force_inline internal_int_destroy(..integers);
}

/*
    Helpers to set an `Int` to a specific value.
*/
int_set_from_integer :: proc(dest: ^Int, src: $T, minimize := false, allocator := context.allocator) -> (err: Error)
    where intrinsics.type_is_integer(T) {
    context.allocator = allocator;

    src := src;
    /*
        Check that `dest` isn't `nil` and isn't immutable.
    */
    assert_if_nil(dest);
    #force_inline internal_error_if_immutable(dest) or_return;

    return #force_inline internal_int_set_from_integer(dest, src, minimize);
}

set :: proc { int_set_from_integer, int_copy, int_atoi, };
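
/*
    Usage sketch (illustrative only, not part of the API): shows the `set` overload group
    assigning a platform integer to an `Int`. The `_example_set` name and value are hypothetical.
*/
_example_set :: proc() -> (err: Error) {
    a := &Int{};
    defer destroy(a);

    set(a, 1234567890) or_return; // Resolves to the `int_set_from_integer` overload.
    return nil;
}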

/*
    Copy one `Int` to another.
*/
int_copy :: proc(dest, src: ^Int, minimize := false, allocator := context.allocator) -> (err: Error) {
    /*
        If `dest == src`, do nothing.
    */
    if (dest == src) { return nil; }

    /*
        Check that `src` is usable and `dest` isn't immutable.
    */
    assert_if_nil(dest, src);
    context.allocator = allocator;

    #force_inline internal_clear_if_uninitialized(src) or_return;
    #force_inline internal_error_if_immutable(dest) or_return;

    return #force_inline internal_int_copy(dest, src, minimize);
}
copy :: proc { int_copy, };

/*
    In normal code you can also write `a, b = b, a`.
    However, that swap is only visible within the current scope.
    This helper swaps the contents of the two `Int`s themselves, so the change is visible to every holder of these pointers.
*/
int_swap :: proc(a, b: ^Int) {
    assert_if_nil(a, b);
    #force_inline internal_swap(a, b);
}
swap :: proc { int_swap, };
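
/*
    Illustrative sketch of `swap` (names and values are hypothetical): after the call both
    pointers still refer to their original `Int` structs, but the values have been exchanged.
*/
_example_swap :: proc() -> (err: Error) {
    a, b := &Int{}, &Int{};
    defer destroy(a, b);

    set(a, 1) or_return;
    set(b, 2) or_return;

    swap(a, b); // `a` now holds 2, `b` now holds 1.
    return nil;
}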

/*
    Set `dest` to |`src`|.
*/
int_abs :: proc(dest, src: ^Int, allocator := context.allocator) -> (err: Error) {
    /*
        Check that `src` is usable and `dest` isn't immutable.
    */
    assert_if_nil(dest, src);
    context.allocator = allocator;

    #force_inline internal_clear_if_uninitialized(src) or_return;
    #force_inline internal_error_if_immutable(dest) or_return;

    return #force_inline internal_int_abs(dest, src);
}

platform_abs :: proc(n: $T) -> T where intrinsics.type_is_integer(T) {
    return n if n >= 0 else -n;
}
abs :: proc { int_abs, platform_abs, };

/*
    Set `dest` to `-src`.
*/
int_neg :: proc(dest, src: ^Int, allocator := context.allocator) -> (err: Error) {
    /*
        Check that `src` is usable and `dest` isn't immutable.
    */
    assert_if_nil(dest, src);
    context.allocator = allocator;

    #force_inline internal_clear_if_uninitialized(src) or_return;
    #force_inline internal_error_if_immutable(dest) or_return;

    return #force_inline internal_int_neg(dest, src);
}
neg :: proc { int_neg, };
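
/*
    Illustrative sketch of `abs` and `neg`; the `_example_abs_neg` name and values are hypothetical.
*/
_example_abs_neg :: proc() -> (err: Error) {
    src, dest := &Int{}, &Int{};
    defer destroy(src, dest);

    set(src, -42)  or_return;
    abs(dest, src) or_return; // dest == |src| == 42
    neg(dest, src) or_return; // dest == -src  == 42
    return nil;
}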

/*
    Helpers to extract values from the `Int`.
*/
int_bitfield_extract_single :: proc(a: ^Int, offset: int, allocator := context.allocator) -> (bit: _WORD, err: Error) {
    return #force_inline int_bitfield_extract(a, offset, 1, allocator);
}

int_bitfield_extract :: proc(a: ^Int, offset, count: int, allocator := context.allocator) -> (res: _WORD, err: Error) {
    /*
        Check that `a` is usable.
    */
    assert_if_nil(a);
    context.allocator = allocator;
    #force_inline internal_clear_if_uninitialized(a) or_return;

    return #force_inline internal_int_bitfield_extract(a, offset, count);
}
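
/*
    Illustrative sketch of `int_bitfield_extract`: reads `count` bits starting at bit `offset`
    and returns them in a `_WORD`. The `_example_bitfield_extract` name and values are hypothetical.
*/
_example_bitfield_extract :: proc() -> (err: Error) {
    a := &Int{};
    defer destroy(a);

    set(a, 0b1011_0000) or_return;
    bits := int_bitfield_extract(a, 4, 4) or_return; // bits == 0b1011
    _ = bits;
    return nil;
}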

/*
    Resize backing store.
*/
shrink :: proc(a: ^Int, allocator := context.allocator) -> (err: Error) {
    /*
        Check that `a` is usable.
    */
    assert_if_nil(a);
    context.allocator = allocator;
    #force_inline internal_clear_if_uninitialized(a) or_return;

    return #force_inline internal_shrink(a);
}

int_grow :: proc(a: ^Int, digits: int, allow_shrink := false, allocator := context.allocator) -> (err: Error) {
    /*
        Check that `a` is usable.
    */
    assert_if_nil(a);
    return #force_inline internal_int_grow(a, digits, allow_shrink, allocator);
}
grow :: proc { int_grow, };

/*
    Clear `Int` and resize it to the default size.
*/
int_clear :: proc(a: ^Int, minimize := false, allocator := context.allocator) -> (err: Error) {
    /*
        Check that `a` is usable.
    */
    assert_if_nil(a);
    return #force_inline internal_int_clear(a, minimize, allocator);
}
clear :: proc { int_clear, };
zero :: clear;

/*
    Set the `Int` to 1 and optionally shrink it to the minimum backing size.
*/
int_one :: proc(a: ^Int, minimize := false, allocator := context.allocator) -> (err: Error) {
    /*
        Check that `a` is usable.
    */
    assert_if_nil(a);
    return #force_inline internal_one(a, minimize, allocator);
}
one :: proc { int_one, };

/*
    Set the `Int` to -1 and optionally shrink it to the minimum backing size.
*/
int_minus_one :: proc(a: ^Int, minimize := false, allocator := context.allocator) -> (err: Error) {
    /*
        Check that `a` is usable.
    */
    assert_if_nil(a);
    return #force_inline internal_minus_one(a, minimize, allocator);
}
minus_one :: proc { int_minus_one, };

/*
    Set the `Int` to +Inf and optionally shrink it to the minimum backing size.
*/
int_inf :: proc(a: ^Int, minimize := false, allocator := context.allocator) -> (err: Error) {
    /*
        Check that `a` is usable.
    */
    assert_if_nil(a);
    return #force_inline internal_inf(a, minimize, allocator);
}
inf :: proc { int_inf, };

/*
    Set the `Int` to -Inf and optionally shrink it to the minimum backing size.
*/
int_minus_inf :: proc(a: ^Int, minimize := false, allocator := context.allocator) -> (err: Error) {
    /*
        Check that `a` is usable.
    */
    assert_if_nil(a);
    return #force_inline internal_minus_inf(a, minimize, allocator);
}
minus_inf :: proc { int_minus_inf, };

/*
    Set the `Int` to NaN and optionally shrink it to the minimum backing size.
*/
int_nan :: proc(a: ^Int, minimize := false, allocator := context.allocator) -> (err: Error) {
    /*
        Check that `a` is usable.
    */
    assert_if_nil(a);
    return #force_inline internal_nan(a, minimize, allocator);
}
nan :: proc { int_nan, };

power_of_two :: proc(a: ^Int, power: int, allocator := context.allocator) -> (err: Error) {
    /*
        Check that `a` is usable.
    */
    assert_if_nil(a);
    return #force_inline internal_int_power_of_two(a, power, allocator);
}

int_get_u128 :: proc(a: ^Int, allocator := context.allocator) -> (res: u128, err: Error) {
    /*
        Check that `a` is usable.
    */
    assert_if_nil(a);
    return int_get(a, u128, allocator);
}
get_u128 :: proc { int_get_u128, };

int_get_i128 :: proc(a: ^Int, allocator := context.allocator) -> (res: i128, err: Error) {
    /*
        Check that `a` is usable.
    */
    assert_if_nil(a);
    return int_get(a, i128, allocator);
}
get_i128 :: proc { int_get_i128, };

int_get_u64 :: proc(a: ^Int, allocator := context.allocator) -> (res: u64, err: Error) {
    /*
        Check that `a` is usable.
    */
    assert_if_nil(a);
    return int_get(a, u64, allocator);
}
get_u64 :: proc { int_get_u64, };

int_get_i64 :: proc(a: ^Int, allocator := context.allocator) -> (res: i64, err: Error) {
    /*
        Check that `a` is usable.
    */
    assert_if_nil(a);
    return int_get(a, i64, allocator);
}
get_i64 :: proc { int_get_i64, };

int_get_u32 :: proc(a: ^Int, allocator := context.allocator) -> (res: u32, err: Error) {
    /*
        Check that `a` is usable.
    */
    assert_if_nil(a);
    return int_get(a, u32, allocator);
}
get_u32 :: proc { int_get_u32, };

int_get_i32 :: proc(a: ^Int, allocator := context.allocator) -> (res: i32, err: Error) {
    /*
        Check that `a` is usable.
    */
    assert_if_nil(a);
    return int_get(a, i32, allocator);
}
get_i32 :: proc { int_get_i32, };

/*
    TODO: Think about using `count_bits` to check if the value could be returned completely,
    and maybe return max(T), .Integer_Overflow if not?
*/
int_get :: proc(a: ^Int, $T: typeid, allocator := context.allocator) -> (res: T, err: Error) where intrinsics.type_is_integer(T) {
    /*
        Check that `a` is usable.
    */
    assert_if_nil(a);
    #force_inline internal_clear_if_uninitialized(a, allocator) or_return;

    return #force_inline internal_int_get(a, T);
}
get :: proc { int_get, };

int_get_float :: proc(a: ^Int, allocator := context.allocator) -> (res: f64, err: Error) {
    /*
        Check that `a` is usable.
    */
    assert_if_nil(a);
    #force_inline internal_clear_if_uninitialized(a, allocator) or_return;

    return #force_inline internal_int_get_float(a);
}
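
/*
    Illustrative sketch of the `get` family: converts an `Int` back into a fixed-width integer
    or an `f64`. The `_example_get` name and values are hypothetical.
*/
_example_get :: proc() -> (err: Error) {
    a := &Int{};
    defer destroy(a);

    set(a, 65537) or_return;

    v := get_u64(a)       or_return; // v == 65537
    f := int_get_float(a) or_return; // f == 65537.0
    _, _ = v, f;
    return nil;
}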

/*
    Count bits in an `Int`.
*/
count_bits :: proc(a: ^Int, allocator := context.allocator) -> (count: int, err: Error) {
    /*
        Check that `a` is usable.
    */
    assert_if_nil(a);
    #force_inline internal_clear_if_uninitialized(a, allocator) or_return;

    return #force_inline internal_count_bits(a), nil;
}

/*
    Returns the number of trailing zeroes before the first `1` bit.
    Differs from regular `ctz` in that 0 returns 0.
*/
int_count_lsb :: proc(a: ^Int, allocator := context.allocator) -> (count: int, err: Error) {
    /*
        Check that `a` is usable.
    */
    assert_if_nil(a);
    #force_inline internal_clear_if_uninitialized(a, allocator) or_return;

    return #force_inline internal_int_count_lsb(a);
}

platform_count_lsb :: #force_inline proc(a: $T) -> (count: int)
    where intrinsics.type_is_integer(T) && intrinsics.type_is_unsigned(T) {
    return int(intrinsics.count_trailing_zeros(a)) if a > 0 else 0;
}
count_lsb :: proc { int_count_lsb, platform_count_lsb, };
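
/*
    Illustrative sketch of `count_bits` and `count_lsb`; the `_example_count` name and values are
    hypothetical. For 2^100, the bit count is 101 and there are 100 trailing zero bits.
*/
_example_count :: proc() -> (err: Error) {
    a := &Int{};
    defer destroy(a);

    power_of_two(a, 100) or_return;

    total    := count_bits(a) or_return; // total    == 101
    trailing := count_lsb(a)  or_return; // trailing == 100
    _, _ = total, trailing;
    return nil;
}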

int_random_digit :: proc(r: ^rnd.Rand = nil) -> (res: DIGIT) {
    when _DIGIT_BITS == 60 { // DIGIT = u64
        return DIGIT(rnd.uint64(r)) & _MASK;
    } else when _DIGIT_BITS == 28 { // DIGIT = u32
        return DIGIT(rnd.uint32(r)) & _MASK;
    } else {
        panic("Unsupported DIGIT size.");
    }

    return 0; // We shouldn't get here.
}

int_rand :: proc(dest: ^Int, bits: int, r: ^rnd.Rand = nil, allocator := context.allocator) -> (err: Error) {
    /*
        Check that `dest` is usable.
    */
    assert_if_nil(dest);
    return #force_inline internal_int_rand(dest, bits, r, allocator);
}
rand :: proc { int_rand, };
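
/*
    Illustrative sketch of `rand` (hypothetical name and bit count): fills `dest` with a random
    value of `bits` bits, using the default RNG when `r` is nil.
*/
_example_rand :: proc() -> (err: Error) {
    a := &Int{};
    defer destroy(a);

    rand(a, 256) or_return;           // Random `Int` of up to 256 bits.
    bits := count_bits(a) or_return;
    assert(bits <= 256);
    return nil;
}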

/*
    Internal helpers.
*/
assert_initialized :: proc(a: ^Int, loc := #caller_location) {
    assert_if_nil(a);
    assert(is_initialized(a), "`Int` was not properly initialized.", loc);
}

zero_unused :: proc(dest: ^Int, old_used := -1) {
    assert_if_nil(dest);
    if ! #force_inline is_initialized(dest) { return; }

    #force_inline internal_zero_unused(dest, old_used);
}

clear_if_uninitialized_single :: proc(arg: ^Int, allocator := context.allocator) -> (err: Error) {
    assert_if_nil(arg);
    return #force_inline internal_clear_if_uninitialized_single(arg, allocator);
}

clear_if_uninitialized_multi :: proc(args: ..^Int, allocator := context.allocator) -> (err: Error) {
    args := args;
    assert_if_nil(..args);
    for i in &args {
        #force_inline internal_clear_if_uninitialized_single(i, allocator) or_return;
    }
    return err;
}
clear_if_uninitialized :: proc { clear_if_uninitialized_single, clear_if_uninitialized_multi, };

error_if_immutable_single :: proc(arg: ^Int) -> (err: Error) {
    if arg != nil && .Immutable in arg.flags { return .Assignment_To_Immutable; }
    return nil;
}

error_if_immutable_multi :: proc(args: ..^Int) -> (err: Error) {
    for i in args {
        if i != nil && .Immutable in i.flags { return .Assignment_To_Immutable; }
    }
    return nil;
}
error_if_immutable :: proc { error_if_immutable_single, error_if_immutable_multi, };

/*
    Allocates several `Int`s at once.
*/
int_init_multi :: proc(integers: ..^Int, allocator := context.allocator) -> (err: Error) {
    assert_if_nil(..integers);
    integers := integers;
    for a in &integers {
        #force_inline internal_clear(a, true, allocator) or_return;
    }
    return nil;
}
init_multi :: proc { int_init_multi, };

copy_digits :: proc(dest, src: ^Int, digits: int, offset := int(0), allocator := context.allocator) -> (err: Error) {
    context.allocator = allocator;
    /*
        Check that `src` is usable and `dest` isn't immutable.
    */
    assert_if_nil(dest, src);
    #force_inline internal_clear_if_uninitialized(src) or_return;

    return #force_inline internal_copy_digits(dest, src, digits, offset);
}

/*
    Trim unused digits.
    This is used to ensure that leading zero digits are trimmed and the leading "used" digit will be non-zero.
    Typically very fast. Also fixes the sign if there are no more leading digits.
*/
clamp :: proc(a: ^Int, allocator := context.allocator) -> (err: Error) {
    assert_if_nil(a);
    #force_inline internal_clear_if_uninitialized(a, allocator) or_return;

    for a.used > 0 && a.digit[a.used - 1] == 0 {
        a.used -= 1;
    }

    if z, _ := is_zero(a); z {
        a.sign = .Zero_or_Positive;
    }
    return nil;
}

/*
    Calculate the size in bytes of the binary representation of `a`.
*/
int_to_bytes_size :: proc(a: ^Int, signed := false, allocator := context.allocator) -> (size_in_bytes: int, err: Error) {
    assert_if_nil(a);
    #force_inline internal_clear_if_uninitialized(a, allocator) or_return;

    size_in_bits := internal_count_bits(a);
    size_in_bytes = (size_in_bits / 8);
    size_in_bytes += 0 if size_in_bits % 8 == 0 else 1;
    size_in_bytes += 1 if signed else 0;
    return;
}

/*
    Return the Little Endian binary representation of `a`, either signed or unsigned.
    If `a` is negative and we ask for the default unsigned representation, we return abs(a).
*/
int_to_bytes_little :: proc(a: ^Int, buf: []u8, signed := false, allocator := context.allocator) -> (err: Error) {
    assert_if_nil(a);
    size_in_bytes := int_to_bytes_size(a, signed, allocator) or_return;

    l := len(buf);
    if size_in_bytes > l { return .Buffer_Overflow; }

    size_in_bits := internal_count_bits(a);

    i := 0;
    if signed {
        buf[l - 1] = 1 if a.sign == .Negative else 0;
    }

    for offset := 0; offset < size_in_bits; offset += 8 {
        bits, _ := internal_int_bitfield_extract(a, offset, 8);
        buf[i] = u8(bits & 255); i += 1;
    }
    return;
}
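
/*
    Illustrative serialization sketch: query the required buffer size with `int_to_bytes_size`,
    then write the Little Endian bytes. The `_example_to_bytes_little` name and value are hypothetical.
*/
_example_to_bytes_little :: proc(allocator := context.allocator) -> (err: Error) {
    a := &Int{};
    defer destroy(a);
    set(a, 0x1234_5678) or_return;

    size := int_to_bytes_size(a) or_return;
    buf  := make([]u8, size, allocator);
    defer delete(buf, allocator);

    int_to_bytes_little(a, buf) or_return; // buf == {0x78, 0x56, 0x34, 0x12}
    return nil;
}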

/*
    Return the Big Endian binary representation of `a`, either signed or unsigned.
    If `a` is negative and we ask for the default unsigned representation, we return abs(a).
*/
int_to_bytes_big :: proc(a: ^Int, buf: []u8, signed := false, allocator := context.allocator) -> (err: Error) {
    assert_if_nil(a);
    size_in_bytes := int_to_bytes_size(a, signed, allocator) or_return;

    l := len(buf);
    if size_in_bytes > l { return .Buffer_Overflow; }

    size_in_bits := internal_count_bits(a);

    i := l - 1;
    if signed {
        buf[0] = 1 if a.sign == .Negative else 0;
    }

    for offset := 0; offset < size_in_bits; offset += 8 {
        bits, _ := internal_int_bitfield_extract(a, offset, 8);
        buf[i] = u8(bits & 255); i -= 1;
    }
    return;
}

/*
    Return the Python 3.x compatible Little Endian binary representation of `a`, either signed or unsigned.
    If `a` is negative when asking for an unsigned number, we return an error like Python does.
*/
int_to_bytes_little_python :: proc(a: ^Int, buf: []u8, signed := false, allocator := context.allocator) -> (err: Error) {
    assert_if_nil(a);
    if !signed && a.sign == .Negative { return .Invalid_Argument; }

    l := len(buf);
    size_in_bytes := int_to_bytes_size(a, signed, allocator) or_return;
    if size_in_bytes > l { return .Buffer_Overflow; }

    if a.sign == .Negative {
        t := &Int{};
        defer destroy(t);
        internal_complement(t, a, allocator) or_return;

        size_in_bits := internal_count_bits(t);

        i := 0;
        for offset := 0; offset < size_in_bits; offset += 8 {
            bits, _ := internal_int_bitfield_extract(t, offset, 8);
            buf[i] = 255 - u8(bits & 255); i += 1;
        }
        buf[l-1] = 255;
    } else {
        size_in_bits := internal_count_bits(a);

        i := 0;
        for offset := 0; offset < size_in_bits; offset += 8 {
            bits, _ := internal_int_bitfield_extract(a, offset, 8);
            buf[i] = u8(bits & 255); i += 1;
        }
    }
    return;
}

/*
    Return the Python 3.x compatible Big Endian binary representation of `a`, either signed or unsigned.
    If `a` is negative when asking for an unsigned number, we return an error like Python does.
*/
int_to_bytes_big_python :: proc(a: ^Int, buf: []u8, signed := false, allocator := context.allocator) -> (err: Error) {
    assert_if_nil(a);
    if !signed && a.sign == .Negative { return .Invalid_Argument; }
    if a.sign == .Zero_or_Positive { return int_to_bytes_big(a, buf, signed, allocator); }

    l := len(buf);
    size_in_bytes := int_to_bytes_size(a, signed, allocator) or_return;
    if size_in_bytes > l { return .Buffer_Overflow; }

    t := &Int{};
    defer destroy(t);
    internal_complement(t, a, allocator) or_return;

    size_in_bits := internal_count_bits(t);

    i := l - 1;
    for offset := 0; offset < size_in_bits; offset += 8 {
        bits, _ := internal_int_bitfield_extract(t, offset, 8);
        buf[i] = 255 - u8(bits & 255); i -= 1;
    }
    buf[0] = 255;
    return;
}

/*
    Read `Int` from a Big Endian binary representation.
    Sign is detected from the first byte if `signed` is true.
*/
int_from_bytes_big :: proc(a: ^Int, buf: []u8, signed := false, allocator := context.allocator) -> (err: Error) {
    assert_if_nil(a);
    buf := buf;
    l := len(buf);
    if l == 0 { return .Invalid_Argument; }

    sign: Sign;
    size_in_bits := l * 8;
    if signed {
        /*
            First byte denotes the sign.
        */
        size_in_bits -= 8;
    }

    size_in_digits := (size_in_bits + _DIGIT_BITS - 1) / _DIGIT_BITS;
    size_in_digits += 0 if size_in_bits % 8 == 0 else 1;

    internal_zero(a, false, allocator) or_return;
    internal_grow(a, size_in_digits, false, allocator) or_return;

    if signed {
        sign = .Zero_or_Positive if buf[0] == 0 else .Negative;
        buf = buf[1:];
    }

    for v in buf {
        internal_shl(a, a, 8) or_return;
        a.digit[0] |= DIGIT(v);
    }

    a.sign = sign;
    a.used = size_in_digits;

    return internal_clamp(a);
}
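
/*
    Illustrative round-trip sketch: serialize with `int_to_bytes_big` and read the result back
    with `int_from_bytes_big`. The `_example_bytes_big_roundtrip` name and value are hypothetical.
*/
_example_bytes_big_roundtrip :: proc(allocator := context.allocator) -> (err: Error) {
    a, b := &Int{}, &Int{};
    defer destroy(a, b);
    set(a, 0x1234_5678) or_return;

    size := int_to_bytes_size(a) or_return;
    buf  := make([]u8, size, allocator);
    defer delete(buf, allocator);

    int_to_bytes_big(a, buf)   or_return; // buf == {0x12, 0x34, 0x56, 0x78}
    int_from_bytes_big(b, buf) or_return; // `b` should now equal `a`.
    return nil;
}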

/*
    Read `Int` from a Big Endian Python binary representation.
    Sign is detected from the first byte if `signed` is true.
*/
int_from_bytes_big_python :: proc(a: ^Int, buf: []u8, signed := false, allocator := context.allocator) -> (err: Error) {
    assert_if_nil(a);
    buf := buf;
    l := len(buf);
    if l == 0 { return .Invalid_Argument; }

    sign: Sign;
    size_in_bits := l * 8;
    if signed {
        /*
            First byte denotes the sign.
        */
        size_in_bits -= 8;
    }

    size_in_digits := (size_in_bits + _DIGIT_BITS - 1) / _DIGIT_BITS;
    size_in_digits += 0 if size_in_bits % 8 == 0 else 1;

    internal_zero(a, false, allocator) or_return;
    internal_grow(a, size_in_digits, false, allocator) or_return;

    if signed {
        sign = .Zero_or_Positive if buf[0] == 0 else .Negative;
        buf = buf[1:];
    }

    for v in buf {
        internal_shl(a, a, 8) or_return;
        if signed && sign == .Negative {
            a.digit[0] |= DIGIT(255 - v);
        } else {
            a.digit[0] |= DIGIT(v);
        }
    }

    a.sign = sign;
    a.used = size_in_digits;
    internal_clamp(a) or_return;

    if signed && sign == .Negative {
        return internal_sub(a, a, 1);
    }
    return nil;
}

/*
    Read `Int` from a Little Endian binary representation.
    Sign is detected from the last byte if `signed` is true.
*/
int_from_bytes_little :: proc(a: ^Int, buf: []u8, signed := false, allocator := context.allocator) -> (err: Error) {
    assert_if_nil(a);
    buf := buf;
    l := len(buf);
    if l == 0 { return .Invalid_Argument; }

    sign: Sign;
    size_in_bits := l * 8;
    if signed {
        /*
            Last byte denotes the sign.
        */
        size_in_bits -= 8;
    }

    size_in_digits := (size_in_bits + _DIGIT_BITS - 1) / _DIGIT_BITS;
    size_in_digits += 0 if size_in_bits % 8 == 0 else 1;

    internal_zero(a, false, allocator) or_return;
    internal_grow(a, size_in_digits, false, allocator) or_return;

    if signed {
        sign = .Zero_or_Positive if buf[l-1] == 0 else .Negative;
        buf = buf[:l-1];
        l -= 1;
    }

    for _, i in buf {
        internal_shl(a, a, 8) or_return;
        a.digit[0] |= DIGIT(buf[l-i-1]);
    }

    a.sign = sign;
    a.used = size_in_digits;

    return internal_clamp(a);
}

/*
    Read `Int` from a Little Endian Python binary representation.
    Sign is detected from the last byte if `signed` is true.
*/
int_from_bytes_little_python :: proc(a: ^Int, buf: []u8, signed := false, allocator := context.allocator) -> (err: Error) {
    assert_if_nil(a);
    buf := buf;
    l := len(buf);
    if l == 0 { return .Invalid_Argument; }

    sign: Sign;
    size_in_bits := l * 8;
    if signed {
        /*
            Last byte denotes the sign.
        */
        size_in_bits -= 8;
    }

    size_in_digits := (size_in_bits + _DIGIT_BITS - 1) / _DIGIT_BITS;
    size_in_digits += 0 if size_in_bits % 8 == 0 else 1;

    internal_zero(a, false, allocator) or_return;
    internal_grow(a, size_in_digits, false, allocator) or_return;

    if signed {
        sign = .Zero_or_Positive if buf[l-1] == 0 else .Negative;
        buf = buf[:l-1];
        l -= 1;
    }

    for _, i in buf {
        internal_shl(a, a, 8) or_return;
        if signed && sign == .Negative {
            a.digit[0] |= DIGIT(255 - buf[l-i-1]);
        } else {
            a.digit[0] |= DIGIT(buf[l-i-1]);
        }
    }

    a.sign = sign;
    a.used = size_in_digits;
    internal_clamp(a) or_return;

    if signed && sign == .Negative {
        return internal_sub(a, a, 1);
    }
    return nil;
}

/*
    Initialize constants.
*/
INT_ONE, INT_ZERO, INT_MINUS_ONE, INT_INF, INT_MINUS_INF, INT_NAN := &Int{}, &Int{}, &Int{}, &Int{}, &Int{}, &Int{};

initialize_constants :: proc() -> (res: int) {
    internal_set(     INT_ZERO,  0);      INT_ZERO.flags = {.Immutable};
    internal_set(      INT_ONE,  1);       INT_ONE.flags = {.Immutable};
    internal_set(INT_MINUS_ONE, -1); INT_MINUS_ONE.flags = {.Immutable};

    /*
        We set these special values to 1 or -1 so they aren't mistaken for zero accidentally.
        This allows for shortcut tests of `is_zero` as `.used == 0`.
    */
    internal_set(      INT_NAN,  1);       INT_NAN.flags = {.Immutable, .NaN};
    internal_set(      INT_INF,  1);       INT_INF.flags = {.Immutable, .Inf};
    internal_set(INT_MINUS_INF, -1); INT_MINUS_INF.flags = {.Immutable, .Inf};

    return _DEFAULT_MUL_KARATSUBA_CUTOFF;
}
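
/*
    Illustrative sketch: a typical program initializes the package constants once at startup and
    optionally destroys them on shutdown. `_example_main` is a hypothetical entry point.
*/
_example_main :: proc() {
    _ = initialize_constants();
    defer destroy_constants();

    // ... use the package ...
}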

/*
    Destroy constants.
    Optional for an EXE, as this would be called at the very end of a process.
*/
destroy_constants :: proc() {
    internal_destroy(INT_ONE, INT_ZERO, INT_MINUS_ONE, INT_INF, INT_MINUS_INF, INT_NAN);
}

assert_if_nil :: #force_inline proc(integers: ..^Int, loc := #caller_location) {
    integers := integers;
    for i in &integers {
        assert(i != nil, "(nil)", loc);
    }
}