Browse Source

Improve the `PtrSet` to be as simple and small as possible

gingerBill 2 years ago
parent
commit
d06a0e7093
5 changed files with 157 additions and 202 deletions
  1. 1 0
      src/check_stmt.cpp
  2. 13 20
      src/checker.cpp
  3. 1 1
      src/ptr_map.cpp
  4. 132 171
      src/ptr_set.cpp
  5. 10 10
      src/threading.cpp

+ 1 - 0
src/check_stmt.cpp

@@ -1289,6 +1289,7 @@ gb_internal void check_type_switch_stmt(CheckerContext *ctx, Ast *node, u32 mod_
 		for (Type *t : variants) {
 		for (Type *t : variants) {
 			if (!type_ptr_set_exists(&seen, t)) {
 			if (!type_ptr_set_exists(&seen, t)) {
 				array_add(&unhandled, t);
 				array_add(&unhandled, t);
+				gb_printf_err("HERE: %p %s\n", t, type_to_string(t));
 			}
 			}
 		}
 		}
 
 

+ 13 - 20
src/checker.cpp

@@ -66,15 +66,10 @@ gb_internal void scope_reserve(Scope *scope, isize capacity) {
 }
 }
 
 
 gb_internal void entity_graph_node_set_destroy(EntityGraphNodeSet *s) {
 gb_internal void entity_graph_node_set_destroy(EntityGraphNodeSet *s) {
-	if (s->hashes.data != nullptr) {
-		ptr_set_destroy(s);
-	}
+	ptr_set_destroy(s);
 }
 }
 
 
 gb_internal void entity_graph_node_set_add(EntityGraphNodeSet *s, EntityGraphNode *n) {
 gb_internal void entity_graph_node_set_add(EntityGraphNodeSet *s, EntityGraphNode *n) {
-	if (s->hashes.data == nullptr) {
-		ptr_set_init(s);
-	}
 	ptr_set_add(s, n);
 	ptr_set_add(s, n);
 }
 }
 
 
@@ -2556,7 +2551,6 @@ gb_internal Array<EntityGraphNode *> generate_entity_dependency_graph(CheckerInf
 							}
 							}
 							// IMPORTANT NOTE/TODO(bill, 2020-11-15): These three calls take the majority of the
 							// IMPORTANT NOTE/TODO(bill, 2020-11-15): These three calls take the majority of the
 							// the time to process
 							// the time to process
-
 							entity_graph_node_set_add(&p->succ, s);
 							entity_graph_node_set_add(&p->succ, s);
 							entity_graph_node_set_add(&s->pred, p);
 							entity_graph_node_set_add(&s->pred, p);
 							// Remove edge to 'n'
 							// Remove edge to 'n'
@@ -2577,7 +2571,7 @@ gb_internal Array<EntityGraphNode *> generate_entity_dependency_graph(CheckerInf
 	for_array(i, G) {
 	for_array(i, G) {
 		EntityGraphNode *n = G[i];
 		EntityGraphNode *n = G[i];
 		n->index = i;
 		n->index = i;
-		n->dep_count = n->succ.entries.count;
+		n->dep_count = n->succ.count;
 		GB_ASSERT(n->dep_count >= 0);
 		GB_ASSERT(n->dep_count >= 0);
 	}
 	}
 
 
@@ -4228,7 +4222,7 @@ gb_internal Array<ImportGraphNode *> generate_import_dependency_graph(Checker *c
 	for (auto const &entry : M) {
 	for (auto const &entry : M) {
 		auto n = entry.value;
 		auto n = entry.value;
 		n->index = i++;
 		n->index = i++;
-		n->dep_count = n->succ.entries.count;
+		n->dep_count = n->succ.count;
 		GB_ASSERT(n->dep_count >= 0);
 		GB_ASSERT(n->dep_count >= 0);
 		array_add(&G, n);
 		array_add(&G, n);
 	}
 	}
@@ -5706,17 +5700,6 @@ gb_internal void check_parsed_files(Checker *c) {
 		check_scope_usage(c, f->scope);
 		check_scope_usage(c, f->scope);
 	}
 	}
 
 
-	TIME_SECTION("add untyped expression values");
-	// Add untyped expression values
-	for (UntypedExprInfo u = {}; mpmc_dequeue(&c->global_untyped_queue, &u); /**/) {
-		GB_ASSERT(u.expr != nullptr && u.info != nullptr);
-		if (is_type_typed(u.info->type)) {
-			compiler_error("%s (type %s) is typed!", expr_to_string(u.expr), type_to_string(u.info->type));
-		}
-		add_type_and_value(&c->builtin_ctx, u.expr, u.info->mode, u.info->type, u.info->value);
-	}
-
-
 	TIME_SECTION("add basic type information");
 	TIME_SECTION("add basic type information");
 	// Add "Basic" type information
 	// Add "Basic" type information
 	for (isize i = 0; i < Basic_COUNT; i++) {
 	for (isize i = 0; i < Basic_COUNT; i++) {
@@ -5810,6 +5793,16 @@ gb_internal void check_parsed_files(Checker *c) {
 	GB_ASSERT(c->info.entity_queue.count.load(std::memory_order_relaxed) == 0);
 	GB_ASSERT(c->info.entity_queue.count.load(std::memory_order_relaxed) == 0);
 	GB_ASSERT(c->info.definition_queue.count.load(std::memory_order_relaxed) == 0);
 	GB_ASSERT(c->info.definition_queue.count.load(std::memory_order_relaxed) == 0);
 
 
+	TIME_SECTION("add untyped expression values");
+	// Add untyped expression values
+	for (UntypedExprInfo u = {}; mpmc_dequeue(&c->global_untyped_queue, &u); /**/) {
+		GB_ASSERT(u.expr != nullptr && u.info != nullptr);
+		if (is_type_typed(u.info->type)) {
+			compiler_error("%s (type %s) is typed!", expr_to_string(u.expr), type_to_string(u.info->type));
+		}
+		add_type_and_value(&c->builtin_ctx, u.expr, u.info->mode, u.info->type, u.info->value);
+	}
+
 	TIME_SECTION("sort init procedures");
 	TIME_SECTION("sort init procedures");
 	check_sort_init_procedures(c);
 	check_sort_init_procedures(c);
 
 

+ 1 - 1
src/ptr_map.cpp

@@ -41,7 +41,7 @@ gb_internal gb_inline u32 ptr_map_hash_key(uintptr key) {
 	u32 word = ((state >> ((state >> 28u) + 4u)) ^ state) * 277803737u;
 	u32 word = ((state >> ((state >> 28u) + 4u)) ^ state) * 277803737u;
 	res = (word >> 22u) ^ word;
 	res = (word >> 22u) ^ word;
 #endif
 #endif
-	return res ^ (res == MAP_SENTINEL);
+	return res;
 }
 }
 gb_internal gb_inline u32 ptr_map_hash_key(void const *key) {
 gb_internal gb_inline u32 ptr_map_hash_key(void const *key) {
 	return ptr_map_hash_key((uintptr)key);
 	return ptr_map_hash_key((uintptr)key);

+ 132 - 171
src/ptr_set.cpp

@@ -1,19 +1,22 @@
 template <typename T>
 template <typename T>
-struct PtrSetEntry {
-	static_assert(sizeof(T) == sizeof(void *), "Key size must be pointer size");
-
-	T        ptr;
-	MapIndex next;
+struct TypeIsPointer {
+	enum {value = false};
+};
 
 
-	operator T() const noexcept {
-		return this->ptr;
-	}
+template <typename T>
+struct TypeIsPointer<T *> {
+	enum {value = true};
 };
 };
 
 
+
 template <typename T>
 template <typename T>
 struct PtrSet {
 struct PtrSet {
-	Slice<MapIndex>       hashes;
-	Array<PtrSetEntry<T>> entries;
+	static_assert(TypeIsPointer<T>::value, "PtrSet::T must be a pointer");
+	static constexpr T TOMBSTONE = (T)(~uintptr(0));
+
+	T *   keys;
+	usize count;
+	usize capacity;
 };
 };
 
 
 template <typename T> gb_internal void ptr_set_init   (PtrSet<T> *s, isize capacity = 16);
 template <typename T> gb_internal void ptr_set_init   (PtrSet<T> *s, isize capacity = 16);
@@ -30,225 +33,183 @@ gb_internal gbAllocator ptr_set_allocator(void) {
 
 
 template <typename T>
 template <typename T>
 gb_internal void ptr_set_init(PtrSet<T> *s, isize capacity) {
 gb_internal void ptr_set_init(PtrSet<T> *s, isize capacity) {
+	GB_ASSERT(s->keys == nullptr);
 	if (capacity != 0) {
 	if (capacity != 0) {
 		capacity = next_pow2_isize(gb_max(16, capacity));
 		capacity = next_pow2_isize(gb_max(16, capacity));
+		s->keys = gb_alloc_array(ptr_set_allocator(), T, capacity);
+		// This memory will be zeroed, no need to explicitly zero it
 	}
 	}
-
-	slice_init(&s->hashes,  ptr_set_allocator(), capacity);
-	array_init(&s->entries, ptr_set_allocator(), 0, capacity);
-	for (isize i = 0; i < capacity; i++) {
-		s->hashes.data[i] = MAP_SENTINEL;
-	}
+	s->count = 0;
+	s->capacity = capacity;
 }
 }
 
 
 template <typename T>
 template <typename T>
 gb_internal void ptr_set_destroy(PtrSet<T> *s) {
 gb_internal void ptr_set_destroy(PtrSet<T> *s) {
-	if (s->entries.allocator.proc == nullptr) {
-		s->entries.allocator = ptr_set_allocator();
-	}
-	slice_free(&s->hashes, s->entries.allocator);
-	array_free(&s->entries);
+	gb_free(ptr_set_allocator(), s->keys);
+	s->keys = nullptr;
+	s->count = 0;
+	s->capacity = 0;
 }
 }
 
 
 template <typename T>
 template <typename T>
-gb_internal MapIndex ptr_set__add_entry(PtrSet<T> *s, T ptr) {
-	PtrSetEntry<T> e = {};
-	e.ptr = ptr;
-	e.next = MAP_SENTINEL;
-	array_add(&s->entries, e);
-	return cast(MapIndex)(s->entries.count-1);
-}
-
-
-template <typename T>
-gb_internal MapFindResult ptr_set__find(PtrSet<T> *s, T ptr) {
-	MapFindResult fr = {MAP_SENTINEL, MAP_SENTINEL, MAP_SENTINEL};
-	if (s->hashes.count != 0) {
-		u32 hash = ptr_map_hash_key(ptr);
-		fr.hash_index = cast(MapIndex)(hash & (s->hashes.count-1));
-		fr.entry_index = s->hashes.data[fr.hash_index];
-		while (fr.entry_index != MAP_SENTINEL) {
-			if (s->entries.data[fr.entry_index].ptr == ptr) {
-				return fr;
+gb_internal isize ptr_set__find(PtrSet<T> *s, T ptr) {
+	GB_ASSERT(ptr != nullptr);
+	if (s->count != 0) {
+	#if 0
+		for (usize i = 0; i < s->capacity; i++) {
+			if (s->keys[i] == ptr) {
+				return i;
 			}
 			}
-			fr.entry_prev = fr.entry_index;
-			fr.entry_index = s->entries.data[fr.entry_index].next;
 		}
 		}
-	}
-	return fr;
-}
-
-template <typename T>
-gb_internal MapFindResult ptr_set__find_from_entry(PtrSet<T> *s, PtrSetEntry<T> *e) {
-	MapFindResult fr = {MAP_SENTINEL, MAP_SENTINEL, MAP_SENTINEL};
-	if (s->hashes.count != 0) {
-		u32 hash = ptr_map_hash_key(e->ptr);
-		fr.hash_index = cast(MapIndex)(hash & (s->hashes.count-1));
-		fr.entry_index = s->hashes.data[fr.hash_index];
-		while (fr.entry_index != MAP_SENTINEL) {
-			if (&s->entries.data[fr.entry_index] == e) {
-				return fr;
+	#else
+		u32 hash = ptr_map_hash_key(ptr);
+		usize mask = s->capacity-1;
+		usize hash_index = cast(usize)hash & mask;
+		for (usize i = 0; i < s->capacity; i++) {
+			T key = s->keys[hash_index];
+			if (key == ptr) {
+				return hash_index;
+			} else if (key == nullptr) {
+				return -1;
 			}
 			}
-			fr.entry_prev = fr.entry_index;
-			fr.entry_index = s->entries.data[fr.entry_index].next;
+			hash_index = (hash_index+1)&mask;
 		}
 		}
+	#endif
 	}
 	}
-	return fr;
+	return -1;
 }
 }
 
 
 template <typename T>
 template <typename T>
 gb_internal bool ptr_set__full(PtrSet<T> *s) {
 gb_internal bool ptr_set__full(PtrSet<T> *s) {
-	return 0.75f * s->hashes.count <= s->entries.count;
+	return 0.75f * s->capacity <= s->count;
 }
 }
 
 
 template <typename T>
 template <typename T>
-gb_internal void ptr_set_reset_entries(PtrSet<T> *s) {
-	for (isize i = 0; i < s->hashes.count; i++) {
-		s->hashes.data[i] = MAP_SENTINEL;
-	}
-	for (isize i = 0; i < s->entries.count; i++) {
-		MapFindResult fr;
-		PtrSetEntry<T> *e = &s->entries.data[i];
-		e->next = MAP_SENTINEL;
-		fr = ptr_set__find_from_entry(s, e);
-		if (fr.entry_prev == MAP_SENTINEL) {
-			s->hashes[fr.hash_index] = cast(MapIndex)i;
-		} else {
-			s->entries[fr.entry_prev].next = cast(MapIndex)i;
-		}
+gb_internal gb_inline void ptr_set_grow(PtrSet<T> *old_set) {
+	if (old_set->capacity == 0) {
+		ptr_set_init(old_set);
+		return;
 	}
 	}
-}
 
 
-template <typename T>
-gb_internal void ptr_set_reserve(PtrSet<T> *s, isize cap) {
-	if (s->entries.allocator.proc == nullptr) {
-		s->entries.allocator = ptr_set_allocator();
-	}
-	array_reserve(&s->entries, cap);
-	if (s->entries.count*2 < s->hashes.count) {
-		return;
+	PtrSet<T> new_set = {};
+	ptr_set_init(&new_set, gb_max(old_set->capacity<<1, 16));
+
+	for (T ptr : *old_set) {
+		bool was_new = ptr_set_update(&new_set, ptr);
+		GB_ASSERT(!was_new);
 	}
 	}
-	slice_resize(&s->hashes, s->entries.allocator, cap*2);
-	ptr_set_reset_entries(s);
-}
+	GB_ASSERT(old_set->count == new_set.count);
 
 
-template <typename T>
-gb_internal gb_inline void ptr_set_grow(PtrSet<T> *s) {
-	isize new_count = gb_max(s->hashes.count<<1, 16);
-	ptr_set_reserve(s, new_count);
+	ptr_set_destroy(old_set);
+
+	*old_set = new_set;
 }
 }
 
 
 
 
 template <typename T>
 template <typename T>
 gb_internal gb_inline bool ptr_set_exists(PtrSet<T> *s, T ptr) {
 gb_internal gb_inline bool ptr_set_exists(PtrSet<T> *s, T ptr) {
-	isize index = ptr_set__find(s, ptr).entry_index;
-	return index != MAP_SENTINEL;
+	return ptr_set__find(s, ptr) >= 0;
 }
 }
 
 
-// Returns true if it already exists
-template <typename T>
-gb_internal T ptr_set_add(PtrSet<T> *s, T ptr) {
-	MapIndex index;
-	MapFindResult fr;
-	if (s->hashes.count == 0) {
-		ptr_set_grow(s);
-	}
-	fr = ptr_set__find(s, ptr);
-	if (fr.entry_index == MAP_SENTINEL) {
-		index = ptr_set__add_entry(s, ptr);
-		if (fr.entry_prev != MAP_SENTINEL) {
-			s->entries.data[fr.entry_prev].next = index;
-		} else {
-			s->hashes.data[fr.hash_index] = index;
-		}
-	}
-	if (ptr_set__full(s)) {
-		ptr_set_grow(s);
-	}
-	return ptr;
-}
 
 
 template <typename T>
 template <typename T>
 gb_internal bool ptr_set_update(PtrSet<T> *s, T ptr) { // returns true if it previously existsed
 gb_internal bool ptr_set_update(PtrSet<T> *s, T ptr) { // returns true if it previously existsed
-	bool exists = false;
-	MapIndex index;
-	MapFindResult fr;
-	if (s->hashes.count == 0) {
+	if (ptr_set_exists(s, ptr)) {
+		return true;
+	}
+
+	if (s->keys == nullptr) {
+		ptr_set_init(s);
+	} else if (ptr_set__full(s)) {
 		ptr_set_grow(s);
 		ptr_set_grow(s);
 	}
 	}
-	fr = ptr_set__find(s, ptr);
-	if (fr.entry_index != MAP_SENTINEL) {
-		exists = true;
-	} else {
-		index = ptr_set__add_entry(s, ptr);
-		if (fr.entry_prev != MAP_SENTINEL) {
-			s->entries.data[fr.entry_prev].next = index;
-		} else {
-			s->hashes.data[fr.hash_index] = index;
+	GB_ASSERT(s->count < s->capacity);
+	GB_ASSERT(s->capacity >= 0);
+
+	usize mask = s->capacity-1;
+	u32 hash = ptr_map_hash_key(ptr);
+	usize hash_index = (cast(usize)hash) & mask;
+	GB_ASSERT(hash_index < s->capacity);
+	for (usize i = 0; i < s->capacity; i++) {
+		T *key = &s->keys[hash_index];
+		GB_ASSERT(*key != ptr);
+		if (*key == PtrSet<T>::TOMBSTONE || *key == nullptr) {
+			*key = ptr;
+			s->count++;
+			return false;
 		}
 		}
+		hash_index = (hash_index+1)&mask;
 	}
 	}
-	if (ptr_set__full(s)) {
-		ptr_set_grow(s);
-	}
-	return exists;
-}
-
 
 
+	GB_PANIC("ptr set out of memory");
+	return false;
+}
 
 
 template <typename T>
 template <typename T>
-gb_internal void ptr_set__erase(PtrSet<T> *s, MapFindResult fr) {
-	MapFindResult last;
-	if (fr.entry_prev == MAP_SENTINEL) {
-		s->hashes.data[fr.hash_index] = s->entries.data[fr.entry_index].next;
-	} else {
-		s->entries.data[fr.entry_prev].next = s->entries.data[fr.entry_index].next;
-	}
-	if (cast(isize)fr.entry_index == s->entries.count-1) {
-		array_pop(&s->entries);
-		return;
-	}
-	s->entries.data[fr.entry_index] = s->entries.data[s->entries.count-1];
-	last = ptr_set__find(s, s->entries.data[fr.entry_index].ptr);
-	if (last.entry_prev != MAP_SENTINEL) {
-		s->entries.data[last.entry_prev].next = fr.entry_index;
-	} else {
-		s->hashes.data[last.hash_index] = fr.entry_index;
-	}
+gb_internal T ptr_set_add(PtrSet<T> *s, T ptr) {
+	ptr_set_update(s, ptr);
+	return ptr;
 }
 }
 
 
+
 template <typename T>
 template <typename T>
 gb_internal void ptr_set_remove(PtrSet<T> *s, T ptr) {
 gb_internal void ptr_set_remove(PtrSet<T> *s, T ptr) {
-	MapFindResult fr = ptr_set__find(s, ptr);
-	if (fr.entry_index != MAP_SENTINEL) {
-		ptr_set__erase(s, fr);
+	isize index = ptr_set__find(s, ptr);
+	if (index >= 0) {
+		GB_ASSERT(s->count > 0);
+		s->keys[index] = PtrSet<T>::TOMBSTONE;
+		s->count--;
 	}
 	}
 }
 }
 
 
 template <typename T>
 template <typename T>
 gb_internal gb_inline void ptr_set_clear(PtrSet<T> *s) {
 gb_internal gb_inline void ptr_set_clear(PtrSet<T> *s) {
-	array_clear(&s->entries);
-	for (isize i = 0; i < s->hashes.count; i++) {
-		s->hashes.data[i] = MAP_SENTINEL;
-	}
+	s->count = 0;
+	gb_zero_size(s->keys, s->capacity*gb_size_of(T));
 }
 }
 
 
-
-template <typename T>
-gb_internal PtrSetEntry<T> *begin(PtrSet<T> &m) noexcept {
-	return m.entries.data;
-}
 template <typename T>
 template <typename T>
-gb_internal PtrSetEntry<T> const *begin(PtrSet<T> const &m) noexcept {
-	return m.entries.data;
-}
+struct PtrSetIterator {
+	PtrSet<T> *set;
+	usize index;
+
+	PtrSetIterator<T> &operator++() noexcept {
+		for (;;) {
+			++index;
+			if (set->capacity == index) {
+				return *this;
+			}
+			T key = set->keys[index];
+			if (key != nullptr && key != PtrSet<T>::TOMBSTONE) {
+				return *this;
+			}
+		}
+	}
+
+	bool operator==(PtrSetIterator<T> const &other) const noexcept {
+		return this->set == other.set && this->index == other.index;
+	}
+
+
+	operator T *() const {
+		return &set->keys[index];
+	}
+};
 
 
 
 
 template <typename T>
 template <typename T>
-gb_internal PtrSetEntry<T> *end(PtrSet<T> &m) noexcept {
-	return m.entries.data + m.entries.count;
+gb_internal PtrSetIterator<T> begin(PtrSet<T> &set) noexcept {
+	usize index = 0;
+	while (index < set.capacity) {
+		T key = set.keys[index];
+		if (key != nullptr && key != PtrSet<T>::TOMBSTONE) {
+			break;
+		}
+		index++;
+	}
+	return PtrSetIterator<T>{&set, index};
 }
 }
-
 template <typename T>
 template <typename T>
-gb_internal PtrSetEntry<T> const *end(PtrSet<T> const &m) noexcept {
-	return m.entries.data + m.entries.count;
+gb_internal PtrSetIterator<T> end(PtrSet<T> &set) noexcept {
+	return PtrSetIterator<T>{&set, set.capacity};
 }
 }

+ 10 - 10
src/threading.cpp

@@ -699,13 +699,13 @@ extern "C" int __ulock_wake(uint32_t operation, void *addr, uint64_t wake_value)
 gb_internal void futex_signal(Futex *f) {
 gb_internal void futex_signal(Futex *f) {
 	for (;;) {
 	for (;;) {
 		int ret = __ulock_wake(UL_COMPARE_AND_WAIT | ULF_NO_ERRNO, f, 0);
 		int ret = __ulock_wake(UL_COMPARE_AND_WAIT | ULF_NO_ERRNO, f, 0);
-		if (ret >= 0) {
+		if (ret == 0) {
 			return;
 			return;
 		}
 		}
-		if (ret == EINTR || ret == EFAULT) {
+		if (ret == -EINTR || ret == -EFAULT) {
 			continue;
 			continue;
 		}
 		}
-		if (ret == ENOENT) {
+		if (ret == -ENOENT) {
 			return;
 			return;
 		}
 		}
 		GB_PANIC("Failed in futex wake!\n");
 		GB_PANIC("Failed in futex wake!\n");
@@ -716,13 +716,13 @@ gb_internal void futex_broadcast(Futex *f) {
 	for (;;) {
 	for (;;) {
 		enum { ULF_WAKE_ALL = 0x00000100 };
 		enum { ULF_WAKE_ALL = 0x00000100 };
 		int ret = __ulock_wake(UL_COMPARE_AND_WAIT | ULF_NO_ERRNO | ULF_WAKE_ALL, f, 0);
 		int ret = __ulock_wake(UL_COMPARE_AND_WAIT | ULF_NO_ERRNO | ULF_WAKE_ALL, f, 0);
-		if (ret >= 0) {
+		if (ret == 0) {
 			return;
 			return;
 		}
 		}
-		if (ret == EINTR || ret == EFAULT) {
+		if (ret == -EINTR || ret == -EFAULT) {
 			continue;
 			continue;
 		}
 		}
-		if (ret == ENOENT) {
+		if (ret == -ENOENT) {
 			return;
 			return;
 		}
 		}
 		GB_PANIC("Failed in futex wake!\n");
 		GB_PANIC("Failed in futex wake!\n");
@@ -732,16 +732,16 @@ gb_internal void futex_broadcast(Futex *f) {
 gb_internal void futex_wait(Futex *f, Footex val) {
 gb_internal void futex_wait(Futex *f, Footex val) {
 	for (;;) {
 	for (;;) {
 		int ret = __ulock_wait(UL_COMPARE_AND_WAIT | ULF_NO_ERRNO, f, val, 0);
 		int ret = __ulock_wait(UL_COMPARE_AND_WAIT | ULF_NO_ERRNO, f, val, 0);
-		if (ret >= 0) {
+		if (ret == 0) {
 			if (*f != val) {
 			if (*f != val) {
 				return;
 				return;
 			}
 			}
 			continue;
 			continue;
 		}
 		}
-		if (ret == EINTR || ret == EFAULT) {
-			continue;
+		if (ret == -EINTR || ret == -EFAULT) {
+			continue;
 		}
 		}
-		if (ret == ENOENT) {
+		if (ret == -ENOENT) {
 			return;
 			return;
 		}
 		}