struct/isoalloc commit 049c12e4c2ad5c21a768f7f3873d84bf1106646a: minor perf improvements related to issue #51
https://github.com/struct/isoalloc/commit/049c12e4c2ad5c21a768f7f3873d84bf1106646a

@@ -741,7 +741,7 @@ INTERNAL_HIDDEN bit_slot_t iso_scan_zone_free_slot_slow(iso_alloc_zone *zone) {

 INTERNAL_HIDDEN iso_alloc_zone *is_zone_usable(iso_alloc_zone *zone, size_t size) {
     /* This zone may fit this chunk but if the zone was
-     * created for chunks more than N* larger than the
+     * created for chunks more than (N * larger) than the
      * requested allocation size then we would be wasting
      * a lot of memory by using it. We only do this for
      * sizes beyond ZONE_1024 bytes. In other words we can

@@ -837,9 +837,9 @@ INTERNAL_HIDDEN iso_alloc_zone *iso_find_zone_fit(size_t size) {
      * slower iterative approach is used. The longer a
      * program runs the more likely we will fail this
      * fast path as default zones may fill up */
-    if(size >= ZONE_512 && size <= ZONE_8192) {
+    if(size >= ZONE_512 && size <= MAX_DEFAULT_ZONE_SZ) {
         i = _default_zone_count >> 1;
-    } else if(size > ZONE_8192) {
+    } else if(size > MAX_DEFAULT_ZONE_SZ) {
         i = _default_zone_count;
     }
 #endif
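
This hunk only swaps the hard-coded ZONE_8192 bound for MAX_DEFAULT_ZONE_SZ, but the fast path it sits in is worth spelling out: the starting index i lets iso_find_zone_fit skip part of the zone array because default zones are created up front, so a large request never needs to scan the smallest default zones. The sketch below illustrates that idea only; zone_t, find_fit, has_free_slot, default_zone_count, and the constant values are hypothetical stand-ins, not isoalloc's definitions.

#include <stdbool.h>
#include <stddef.h>

#define ZONE_512            512
#define MAX_DEFAULT_ZONE_SZ 8192 /* assumed value; in isoalloc this is a named constant */

/* Hypothetical minimal zone descriptor, for illustration only */
typedef struct {
    size_t chunk_size;
    bool has_free_slot;
} zone_t;

/* Sketch of the fast-path idea: default zones come first in the array,
 * so pick a starting index from the requested size instead of always
 * scanning from 0. The heuristic can start past the tightest fit, which
 * is why the real code treats it as a best-effort fast path. */
static zone_t *find_fit(zone_t *zones, size_t zone_count, size_t default_zone_count, size_t size) {
    size_t i = 0;

    if(size >= ZONE_512 && size <= MAX_DEFAULT_ZONE_SZ) {
        i = default_zone_count >> 1;
    } else if(size > MAX_DEFAULT_ZONE_SZ) {
        i = default_zone_count;
    }

    for(; i < zone_count; i++) {
        if(zones[i].chunk_size >= size && zones[i].has_free_slot) {
            return &zones[i];
        }
    }

    return NULL;
}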

@@ -1038,6 +1038,7 @@ INTERNAL_HIDDEN INLINE size_t next_pow2(size_t sz) {
     return sz + 1;
 }

+/* Populates the thread cache, requires the root is locked and zone is unmasked */
 INTERNAL_HIDDEN INLINE void populate_thread_caches(iso_alloc_zone *zone) {
 #if THREAD_SUPPORT && THREAD_CACHE
     if(thread_bit_slot_cache.chunk == NULL) {
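
The comment added above documents the preconditions for populate_thread_caches (root locked, zone unmasked), but the cached state itself is declared elsewhere in the file. For orientation, the declarations below sketch the kind of thread-local storage the identifiers in this diff imply; the THREAD_ZONE_CACHE_SZ value, the struct and type names, and the use of __thread are assumptions, not isoalloc's actual definitions.

#include <stddef.h>

#define THREAD_ZONE_CACHE_SZ 8 /* assumed size of the per-thread zone cache */

typedef struct iso_alloc_zone iso_alloc_zone; /* opaque here; defined by the allocator */

/* One chunk reserved ahead of time so the next small allocation on this
 * thread can be handed out without searching a zone's bitmap again. */
typedef struct {
    void *chunk;
    size_t chunk_size;
} bit_slot_cache_t;

/* Recently used zones, so repeat allocations of the same size can skip
 * the zone lookup entirely. */
typedef struct {
    iso_alloc_zone *zone;
    size_t chunk_size;
} zone_cache_entry_t;

static __thread bit_slot_cache_t thread_bit_slot_cache;
static __thread zone_cache_entry_t thread_zone_cache[THREAD_ZONE_CACHE_SZ];
static __thread size_t thread_zone_cache_count;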

@@ -1051,6 +1052,11 @@ INTERNAL_HIDDEN INLINE void populate_thread_caches(iso_alloc_zone *zone) {
         }
     }

+    /* Don't cache this zone if it was recently cached */
+    if(thread_zone_cache_count != 0 && thread_zone_cache[thread_zone_cache_count - 1].zone == zone) {
+        return;
+    }
+
     if(thread_zone_cache_count < THREAD_ZONE_CACHE_SZ) {
         thread_zone_cache[thread_zone_cache_count].zone = zone;
         thread_zone_cache[thread_zone_cache_count].chunk_size = zone->chunk_size;
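
The early return added here is the main behavioral change in this commit: when the same zone satisfies a run of allocations, every call to populate_thread_caches would otherwise append another duplicate entry and push useful ones out of the small per-thread cache, so the most recent entry is checked first. A minimal illustration of that insert path, reusing the thread-local declarations sketched earlier (cache_zone_for_thread is a hypothetical helper, not a function in this codebase):

/* Append a zone to the per-thread cache unless it was just cached. */
static void cache_zone_for_thread(iso_alloc_zone *zone, size_t chunk_size) {
    /* New in this commit: if the newest entry already points at this
     * zone, a second copy would only waste a cache slot. */
    if(thread_zone_cache_count != 0 && thread_zone_cache[thread_zone_cache_count - 1].zone == zone) {
        return;
    }

    if(thread_zone_cache_count < THREAD_ZONE_CACHE_SZ) {
        thread_zone_cache[thread_zone_cache_count].zone = zone;
        thread_zone_cache[thread_zone_cache_count].chunk_size = chunk_size;
        thread_zone_cache_count++;
    }
    /* The cache-full case (overwriting or evicting an older entry) is
     * handled by code outside this hunk. */
}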

@@ -1065,7 +1071,8 @@ INTERNAL_HIDDEN INLINE void populate_thread_caches(iso_alloc_zone *zone) {

 INTERNAL_HIDDEN void *_iso_alloc(iso_alloc_zone *zone, size_t size) {
 #if THREAD_SUPPORT && THREAD_CACHE
-    if(LIKELY(zone == NULL) && size <= SMALL_SZ_MAX && thread_bit_slot_cache.chunk_size >= size && thread_bit_slot_cache.chunk != NULL) {
+    if(LIKELY(zone == NULL) && size <= SMALL_SZ_MAX && thread_bit_slot_cache.chunk_size >= size &&
+       thread_bit_slot_cache.chunk != NULL) {
         void *p = thread_bit_slot_cache.chunk;
         thread_bit_slot_cache.chunk = NULL;
         thread_bit_slot_cache.chunk_size = 0;

@@ -1143,10 +1150,10 @@ INTERNAL_HIDDEN void *_iso_alloc(iso_alloc_zone *zone, size_t size) {
         zone = iso_find_zone_fit(size);
     }

-    if(zone != NULL) {
-        /* We only need to check if the zone is usable
-         * if it's a custom zone. If we chose this zone
-         * then its guaranteed to already be usable */
+    /* We only need to check if the zone is usable
+     * if it's a custom zone. If we chose this zone
+     * then its guaranteed to already be usable */
+    if(LIKELY(zone != NULL)) {
         if(zone->internally_managed == false) {
             zone = is_zone_usable(zone, size);
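
Beyond moving the comment out of the branch, this last hunk wraps the null check in LIKELY(), telling the compiler the zone lookup almost always succeeds so the hot path can fall through without a taken branch. LIKELY/UNLIKELY macros of this kind are normally thin wrappers over __builtin_expect; a typical definition is shown below as an assumption for illustration, not copied from this repository.

/* Typical branch-hint macros (assumed form, not isoalloc's verbatim code) */
#if defined(__GNUC__) || defined(__clang__)
#define LIKELY(x) __builtin_expect(!!(x), 1)
#define UNLIKELY(x) __builtin_expect(!!(x), 0)
#else
#define LIKELY(x) (x)
#define UNLIKELY(x) (x)
#endif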
