elf.c: section limit inference for linux, fixes for stb_ds.h hashing, more compilation flags

This commit is contained in:
2024-07-28 01:44:39 +03:00
parent 36dcf14db7
commit 5ddf0eb879
9 changed files with 137 additions and 33 deletions

View File

@@ -295,8 +295,9 @@ void textures_dump_atlases(struct texture_cache *cache) {
static t_texture_key textures_load(struct texture_cache *cache, const char *path) {
/* no need to do anything if it was loaded already */
if (shgeti(cache->hash, path) >= 0)
return (t_texture_key){0};
const ptrdiff_t i = shgeti(cache->hash, path);
if (i >= 0)
return (t_texture_key){ (uint16_t)i };
SDL_Surface *surface = image_to_surface(path);
struct texture new_texture = {0};
@@ -308,12 +309,12 @@ static t_texture_key textures_load(struct texture_cache *cache, const char *path
upload_texture_from_surface(new_texture.loner_texture, surface);
new_texture.srcrect = (t_rect) { .w = surface->w, .h = surface->h };
shput(cache->hash, path, new_texture);
return (t_texture_key){ shgeti(cache->hash, path) };
return (t_texture_key){ (uint16_t)shgeti(cache->hash, path) };
} else {
new_texture.atlas_index = cache->atlas_index;
shput(cache->hash, path, new_texture);
cache->is_dirty = true;
return (t_texture_key){ shgeti(cache->hash, path) };
return (t_texture_key){ (uint16_t)shgeti(cache->hash, path) };
}
}
@@ -363,35 +364,44 @@ void textures_update_atlas(struct texture_cache *cache) {
}
/* EXPERIMENTAL: LIKELY TO BE REMOVED! */
/* todo: If it's proven to be useful: add runtime checking for .rodata > .data */
#ifdef __unix__ /* use rodata elf section for fast lookups of repeating textures */
#ifdef __linux__ /* use rodata elf section for fast lookups of repeating textures */
extern const char start_rodata_address[];
extern const char stop_rodata_heuristic[];
#include "system/linux/elf.h"
asm(".set start_rodata_address, .rodata");
asm(".set stop_rodata_heuristic, .data"); /* there's nothing in default linker script to know the size of .rodata */
static const char *rodata_start;
static const char *rodata_stop;
/* TODO: it might be better to construct a new table that hashes pointers, not strings, to texture keys */
/* this way every used texture will benefit, no matter the order of commission */
t_texture_key textures_get_key(struct texture_cache *cache, const char *path) {
static const char *last_path = NULL;
static t_texture_key last_texture;
static struct ptr_to_texture {
const void *key;
t_texture_key value;
} *ptr_to_texture;
/* fast path */
if (rodata_stop == NULL)
if (!infer_elf_section_bounds(".rodata", &rodata_start, &rodata_stop))
CRY("Section inference", ".rodata section lookup failed");
/* the fastest path */
if (path == last_path)
return last_texture;
else {
/* moderately fast path, by pointer hashing */
const ptrdiff_t texture = hmgeti(ptr_to_texture, path);
if (texture != -1) {
if (path >= rodata_start && path < rodata_stop)
last_path = path;
last_texture = ptr_to_texture[texture].value;
return last_texture;
}
}
/* hash tables are assumed to be stable, so we just return indices */
ptrdiff_t texture = shgeti(cache->hash, path);
/* try loading */
last_texture = textures_load(cache, path);
hmput(ptr_to_texture, path, last_texture);
/* load it if it isn't */
if (texture == -1) {
last_texture = textures_load(cache, path);
} else
last_texture = (t_texture_key){ (uint16_t)texture };
if (path >= start_rodata_address && path < stop_rodata_heuristic)
if (path >= rodata_start && path < rodata_stop)
last_path = path;
return last_texture;
@@ -400,7 +410,7 @@ t_texture_key textures_get_key(struct texture_cache *cache, const char *path) {
#else
t_texture_key textures_get_key(struct texture_cache *cache, const char *path) {
/* hash tables are assumed to be stable, so we just return indices */
ptrdiff_t texture = shgeti(cache->hash, path);
const ptrdiff_t texture = shgeti(cache->hash, path);
/* load it if it isn't */
if (texture == -1) {