author | Zhao Qiang <qiang.zhao@freescale.com> | 2015-11-30 10:48:52 +0800
committer | Scott Wood <scottwood@freescale.com> | 2015-12-22 17:10:17 -0600
commit | de2dd0eb30af55d3893979d5641c50c7a8969c99 (patch)
tree | dcfd94f13f29b4ad8bcf32b2849876467aa3626c /lib/genalloc.c
parent | e9d764f803964a54ca7da4a67d124fe824ebd80a (diff)
genalloc: support memory allocation with byte alignment
Byte alignment is required to manage some special RAM, so add
gen_pool_first_fit_align() to genalloc. Also add gen_pool_alloc_algo(),
which lets a caller pass the algorithm explicitly when more than one is
in use and carries the data argument through to
gen_pool_first_fit_align(); gen_pool_alloc() becomes a wrapper around it.
Signed-off-by: Zhao Qiang <qiang.zhao@freescale.com>
Signed-off-by: Scott Wood <scottwood@freescale.com>
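As a usage sketch (not part of this patch): assuming the companion change to include/linux/genalloc.h declares struct genpool_data_align and the gen_pool_alloc_algo()/gen_pool_first_fit_align() prototypes, a caller could request a byte-aligned allocation roughly as below. The helper name and the 256-byte alignment are illustrative only.

```c
#include <linux/genalloc.h>

/* Hypothetical caller: carve a 256-byte-aligned buffer out of an
 * existing gen_pool without changing the pool's default algorithm. */
static unsigned long alloc_aligned_buf(struct gen_pool *pool, size_t size)
{
	/* gen_pool_first_fit_align() reads the requested byte alignment
	 * from a struct genpool_data_align passed through @data
	 * (declared in the matching genalloc.h change, not in this diff). */
	struct genpool_data_align align_data = {
		.align = 256,
	};

	/* Per-call algorithm override: other users of the pool still get
	 * whatever algorithm was set with gen_pool_set_algo(). */
	return gen_pool_alloc_algo(pool, size, gen_pool_first_fit_align,
				   &align_data);
}
```

Passing the algorithm per call is what keeps the pool's default algorithm untouched for other users of the same pool.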
Diffstat (limited to 'lib/genalloc.c')
-rw-r--r-- | lib/genalloc.c | 61
1 file changed, 55 insertions, 6 deletions
diff --git a/lib/genalloc.c b/lib/genalloc.c
index 116a166..b8cf89d 100644
--- a/lib/genalloc.c
+++ b/lib/genalloc.c
@@ -270,6 +270,25 @@ EXPORT_SYMBOL(gen_pool_destroy);
  */
 unsigned long gen_pool_alloc(struct gen_pool *pool, size_t size)
 {
+	return gen_pool_alloc_algo(pool, size, pool->algo, pool->data);
+}
+EXPORT_SYMBOL(gen_pool_alloc);
+
+/**
+ * gen_pool_alloc_algo - allocate special memory from the pool
+ * @pool: pool to allocate from
+ * @size: number of bytes to allocate from the pool
+ * @algo: algorithm passed from caller
+ * @data: data passed to algorithm
+ *
+ * Allocate the requested number of bytes from the specified pool.
+ * Uses the pool allocation function (with first-fit algorithm by default).
+ * Can not be used in NMI handler on architectures without
+ * NMI-safe cmpxchg implementation.
+ */
+unsigned long gen_pool_alloc_algo(struct gen_pool *pool, size_t size,
+		genpool_algo_t algo, void *data)
+{
 	struct gen_pool_chunk *chunk;
 	unsigned long addr = 0;
 	int order = pool->min_alloc_order;
@@ -290,8 +309,8 @@ unsigned long gen_pool_alloc(struct gen_pool *pool, size_t size)
 
 		end_bit = chunk_size(chunk) >> order;
 retry:
-		start_bit = pool->algo(chunk->bits, end_bit, start_bit, nbits,
-				pool->data);
+		start_bit = algo(chunk->bits, end_bit, start_bit,
+				nbits, data, pool);
 		if (start_bit >= end_bit)
 			continue;
 		remain = bitmap_set_ll(chunk->bits, start_bit, nbits);
@@ -310,7 +329,7 @@ retry:
 	rcu_read_unlock();
 	return addr;
 }
-EXPORT_SYMBOL(gen_pool_alloc);
+EXPORT_SYMBOL(gen_pool_alloc_algo);
 
 /**
  * gen_pool_dma_alloc - allocate special memory from the pool for DMA usage
@@ -501,15 +520,42 @@ EXPORT_SYMBOL(gen_pool_set_algo);
  * @start: The bitnumber to start searching at
  * @nr: The number of zeroed bits we're looking for
  * @data: additional data - unused
+ * @pool: pool to find the fit region memory from
  */
 unsigned long gen_pool_first_fit(unsigned long *map, unsigned long size,
-		unsigned long start, unsigned int nr, void *data)
+		unsigned long start, unsigned int nr, void *data,
+		struct gen_pool *pool)
 {
 	return bitmap_find_next_zero_area(map, size, start, nr, 0);
 }
 EXPORT_SYMBOL(gen_pool_first_fit);
 
 /**
+ * gen_pool_first_fit_align - find the first available region
+ * of memory matching the size requirement (alignment constraint)
+ * @map: The address to base the search on
+ * @size: The bitmap size in bits
+ * @start: The bitnumber to start searching at
+ * @nr: The number of zeroed bits we're looking for
+ * @data: data for alignment
+ * @pool: pool to get order from
+ */
+unsigned long gen_pool_first_fit_align(unsigned long *map, unsigned long size,
+		unsigned long start, unsigned int nr, void *data,
+		struct gen_pool *pool)
+{
+	struct genpool_data_align *alignment;
+	unsigned long align_mask;
+	int order;
+
+	alignment = data;
+	order = pool->min_alloc_order;
+	align_mask = ((alignment->align + (1UL << order) - 1) >> order) - 1;
+	return bitmap_find_next_zero_area(map, size, start, nr, align_mask);
+}
+EXPORT_SYMBOL(gen_pool_first_fit_align);
+
+/**
  * gen_pool_first_fit_order_align - find the first available region
  * of memory matching the size requirement. The region will be aligned
  * to the order of the size specified.
@@ -518,10 +564,11 @@ EXPORT_SYMBOL(gen_pool_first_fit);
  * @start: The bitnumber to start searching at
  * @nr: The number of zeroed bits we're looking for
  * @data: additional data - unused
+ * @pool: pool to find the fit region memory from
  */
 unsigned long gen_pool_first_fit_order_align(unsigned long *map,
 		unsigned long size, unsigned long start,
-		unsigned int nr, void *data)
+		unsigned int nr, void *data, struct gen_pool *pool)
 {
 	unsigned long align_mask = roundup_pow_of_two(nr) - 1;
 
@@ -537,12 +584,14 @@ EXPORT_SYMBOL(gen_pool_first_fit_order_align);
  * @start: The bitnumber to start searching at
  * @nr: The number of zeroed bits we're looking for
  * @data: additional data - unused
+ * @pool: pool to find the fit region memory from
  *
  * Iterate over the bitmap to find the smallest free region
  * which we can allocate the memory.
  */
 unsigned long gen_pool_best_fit(unsigned long *map, unsigned long size,
-		unsigned long start, unsigned int nr, void *data)
+		unsigned long start, unsigned int nr, void *data,
+		struct gen_pool *pool)
 {
 	unsigned long start_bit = size;
 	unsigned long len = size + 1;
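For reference, a standalone sketch of the align_mask arithmetic used by gen_pool_first_fit_align() above. The example values (min_alloc_order = 5, i.e. 32-byte granules, and a requested 256-byte alignment) are chosen purely for illustration and are not taken from the patch.

```c
#include <stdio.h>

/* Standalone illustration of the align_mask computation:
 * the requested byte alignment is rounded up to whole allocation
 * granules (1 << min_alloc_order bytes each) and turned into the
 * bit mask that bitmap_find_next_zero_area() uses to align the
 * start bit of the search. */
int main(void)
{
	int order = 5;              /* pool->min_alloc_order: 32-byte granules */
	unsigned long align = 256;  /* requested byte alignment */

	unsigned long align_mask = ((align + (1UL << order) - 1) >> order) - 1;

	/* 256 bytes == 8 granules, so the mask is 7: candidate start bits
	 * are forced onto multiples of 8, i.e. 256-byte-aligned offsets
	 * relative to the start of the chunk. */
	printf("align_mask = %lu\n", align_mask);
	return 0;
}
```

A mask of 7 means bitmap_find_next_zero_area() only considers start indexes that are multiples of 8 granules, which is how the byte alignment constraint is enforced on the returned region.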