@@ -250,20 +250,23 @@ void gen_pool_destroy(struct gen_pool *pool)
 EXPORT_SYMBOL(gen_pool_destroy);
 
 /**
- * gen_pool_alloc - allocate special memory from the pool
+ * gen_pool_alloc_addr - allocate special memory from the pool
  * @pool: pool to allocate from
  * @size: number of bytes to allocate from the pool
+ * @alloc_addr: if non-zero, allocate starting at alloc_addr.
  *
  * Allocate the requested number of bytes from the specified pool.
  * Uses a first-fit algorithm. Can not be used in NMI handler on
  * architectures without NMI-safe cmpxchg implementation.
  */
-unsigned long gen_pool_alloc(struct gen_pool *pool, size_t size)
+unsigned long gen_pool_alloc_addr(struct gen_pool *pool, size_t size,
+                                  unsigned long alloc_addr)
 {
         struct gen_pool_chunk *chunk;
         unsigned long addr = 0;
         int order = pool->min_alloc_order;
         int nbits, start_bit = 0, end_bit, remain;
+        int alloc_bit_needed = 0;
 
 #ifndef CONFIG_ARCH_HAVE_NMI_SAFE_CMPXCHG
         BUG_ON(in_nmi());
@@ -272,16 +275,30 @@ unsigned long gen_pool_alloc(struct gen_pool *pool, size_t size)
         if (size == 0)
                 return 0;
 
+        if (alloc_addr & ((1 << order) - 1))
+                return 0;
+
         nbits = (size + (1UL << order) - 1) >> order;
         rcu_read_lock();
         list_for_each_entry_rcu(chunk, &pool->chunks, next_chunk) {
                 if (size > atomic_read(&chunk->avail))
                         continue;
 
                 end_bit = (chunk->end_addr - chunk->start_addr) >> order;
+                if (alloc_addr) {
+                        if (alloc_addr < chunk->start_addr ||
+                            alloc_addr >= chunk->end_addr)
+                                continue;
+                        if (alloc_addr + size > chunk->end_addr)
+                                break;
+                        alloc_bit_needed = start_bit =
+                                (alloc_addr - chunk->start_addr) >> order;
+                }
 retry:
                 start_bit = bitmap_find_next_zero_area(chunk->bits, end_bit,
                                                        start_bit, nbits, 0);
+                if (alloc_addr && alloc_bit_needed != start_bit)
+                        break;
                 if (start_bit >= end_bit)
                         continue;
                 remain = bitmap_set_ll(chunk->bits, start_bit, nbits);
@@ -300,7 +317,7 @@ unsigned long gen_pool_alloc(struct gen_pool *pool, size_t size)
         rcu_read_unlock();
         return addr;
 }
-EXPORT_SYMBOL(gen_pool_alloc);
+EXPORT_SYMBOL(gen_pool_alloc_addr);
 
 /**
  * gen_pool_free - free allocated special memory back to the pool
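
For reference, a minimal usage sketch of the new interface follows. It is not part of the patch: the example_reserve() wrapper, the 0x40000000 base address and the sizes are made-up illustration values, while gen_pool_create(), gen_pool_add(), gen_pool_free() and gen_pool_destroy() are the existing genalloc API. Passing 0 as alloc_addr skips every new branch above, so the call then behaves like the old first-fit gen_pool_alloc().

#include <linux/genalloc.h>

/* Usage sketch only -- names, addresses and sizes are illustrative. */
static int example_reserve(void)
{
        struct gen_pool *pool;
        unsigned long addr;

        /* min_alloc_order = 8: the pool manages 256-byte granules. */
        pool = gen_pool_create(8, -1);
        if (!pool)
                return -ENOMEM;

        /* Hand the pool a 1 MiB carve-out starting at 0x40000000. */
        if (gen_pool_add(pool, 0x40000000, 0x100000, -1)) {
                gen_pool_destroy(pool);
                return -ENOMEM;
        }

        /*
         * Request 4 KiB at exactly 0x40010000.  alloc_addr must be a
         * multiple of 1 << min_alloc_order and the whole range must sit
         * inside one chunk, otherwise 0 comes back.
         */
        addr = gen_pool_alloc_addr(pool, 0x1000, 0x40010000);
        if (!addr) {
                gen_pool_destroy(pool);
                return -EBUSY;
        }

        /* ... use [addr, addr + 0x1000) ... */

        gen_pool_free(pool, addr, 0x1000);
        gen_pool_destroy(pool);
        return 0;
}

With these numbers the request occupies nbits = (0x1000 + 255) >> 8 = 16 bitmap bits, and the fixed address maps to start_bit = (0x40010000 - 0x40000000) >> 8 = 256, which is the slot the alloc_bit_needed check verifies against.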