File indexing completed on 2025-01-17 09:55:53
0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011
0012
0013
0014
0015
0016
0017 #ifndef _NUMA_H
0018 #define _NUMA_H 1
0019
0020
0021 #define LIBNUMA_API_VERSION 2
0022
0023
0024
0025 #include <stddef.h>
0026 #include <string.h>
0027 #include <sys/types.h>
0028 #include <stdlib.h>
0029
0030 #if defined(__x86_64__) || defined(__i386__)
0031 #define NUMA_NUM_NODES 128
0032 #else
0033 #define NUMA_NUM_NODES 2048
0034 #endif
0035
0036 #ifdef __cplusplus
0037 extern "C" {
0038 #endif
0039
0040 typedef struct {
0041 unsigned long n[NUMA_NUM_NODES/(sizeof(unsigned long)*8)];
0042 } nodemask_t;
0043
0044 struct bitmask {
0045 unsigned long size;
0046 unsigned long *maskp;
0047 };
0048
0049
0050 int numa_bitmask_isbitset(const struct bitmask *, unsigned int);
0051 struct bitmask *numa_bitmask_setall(struct bitmask *);
0052 struct bitmask *numa_bitmask_clearall(struct bitmask *);
0053 struct bitmask *numa_bitmask_setbit(struct bitmask *, unsigned int);
0054 struct bitmask *numa_bitmask_clearbit(struct bitmask *, unsigned int);
0055 unsigned int numa_bitmask_nbytes(struct bitmask *);
0056 unsigned int numa_bitmask_weight(const struct bitmask *);
0057 struct bitmask *numa_bitmask_alloc(unsigned int);
0058 void numa_bitmask_free(struct bitmask *);
0059 int numa_bitmask_equal(const struct bitmask *, const struct bitmask *);
0060 void copy_nodemask_to_bitmask(nodemask_t *, struct bitmask *);
0061 void copy_bitmask_to_nodemask(struct bitmask *, nodemask_t *);
0062 void copy_bitmask_to_bitmask(struct bitmask *, struct bitmask *);
0063
0064
0065
0066 static inline void nodemask_zero(nodemask_t *mask)
0067 {
0068 struct bitmask tmp;
0069
0070 tmp.maskp = (unsigned long *)mask;
0071 tmp.size = sizeof(nodemask_t) * 8;
0072 numa_bitmask_clearall(&tmp);
0073 }
0074
0075 static inline void nodemask_zero_compat(nodemask_t *mask)
0076 {
0077 struct bitmask tmp;
0078
0079 tmp.maskp = (unsigned long *)mask;
0080 tmp.size = sizeof(nodemask_t) * 8;
0081 numa_bitmask_clearall(&tmp);
0082 }
0083
0084 static inline void nodemask_set_compat(nodemask_t *mask, int node)
0085 {
0086 mask->n[node / (8*sizeof(unsigned long))] |=
0087 (1UL<<(node%(8*sizeof(unsigned long))));
0088 }
0089
0090 static inline void nodemask_clr_compat(nodemask_t *mask, int node)
0091 {
0092 mask->n[node / (8*sizeof(unsigned long))] &=
0093 ~(1UL<<(node%(8*sizeof(unsigned long))));
0094 }
0095
0096 static inline int nodemask_isset_compat(const nodemask_t *mask, int node)
0097 {
0098 if ((unsigned)node >= NUMA_NUM_NODES)
0099 return 0;
0100 if (mask->n[node / (8*sizeof(unsigned long))] &
0101 (1UL<<(node%(8*sizeof(unsigned long)))))
0102 return 1;
0103 return 0;
0104 }
0105
0106 static inline int nodemask_equal(const nodemask_t *a, const nodemask_t *b)
0107 {
0108 struct bitmask tmp_a, tmp_b;
0109
0110 tmp_a.maskp = (unsigned long *)a;
0111 tmp_a.size = sizeof(nodemask_t) * 8;
0112
0113 tmp_b.maskp = (unsigned long *)b;
0114 tmp_b.size = sizeof(nodemask_t) * 8;
0115
0116 return numa_bitmask_equal(&tmp_a, &tmp_b);
0117 }
0118
0119 static inline int nodemask_equal_compat(const nodemask_t *a, const nodemask_t *b)
0120 {
0121 struct bitmask tmp_a, tmp_b;
0122
0123 tmp_a.maskp = (unsigned long *)a;
0124 tmp_a.size = sizeof(nodemask_t) * 8;
0125
0126 tmp_b.maskp = (unsigned long *)b;
0127 tmp_b.size = sizeof(nodemask_t) * 8;
0128
0129 return numa_bitmask_equal(&tmp_a, &tmp_b);
0130 }
0131
0132
0133
0134 int numa_available(void);
0135
0136
0137
0138
0139 int numa_max_node(void);
0140 int numa_max_possible_node(void);
0141
0142 int numa_preferred(void);
0143
0144
0145 long long numa_node_size64(int node, long long *freep);
0146 long numa_node_size(int node, long *freep);
0147
0148 int numa_pagesize(void);
0149
0150
0151
0152 extern struct bitmask *numa_all_nodes_ptr;
0153
0154
0155 extern struct bitmask *numa_nodes_ptr;
0156
0157
0158 extern nodemask_t numa_all_nodes;
0159
0160
0161 extern struct bitmask *numa_all_cpus_ptr;
0162
0163
0164 extern struct bitmask *numa_no_nodes_ptr;
0165
0166
0167 extern nodemask_t numa_no_nodes;
0168
0169
0170 void numa_bind(struct bitmask *nodes);
0171
0172
0173 void numa_set_interleave_mask(struct bitmask *nodemask);
0174
0175
0176 struct bitmask *numa_get_interleave_mask(void);
0177
0178
0179 struct bitmask *numa_allocate_nodemask(void);
0180
/* Release a bitmask obtained from numa_allocate_nodemask(). */
static inline void numa_free_nodemask(struct bitmask *bmp)
{
	numa_bitmask_free(bmp);
}
0185
0186
0187 void numa_set_preferred(int node);
0188
0189
0190 int numa_has_preferred_many(void);
0191
0192
0193 void numa_set_preferred_many(struct bitmask *bitmask);
0194
0195
0196 struct bitmask *numa_preferred_many(void);
0197
0198
0199 void numa_set_localalloc(void);
0200
0201
0202 void numa_set_membind(struct bitmask *nodemask);
0203
0204
0205
0206 void numa_set_membind_balancing(struct bitmask *bmp);
0207
0208
0209 struct bitmask *numa_get_membind(void);
0210
0211
0212 struct bitmask *numa_get_mems_allowed(void);
0213
0214 int numa_get_interleave_node(void);
0215
0216
0217
0218
0219
0220 void *numa_alloc_interleaved_subset(size_t size, struct bitmask *nodemask);
0221
0222 void *numa_alloc_interleaved(size_t size);
0223
0224 void *numa_alloc_onnode(size_t size, int node);
0225
0226 void *numa_alloc_local(size_t size);
0227
0228 void *numa_alloc(size_t size);
0229
0230 void *numa_realloc(void *old_addr, size_t old_size, size_t new_size);
0231
0232 void numa_free(void *mem, size_t size);
0233
0234
0235
0236
0237
0238 void numa_interleave_memory(void *mem, size_t size, struct bitmask *mask);
0239
0240
0241 void numa_tonode_memory(void *start, size_t size, int node);
0242
0243
0244 void numa_tonodemask_memory(void *mem, size_t size, struct bitmask *mask);
0245
0246
0247 void numa_setlocal_memory(void *start, size_t size);
0248
0249
0250 void numa_police_memory(void *start, size_t size);
0251
0252
0253 int numa_run_on_node_mask(struct bitmask *mask);
0254
0255 int numa_run_on_node_mask_all(struct bitmask *mask);
0256
0257 int numa_run_on_node(int node);
0258
0259 struct bitmask * numa_get_run_node_mask(void);
0260
0261
0262 void numa_set_bind_policy(int strict);
0263
0264
0265 void numa_set_strict(int flag);
0266
0267
0268 int numa_num_possible_nodes(void);
0269
0270
0271 int numa_num_possible_cpus(void);
0272
0273
0274 int numa_num_configured_nodes(void);
0275
0276
0277 int numa_num_configured_cpus(void);
0278
0279
0280 int numa_num_task_cpus(void);
0281 int numa_num_thread_cpus(void);
0282
0283
0284 int numa_num_task_nodes(void);
0285 int numa_num_thread_nodes(void);
0286
0287
0288 struct bitmask *numa_allocate_cpumask(void);
0289
/* Release a bitmask obtained from numa_allocate_cpumask(). */
static inline void numa_free_cpumask(struct bitmask *bmp)
{
	numa_bitmask_free(bmp);
}
0294
0295
0296 int numa_node_to_cpus(int, struct bitmask *);
0297
0298 void numa_node_to_cpu_update(void);
0299
0300
0301 int numa_node_of_cpu(int cpu);
0302
0303
0304 int numa_distance(int node1, int node2);
0305
0306
0307
0308
0309
0310 void numa_error(char *where);
0311
0312
0313
0314 extern int numa_exit_on_error;
0315
0316
0317 void numa_warn(int num, char *fmt, ...);
0318
0319
0320 extern int numa_exit_on_warn;
0321
0322 int numa_migrate_pages(int pid, struct bitmask *from, struct bitmask *to);
0323
0324 int numa_move_pages(int pid, unsigned long count, void **pages,
0325 const int *nodes, int *status, int flags);
0326
0327 int numa_sched_getaffinity(pid_t, struct bitmask *);
0328 int numa_sched_setaffinity(pid_t, struct bitmask *);
0329
0330
0331 struct bitmask *numa_parse_nodestring(const char *);
0332
0333
0334
0335 struct bitmask *numa_parse_nodestring_all(const char *);
0336
0337
0338 struct bitmask *numa_parse_cpustring(const char *);
0339
0340
0341
0342 struct bitmask *numa_parse_cpustring_all(const char *);
0343
0344
0345
0346
0347 int numa_has_home_node(void);
0348
0349
0350 int numa_set_mempolicy_home_node(void *start, unsigned long len,
0351 int home_node, int flags);
0352
0353
0354
0355
0356
0357
0358
0359 static inline void numa_set_interleave_mask_compat(nodemask_t *nodemask)
0360 {
0361 struct bitmask tmp;
0362
0363 tmp.maskp = (unsigned long *)nodemask;
0364 tmp.size = sizeof(nodemask_t) * 8;
0365 numa_set_interleave_mask(&tmp);
0366 }
0367
0368 static inline nodemask_t numa_get_interleave_mask_compat(void)
0369 {
0370 struct bitmask *tp;
0371 nodemask_t mask;
0372
0373 tp = numa_get_interleave_mask();
0374 copy_bitmask_to_nodemask(tp, &mask);
0375 numa_bitmask_free(tp);
0376 return mask;
0377 }
0378
0379 static inline void numa_bind_compat(nodemask_t *mask)
0380 {
0381 struct bitmask *tp;
0382
0383 tp = numa_allocate_nodemask();
0384 copy_nodemask_to_bitmask(mask, tp);
0385 numa_bind(tp);
0386 numa_bitmask_free(tp);
0387 }
0388
0389 static inline void numa_set_membind_compat(nodemask_t *mask)
0390 {
0391 struct bitmask tmp;
0392
0393 tmp.maskp = (unsigned long *)mask;
0394 tmp.size = sizeof(nodemask_t) * 8;
0395 numa_set_membind(&tmp);
0396 }
0397
0398 static inline nodemask_t numa_get_membind_compat(void)
0399 {
0400 struct bitmask *tp;
0401 nodemask_t mask;
0402
0403 tp = numa_get_membind();
0404 copy_bitmask_to_nodemask(tp, &mask);
0405 numa_bitmask_free(tp);
0406 return mask;
0407 }
0408
0409 static inline void *numa_alloc_interleaved_subset_compat(size_t size,
0410 const nodemask_t *mask)
0411 {
0412 struct bitmask tmp;
0413
0414 tmp.maskp = (unsigned long *)mask;
0415 tmp.size = sizeof(nodemask_t) * 8;
0416 return numa_alloc_interleaved_subset(size, &tmp);
0417 }
0418
0419 static inline int numa_run_on_node_mask_compat(const nodemask_t *mask)
0420 {
0421 struct bitmask tmp;
0422
0423 tmp.maskp = (unsigned long *)mask;
0424 tmp.size = sizeof(nodemask_t) * 8;
0425 return numa_run_on_node_mask(&tmp);
0426 }
0427
0428 static inline nodemask_t numa_get_run_node_mask_compat(void)
0429 {
0430 struct bitmask *tp;
0431 nodemask_t mask;
0432
0433 tp = numa_get_run_node_mask();
0434 copy_bitmask_to_nodemask(tp, &mask);
0435 numa_bitmask_free(tp);
0436 return mask;
0437 }
0438
0439 static inline void numa_interleave_memory_compat(void *mem, size_t size,
0440 const nodemask_t *mask)
0441 {
0442 struct bitmask tmp;
0443
0444 tmp.maskp = (unsigned long *)mask;
0445 tmp.size = sizeof(nodemask_t) * 8;
0446 numa_interleave_memory(mem, size, &tmp);
0447 }
0448
0449 static inline void numa_tonodemask_memory_compat(void *mem, size_t size,
0450 const nodemask_t *mask)
0451 {
0452 struct bitmask tmp;
0453
0454 tmp.maskp = (unsigned long *)mask;
0455 tmp.size = sizeof(nodemask_t) * 8;
0456 numa_tonodemask_memory(mem, size, &tmp);
0457 }
0458
0459 static inline int numa_sched_getaffinity_compat(pid_t pid, unsigned len,
0460 unsigned long *mask)
0461 {
0462 struct bitmask tmp;
0463
0464 tmp.maskp = (unsigned long *)mask;
0465 tmp.size = len * 8;
0466 return numa_sched_getaffinity(pid, &tmp);
0467 }
0468
0469 static inline int numa_sched_setaffinity_compat(pid_t pid, unsigned len,
0470 unsigned long *mask)
0471 {
0472 struct bitmask tmp;
0473
0474 tmp.maskp = (unsigned long *)mask;
0475 tmp.size = len * 8;
0476 return numa_sched_setaffinity(pid, &tmp);
0477 }
0478
0479 static inline int numa_node_to_cpus_compat(int node, unsigned long *buffer,
0480 int buffer_len)
0481 {
0482 struct bitmask tmp;
0483
0484 tmp.maskp = (unsigned long *)buffer;
0485 tmp.size = buffer_len * 8;
0486 return numa_node_to_cpus(node, &tmp);
0487 }
0488
0489
0490
0491
0492
0493
0494
0495 #ifdef NUMA_VERSION1_COMPATIBILITY
0496 #include <numacompat1.h>
0497 #endif
0498
0499 #ifdef __cplusplus
0500 }
0501 #endif
0502
0503 #endif