@@ -16,8 +16,6 @@
*/
extern unsigned long avenrun[]; /* Load averages */
extern void get_avenrun(unsigned long *loads, unsigned long offset, int shift);
-extern void get_avenrun_ve(unsigned long *loads,
- unsigned long offset, int shift);
#define FSHIFT 11 /* nr of bits of precision */
#define FIXED_1 (1<<FSHIFT) /* 1.0 as fixed-point */
@@ -76,12 +76,20 @@ void get_avenrun(unsigned long *loads, unsigned long offset, int shift)
loads[2] = (avenrun[2] + offset) << shift;
}
-void get_avenrun_ve(unsigned long *loads, unsigned long offset, int shift)
+int get_avenrun_tg(struct task_group *tg, unsigned long *loads,
+ unsigned long offset, int shift)
{
- struct task_group *tg = task_group(current);
+	/* Fall back to the current task's group when the caller passes NULL. */
+ tg = tg ? tg : task_group(current);
+
+ if (tg == &root_task_group)
+ return -ENOSYS;
+
loads[0] = (tg->avenrun[0] + offset) << shift;
loads[1] = (tg->avenrun[1] + offset) << shift;
loads[2] = (tg->avenrun[2] + offset) << shift;
+
+ return 0;
}
long calc_load_fold_active(struct rq *this_rq, long adjust)
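For reference, a minimal sketch (not part of the patch) of how a caller could turn the loads[] filled in by get_avenrun_tg() back into the familiar two-decimal load averages, assuming the LOAD_INT()/LOAD_FRAC() helpers and the FIXED_1/200 rounding offset used by fs/proc/loadavg.c:

	unsigned long loads[3];

	/* Same FIXED_1 scale as the global avenrun[]; the offset rounds to two digits. */
	if (!get_avenrun_tg(NULL, loads, FIXED_1/200, 0))
		pr_info("tg loadavg: %lu.%02lu %lu.%02lu %lu.%02lu\n",
			LOAD_INT(loads[0]), LOAD_FRAC(loads[0]),
			LOAD_INT(loads[1]), LOAD_FRAC(loads[1]),
			LOAD_INT(loads[2]), LOAD_FRAC(loads[2]));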
@@ -2543,6 +2543,8 @@ SYSCALL_DEFINE3(getcpu, unsigned __user *, cpup, unsigned __user *, nodep,
}
extern void si_meminfo_ve(struct sysinfo *si, struct ve_struct *ve);
+extern int get_avenrun_tg(struct task_group *tg, unsigned long *loads,
+ unsigned long offset, int shift);
/**
* do_sysinfo - fill in sysinfo struct
@@ -2575,7 +2577,9 @@ static int do_sysinfo(struct sysinfo *info)
info->procs = nr_threads_ve(ve);
- get_avenrun_ve(info->loads, 0, SI_LOAD_SHIFT - FSHIFT);
+	/* Cannot fail: a task running inside a VE never belongs to root_task_group. */
+ (void)get_avenrun_tg(NULL, info->loads,
+ 0, SI_LOAD_SHIFT - FSHIFT);
}
/*
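As a quick end-to-end check (again, not part of the patch): the SI_LOAD_SHIFT - FSHIFT shift above converts avenrun's FIXED_1 (1 << 11) fixed point into the 1 << SI_LOAD_SHIFT (65536) fixed point that sysinfo(2) reports, so a process inside a VE should see its per-VE load averages from a userspace sketch like:

	#include <stdio.h>
	#include <sys/sysinfo.h>

	int main(void)
	{
		struct sysinfo si;

		if (sysinfo(&si) != 0)
			return 1;

		/* loads[] is fixed point scaled by 1 << SI_LOAD_SHIFT (65536). */
		printf("%.2f %.2f %.2f\n",
		       si.loads[0] / 65536.0,
		       si.loads[1] / 65536.0,
		       si.loads[2] / 65536.0);
		return 0;
	}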