@Article{ SpetterMBLvSSPVH2017,
title = {Volitional regulation of brain responses to food stimuli in overweight and obese subjects: a real-time fMRI feedback study},
journal = {Appetite},
year = {2017},
month = {5},
volume = {112},
pages = {188--195},
abstract = {Obese subjects who achieve weight loss show increased functional connectivity between dorsolateral prefrontal cortex (dlPFC) and ventromedial prefrontal cortex (vmPFC), key areas of executive control and reward processing. We investigated the potential of real-time functional magnetic resonance imaging (rt-fMRI) neurofeedback training to achieve healthier food choices by enhancing self-control of the interplay between these brain areas. We trained eight male individuals with overweight or obesity (age: 31.8 ± 4.4 years, BMI: 29.4 ± 1.4 kg/m2) to up-regulate functional connectivity between the dlPFC and the vmPFC by means of a four-day rt-fMRI neurofeedback protocol including, on each day, three training runs comprised of six up-regulation and six passive viewing trials. During the up-regulation runs of the four training days, participants successfully learned to increase functional connectivity between dlPFC and vmPFC. In addition, a trend towards less high-calorie food choices emerged from before to after training, which however was associated with a trend towards increased covertly assessed snack intake. Findings of this proof-of-concept study indicate that overweight and obese participants can increase functional connectivity between brain areas that orchestrate the top-down control of appetite for high-calorie foods. Neurofeedback training might therefore be a useful tool in achieving and maintaining weight loss.},
web_url = {http://www.sciencedirect.com/science/article/pii/S0195666316305220},
state = {published},
DOI = {10.1016/j.appet.2017.01.032},
author = {Spetter MS; Malekshahi R; Birbaumer N; L\"uhrs M; van der Veer AH{aveer}; Scheffler K{scheffler}{Department High-Field Magnetic Resonance}; Spuckti S; Preissl H; Veit R{veit}{Department High-Field Magnetic Resonance}; Hallschmid M}
}
@Poster{ vanderVeerLAWBM2018,
title = {Where am I? In terms of my physical and of my perceived body},
year = {2018},
month = {5},
day = {19},
number = {23.377},
abstract = {We investigated the following three questions: 1) Where do people locate themselves on their body? 2) How precisely can people locate their body parts? and 3) Do people locate themselves differently in terms of their perceived compared to their physical body dimensions? Alsmith and Longo (2014) asked participants to point directly to themselves with a physical pointer. They found pointing to be to two distinct locations, upper face and upper torso. To investigate the robustness of their findings, we used virtual reality (VR), because this allows for systematic control over experimental variables and easy manipulation of visual information.
In a VR headset and on a large-scale immersive display, participants rotated a pointer in their sagittal plane instructed to "Point directly to you", but also to nine of their body parts (feet, knees, hips, waist, shoulders, chin, nose, eyes and top of the head) previously measured for their physical heights. From the pointed-to body parts a perceived body was constructed, to which the self-locations were alternatively scaled/normalized. Pointing to self relative to the physical body was frequently found for all body regions above mid-torso, as well as above the head (Supplement, left graph). Participants pointed precisely to many body parts, but not to feet and knees, nor to the top of the head. Relative to the perceived body, pointing to self resembled more the results from the earlier physical setup, that is participants pointed mainly to upper torso and the face (Supplement, right graph).
These results suggest, that a) people do not have one specific location where they locate themselves, and b) people do not accurately point to their entire body in the vertical plane.},
web_url = {http://visionsciences1.org/vss_public/mobile/program_search.php},
event_name = {18th Annual Meeting of the Vision Sciences Society (VSS 2018)},
event_place = {St. Pete Beach, FL, USA},
state = {published},
author = {van der Veer AH{aveer}; Longo MR; Alsmith AJT; Wong HY; B\"ulthoff HH{hhb}{Department Human Perception, Cognition and Action}; Mohler BJ{mohler}{Department Human Perception, Cognition and Action}}
}
@Poster{ VanderVeerLAWBM2017,
title = {Where am I? Pointing to myself and body parts in virtual reality},
year = {2017},
month = {8},
day = {30},
pages = {187--188},
abstract = {Where do people experience themselves and their body parts to be? It seems obvious that we are located where our bodies are. However, it is not a given, whether we associate our location with specific part(s) of our body. In one study where they were asked to directly point at themselves,
people most frequently stopped a physical pointer at upper
face and upper torso (Alsmith and Longo, 2014). This
suggests, they experienced themselves not homogeneously
spread out over their bodies, nor in a single bodily location, but rather in these two distinct regions. To investigate the robustness of these findings, we asked participants to point directly at themselves and nine different body parts, in two virtual reality (VR) setups (VR headset and large immersive screen). Investigating in VR provides strong experimental control, specifically the possibility to manipulate visual and body-based cues about the body separately. For pointing at self, we found frequent pointing at all body regions above mid-torso, as well as above the head. For many of their body parts people were pointing remarkably accurately. However, when pointing at knees, feet and top of the head, participants were not very exact, especially in the VR headset (without visual access to the body). However, when normalizing pointing to self by indicators of how participants may perceive their body part locations in VR, the pattern found for pointing at self interestingly changes, such that it resembles more the pointing to self in the physical setup (mainly face and upper torso).},
web_url = {http://journals.sagepub.com/pb-assets/cmscontent/PEC/ECVP2017_abstracts_1.0.pdf},
event_name = {40th European Conference on Visual Perception (ECVP 2017)},
event_place = {Berlin, Germany},
state = {published},
author = {van der Veer AH{aveer}; Longo MR; Alsmith AJT; Wong HY; B\"ulthoff HH{hhb}{Department Human Perception, Cognition and Action}; Mohler BJ{mohler}{Department Human Perception, Cognition and Action}}
}
@Poster{ VanderVeerLAWM2017,
title = {Where am I in virtual reality?},
year = {2017},
month = {3},
web_url = {http://www.mind-and-brain.de/people/einstein-visiting-fellows/vittorio-gallese/symposium-2017-me-i/},
event_name = {Symposium Me \& I: Conceptual and Empirical Perspectives on the Self},
event_place = {Berlin, Germany},
state = {published},
author = {van der Veer AH{aveer}; Longo MR; Alsmith AJT; Wong HY; Mohler BJ{mohler}}
}
@Poster{ VanderVeerLAWM2017_2,
title = {Where am I in virtual reality?},
year = {2017},
month = {3},
event_name = {5th Mind, Brain \& Body Symposium (MBBS 2017)},
event_place = {Berlin, Germany},
state = {published},
author = {van der Veer AH{aveer}; Longo MR; Alsmith AJT; Wong HY; Mohler BJ{mohler}}
}