%% BibTeX for papers by David Kotz; for a complete/updated list see
%% https://www.cs.dartmouth.edu/~kotz/research/papers.html

@InProceedings{bi:vision,
  author    = {Shengjie Bi and David Kotz},
  title     = {{Eating detection with a head-mounted video camera}},
  booktitle = {{Proceedings of the IEEE International Conference on Healthcare Informatics}},
  year      = 2022,
  month     = {June},
  pages     = {60--66},
  publisher = {IEEE},
  copyright = {IEEE},
  DOI       = {10.1109/ICHI54592.2022.00021},
  URL       = {https://www.cs.dartmouth.edu/~kotz/research/bi-vision/index.html},
  abstract  = {In this paper, we present a computer-vision based approach to detect eating. Specifically, our goal is to develop a wearable system that is effective and robust enough to automatically detect when people eat, and for how long. We collected video from a cap-mounted camera on 10 participants for about 55 hours in free-living conditions. We evaluated performance of eating detection with four different Convolutional Neural Network (CNN) models. The best model achieved accuracy 90.9\% and F1 score 78.7\% for eating detection with a 1-minute resolution. We also discuss the resources needed to deploy a 3D CNN model in wearable or mobile platforms, in terms of computation, memory, and power. We believe this paper is the first work to experiment with video-based (rather than image-based) eating detection in free-living scenarios.},
}

@Article{odame:chewing,
  author    = {Kofi Odame and Maria Nyamukuru and Mohsen Shahghasemi and Shengjie Bi and David Kotz},
  title     = {{Analog Gated Recurrent Neural Network for Detecting Chewing Events}},
  journal   = {IEEE Transactions on Biomedical Circuits and Systems},
  year      = 2022,
  month     = {December},
  volume    = 16,
  number    = 6,
  pages     = {1106--1115},
  publisher = {IEEE},
  copyright = {IEEE},
  DOI       = {10.1109/TBCAS.2022.3218889},
  URL       = {https://www.cs.dartmouth.edu/~kotz/research/odame-chewing/index.html},
  abstract  = {We present a novel gated recurrent neural network to detect when a person is chewing on food. We implemented the neural network as a custom analog integrated circuit in a 0.18 {$\mu$}m CMOS technology. The neural network was trained on 6.4 hours of data collected from a contact microphone that was mounted on volunteers' mastoid bones. When tested on 1.6 hours of previously-unseen data, the analog neural network identified chewing events at a 24-second time resolution. It achieved a recall of 91\% and an F1-score of 94\% while consuming 1.1 {$\mu$}W of power. A system for detecting whole eating episodes---like meals and snacks---that is based on the novel analog neural network consumes an estimated 18.8 {$\mu$}W of power.},
}

@TechReport{bi:video-tr,
  author      = {Shengjie Bi and David Kotz},
  title       = {{Eating detection with a head-mounted video camera}},
  institution = {Dartmouth Computer Science},
  year        = 2021,
  month       = {December},
  number      = {TR2021-1002},
  copyright   = {the authors},
  URL         = {https://www.cs.dartmouth.edu/~kotz/research/bi-video-tr/index.html},
  abstract    = {In this paper, we present a computer-vision based approach to detect eating. Specifically, our goal is to develop a wearable system that is effective and robust enough to automatically detect when people eat, and for how long. We collected video from a cap-mounted camera on 10 participants for about 55 hours in free-living conditions. We evaluated performance of eating detection with four different Convolutional Neural Network (CNN) models. The best model achieved accuracy 90.9\% and F1 score 78.7\% for eating detection with a 1-minute resolution. We also discuss the resources needed to deploy a 3D CNN model in wearable or mobile platforms, in terms of computation, memory, and power. We believe this paper is the first work to experiment with video-based (rather than image-based) eating detection in free-living scenarios.},
}

@PhdThesis{bi:thesis,
  author    = {Shengjie Bi},
  title     = {{Detection of health-related behaviours using head-mounted devices}},
  school    = {Dartmouth Computer Science},
  year      = 2021,
  month     = {May},
  copyright = {the author},
  address   = {Hanover, NH},
  URL       = {https://www.cs.dartmouth.edu/~kotz/research/bi-thesis/index.html},
  note      = {PhD Dissertation},
  abstract  = {The detection of health-related behaviors is the basis of many mobile-sensing applications for healthcare and can trigger other inquiries or interventions. Wearable sensors have been widely used for mobile sensing due to their ever-decreasing cost, ease of deployment, and ability to provide continuous monitoring. In this dissertation, we develop a generalizable approach to sensing eating-related behavior. \par First, we developed Auracle, a wearable earpiece that can automatically detect eating episodes. Using an off-the-shelf contact microphone placed behind the ear, Auracle captures the sound of a person chewing as it passes through the head. This audio data is then processed by a custom circuit board. We collected data with 14 participants for 32 hours in free-living conditions and achieved accuracy exceeding 92.8\% and F1 score exceeding 77.5\% for eating detection with 1-minute resolution. \par Second, we adapted Auracle for measuring children's eating behavior, and improved the accuracy and robustness of the eating-activity detection algorithms. We used this improved prototype in a laboratory study with a sample of 10 children for 60 total sessions and collected 22.3 hours of data in both meal and snack scenarios. Overall, we achieved 95.5\% accuracy and 95.7\% F1 score for eating detection with 1-minute resolution. \par Third, we developed a computer-vision approach for eating detection in free-living scenarios. Using a miniature head-mounted camera, we collected data with 10 participants for about 55 hours. The camera was fixed under the brim of a cap, pointing to the mouth of the wearer and continuously recording video (but not audio) throughout their normal daily activity. We evaluated performance for eating detection using four different Convolutional Neural Network (CNN) models. The best model achieved 90.9\% accuracy and 78.7\% F1 score for eating detection with 1-minute resolution. Finally, we validated the feasibility of deploying the 3D CNN model in wearable or mobile platforms when considering computation, memory, and power constraints.},
}

@Misc{bi:auracle-patent,
  author       = {Shengjie Bi and Tao Wang and Nicole Tobias and Josephine Nordrum and Robert Halvorsen and Ron Peterson and Kelly Caine and Xing-Dong Yang and Kofi Odame and Ryan Halter and Jacob Sorber and David Kotz},
  title        = {{System for detecting eating with sensor mounted by the ear}},
  howpublished = {U.S. Patent Application PCT/US2019/044317; Worldwide Patent Application WO2020028481A9},
  year         = 2021,
  month        = {February},
  day          = 1,
  URL          = {https://www.cs.dartmouth.edu/~kotz/research/bi-auracle-patent/index.html},
  note         = {Priority date 2018-07-31; Filed 2019-07-31; Amended 2021-02-01},
  abstract     = {A wearable device for detecting eating episodes uses a contact microphone to provide audio signals through an analog front end to an analog-to-digital converter to digitize the audio and provide digitized audio to a processor; and a processor configured with firmware in a memory to extract features from the digitized audio. A classifier determines eating episodes from the extracted features. In embodiments, messages describing the detected eating episodes are transmitted to a cell phone, insulin pump, or camera configured to record video of the wearer's mouth.},
}

@InProceedings{bi:children,
  author    = {Shengjie Bi and Yiyang Lu and Nicole Tobias and Ella Ryan and Travis Masterson and Sougata Sen and Ryan Halter and Jacob Sorber and Diane Gilbert-Diamond and David Kotz},
  title     = {{Measuring children's eating behavior with a wearable device}},
  booktitle = {{Proceedings of the IEEE International Conference on Healthcare Informatics (ICHI)}},
  year      = 2020,
  month     = {December},
  publisher = {IEEE},
  copyright = {IEEE},
  DOI       = {10.1109/ICHI48887.2020.9374304},
  URL       = {https://www.cs.dartmouth.edu/~kotz/research/bi-children/index.html},
  abstract  = {Poor eating habits in children and teenagers can lead to obesity, eating disorders, or life-threatening health problems. Although researchers have studied children's eating behavior for decades, the research community has had limited technology to support the observation and measurement of fine-grained details of a child's eating behavior. In this paper, we present the feasibility of adapting the Auracle, an existing research-grade earpiece designed to automatically and unobtrusively recognize eating behavior in adults, for measuring children's eating behavior. We identified and addressed several challenges pertaining to monitoring eating behavior in children, paying particular attention to device fit and comfort. We also improved the accuracy and robustness of the eating-activity detection algorithms. We used this improved prototype in a lab study with a sample of 10 children for 60 total sessions and collected 22.3 hours of data in both meal and snack scenarios. Overall, we achieved an accuracy exceeding 85.0\% and an F1 score exceeding 84.2\% for eating detection with a 3-second resolution, and a 95.5\% accuracy and a 95.7\% F1 score for eating detection with a 1-minute resolution.},
}

@Article{bi:ubicomp18,
  author    = {Shengjie Bi and Tao Wang and Nicole Tobias and Josephine Nordrum and Shang Wang and George Halvorsen and Sougata Sen and Ronald Peterson and Kofi Odame and Kelly Caine and Ryan Halter and Jacob Sorber and David Kotz},
  title     = {{Auracle: Detecting Eating Episodes with an Ear-Mounted Sensor}},
  journal   = {Proceedings of the ACM on Interactive, Mobile, Wearable and Ubiquitous Technologies (IMWUT) (Ubicomp)},
  year      = 2018,
  month     = {September},
  volume    = 2,
  number    = 3,
  articleno = 92,
  numpages  = 27,
  publisher = {ACM},
  copyright = {ACM},
  DOI       = {10.1145/3264902},
  URL       = {https://www.cs.dartmouth.edu/~kotz/research/bi-ubicomp18/index.html},
  abstract  = {In this paper, we propose Auracle, a wearable earpiece that can automatically recognize eating behavior. More specifically, in free-living conditions, we can recognize when and for how long a person is eating. Using an off-the-shelf contact microphone placed behind the ear, Auracle captures the sound of a person chewing as it passes through the bone and tissue of the head. This audio data is then processed by a custom analog/digital circuit board. To ensure reliable (yet comfortable) contact between microphone and skin, all hardware components are incorporated into a 3D-printed behind-the-head framework. We collected field data with 14 participants for 32 hours in free-living conditions and additional eating data with 10 participants for 2 hours in a laboratory setting. We achieved accuracy exceeding 92.8\% and F1 score exceeding 77.5\% for eating detection. Moreover, Auracle successfully detected 20--24 eating episodes (depending on the metrics) out of 26 in free-living conditions. We demonstrate that our custom device could sense, process, and classify audio data in real time. Additionally, we estimate Auracle can last 28.1 hours with a 110 mAh battery while communicating its observations of eating behavior to a smartphone over Bluetooth.},
}

@InProceedings{bi:mobisys17,
  author    = {Shengjie Bi and Ellen Davenport and Jun Gong and Ronald Peterson and Kevin Storer and Tao Wang and Kelly Caine and Ryan Halter and David Kotz and Kofi Odame and Jacob Sorber and Xing-Dong Yang},
  title     = {{Poster: Auracle --- A Wearable Device for Detecting and Monitoring Eating Behavior}},
  booktitle = {{Proceedings of the ACM International Conference on Mobile Systems, Applications, and Services (MobiSys)}},
  year      = 2017,
  month     = {June},
  pages     = 176,
  publisher = {ACM},
  copyright = {ACM},
  DOI       = {10.1145/3081333.3089320},
  URL       = {https://www.cs.dartmouth.edu/~kotz/research/bi-mobisys17/index.html},
  abstract  = {The Auracle aims to be a wearable earpiece that detects eating behavior, to be fielded by health-science researchers in their efforts to study eating behavior and ultimately to develop interventions useful to individuals striving to address chronic disease related to eating.},
}

@InProceedings{bi:wearsys17,
  author    = {Shengjie Bi and Tao Wang and Ellen Davenport and Ronald Peterson and Ryan Halter and Jacob Sorber and David Kotz},
  title     = {{Toward a Wearable Sensor for Eating Detection}},
  booktitle = {{Proceedings of the ACM Workshop on Wearable Systems and Applications (WearSys)}},
  year      = 2017,
  month     = {June},
  pages     = {17--22},
  publisher = {ACM},
  copyright = {ACM},
  DOI       = {10.1145/3089351.3089355},
  URL       = {https://www.cs.dartmouth.edu/~kotz/research/bi-wearsys17/index.html},
  abstract  = {Researchers strive to understand eating behavior as a means to develop diets and interventions that can help people achieve and maintain a healthy weight, recover from eating disorders, or manage their diet and nutrition for personal wellness. A major challenge for eating-behavior research is to understand when, where, what, and how people eat. In this paper, we evaluate sensors and algorithms designed to detect eating activities, more specifically, when people eat. We compare two popular methods for eating recognition (based on acoustic and electromyography (EMG) sensors) individually and combined. We built a data-acquisition system using two off-the-shelf sensors and conducted a study with 20 participants. Our preliminary results show that the system we implemented can detect eating with an accuracy exceeding 90.9\% while the crunchiness level of food varies. We are developing a wearable system that can capture, process, and classify sensor data to detect eating in real-time.},
}