@article{10.3844/jcssp.2025.704.712,
  article_type = {journal},
  title        = {Feature Extraction of Sit-to-Stand Transition Using Vision-Based Pose Estimation},
  author       = {Rizal, Achmad and Erfianto, Bayu and Hadiyoso, Sugondo and Istiqomah,},
  volume       = {21},
  number       = {3},
  year         = {2025},
  month        = {Feb},
  pages        = {704--712},
  doi          = {10.3844/jcssp.2025.704.712},
  url          = {https://thescipub.com/abstract/jcssp.2025.704.712},
  abstract     = {The Sit-to-Stand Transition (STS) and its reverse are important indicators of independence and physical health. For healthy people, this movement is easy to perform, but it can be difficult for people with health problems, such as those who have had an injury or a stroke. Sit-to-stand assessment can be used to detect spinal disorders; however, spinal kinematic measurement requires special instrumentation and is expensive. The use of Artificial Intelligence (AI) in pose estimation plays an important role in the assessment or detection of a person's movements, and video-based assessment can be an inexpensive, easy-to-use alternative. Therefore, this preliminary study proposes a sit-to-stand motion extraction method that uses vision-based pose estimation. STS times and trajectories were then calculated for two videos with different subjects. The results show different trends in both time and trajectory, indicating that STS time and trajectory are potential features for abnormality detection. The proposed method is expected to be useful in the medical field for detecting movement abnormalities, especially in sit-to-stand movements.},
  journal      = {Journal of Computer Science},
  publisher    = {Science Publications}
}