{"payload":{"pageCount":1,"repositories":[{"type":"Public","name":"highlow_release","owner":"Human-Signals-Lab","isFork":false,"description":"The supplementary information for the Interspeech 2024 paper.","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":0,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-23T19:30:36.459Z"}},{"type":"Public","name":"AudioIMU","owner":"Human-Signals-Lab","isFork":false,"description":"","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":5,"forksCount":0,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-02-05T23:29:39.982Z"}},{"type":"Public","name":"LAPNet-HAR","owner":"Human-Signals-Lab","isFork":false,"description":"","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":3,"forksCount":3,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2022-12-15T13:56:01.940Z"}},{"type":"Public","name":"Sound-and-Wrist-Motion-for-Activities-of-Daily-Living-with-Smartwatches","owner":"Human-Signals-Lab","isFork":false,"description":"Code for: \"Leveraging Sound and Wrist Motion to Detect Activities of Daily Living with Commodity Smartwatches\"","allTopics":["activity-recognition","inertial-sensors","acoustic-model","smartwatch-data","machine-learning","deep-learning","pytorch"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":6,"forksCount":2,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2022-10-24T21:19:33.694Z"}},{"type":"Public","name":"Eating-Detection-Wrist-Inertial","owner":"Human-Signals-Lab","isFork":false,"description":"Code and data for paper \"A Practical Approach for Recognizing Eating Moments with Wrist-Mounted Inertial Sensing\" from Ubicomp 2015","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":0,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2022-08-02T15:41:15.127Z"}},{"type":"Public","name":"Acoustic-Activity-Recognition-Bounded-by-Conversational-Assistant-Interactions","owner":"Human-Signals-Lab","isFork":false,"description":"Code for \"Ok Google, What Am I Doing? Acoustic Activity Recognition Bounded by Conversational Assistant Interactions\"","allTopics":["deep-learning","activity-recognition","acoustic-model","conversational-assistant"],"primaryLanguage":{"name":"G-code","color":"#D08CF2"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":0,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2021-03-31T17:01:23.132Z"}}],"repositoryCount":6,"userInfo":null,"searchable":true,"definitions":[],"typeFilters":[{"id":"all","text":"All"},{"id":"public","text":"Public"},{"id":"source","text":"Sources"},{"id":"fork","text":"Forks"},{"id":"archived","text":"Archived"},{"id":"template","text":"Templates"}],"compactMode":false},"title":"Human-Signals-Lab repositories"}