{"payload":{"pageCount":1,"repositories":[{"type":"Public","name":"ziplab.github.io","owner":"ziplab","isFork":true,"description":"This is the website project for Zhuang Intelligent Processing Lab","allTopics":[],"primaryLanguage":{"name":"JavaScript","color":"#f1e05a"},"pullRequestCount":0,"issueCount":0,"starsCount":1,"forksCount":41919,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-09-14T01:19:53.477Z"}},{"type":"Public","name":"LongVLM","owner":"ziplab","isFork":false,"description":"","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":1,"issueCount":5,"starsCount":48,"forksCount":3,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-30T15:15:58.338Z"}},{"type":"Public","name":"ZipLLM","owner":"ziplab","isFork":false,"description":"","allTopics":[],"primaryLanguage":null,"pullRequestCount":0,"issueCount":0,"starsCount":1,"forksCount":0,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-27T06:45:41.419Z"}},{"type":"Public","name":"mvsplat","owner":"ziplab","isFork":true,"description":"🌊[arXiv'24] MVSplat: Efficient 3D Gaussian Splatting from Sparse Multi-View Images","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":32,"license":"Other","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-03-25T23:59:50.255Z"}},{"type":"Public","name":"PTQD","owner":"ziplab","isFork":false,"description":"The official implementation of PTQD: Accurate Post-Training Quantization for Diffusion Models","allTopics":[],"primaryLanguage":{"name":"Jupyter Notebook","color":"#DA5B0B"},"pullRequestCount":0,"issueCount":7,"starsCount":85,"forksCount":4,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-03-12T11:58:39.125Z"}},{"type":"Public","name":"QLLM","owner":"ziplab","isFork":false,"description":"[ICLR 2024] This is the official PyTorch implementation of \"QLLM: Accurate and Efficient Low-Bitwidth Quantization for Large Language Models\"","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":1,"starsCount":19,"forksCount":0,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-03-12T07:00:10.981Z"}},{"type":"Public","name":"MPVSS","owner":"ziplab","isFork":false,"description":"","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":2,"starsCount":25,"forksCount":1,"license":"Other","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-02-29T01:22:24.246Z"}},{"type":"Public","name":"SN-Netv2","owner":"ziplab","isFork":false,"description":"[ECCV 2024] This is the official implementation of \"Stitched ViTs are Flexible Vision Backbones\".","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":22,"forksCount":1,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-01-23T08:40:57.000Z"}},{"type":"Public","name":"SPViT","owner":"ziplab","isFork":false,"description":"[TPAMI 2024] This is the official repository for our paper: ''Pruning Self-attentions into Convolutional Layers in Single 
Path''.","allTopics":["model-compression","network-pruning","vision-transformer"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":2,"starsCount":105,"forksCount":14,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-12-30T01:41:58.050Z"}},{"type":"Public","name":"Stitched_LLaMA","owner":"ziplab","isFork":false,"description":"[CVPR 2024] A framework to fine-tune LLaMAs on instruction-following task and get many Stitched LLaMAs with customized number of parameters, e.g., Stitched LLaMA 8B, 9B, and 10B...","allTopics":[],"primaryLanguage":null,"pullRequestCount":0,"issueCount":1,"starsCount":7,"forksCount":0,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-12-01T02:04:15.882Z"}},{"type":"Public","name":"LITv2","owner":"ziplab","isFork":false,"description":"[NeurIPS 2022 Spotlight] This is the official PyTorch implementation of \"Fast Vision Transformers with HiLo Attention\"","allTopics":["detection","classification","segmentation","vision-transformer","pytorch"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":2,"starsCount":238,"forksCount":14,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-11-17T07:04:50.517Z"}},{"type":"Public","name":"SPT","owner":"ziplab","isFork":false,"description":"[ICCV 2023 oral] This is the official repository for our paper: ''Sensitivity-Aware Visual Parameter-Efficient Fine-Tuning''.","allTopics":["adapter","transfer-learning","lora","peft","prompt-tuning","parameter-efficient-fine-tuning"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":5,"starsCount":60,"forksCount":2,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-09-24T06:21:10.648Z"}},{"type":"Public","name":"efficient-stable-diffusion","owner":"ziplab","isFork":false,"description":"","allTopics":[],"primaryLanguage":null,"pullRequestCount":0,"issueCount":0,"starsCount":16,"forksCount":0,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-09-12T00:43:02.020Z"}},{"type":"Public","name":"SN-Net","owner":"ziplab","isFork":false,"description":"[CVPR 2023 Highlight] This is the official implementation of \"Stitchable Neural Networks\".","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":238,"forksCount":11,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-03-28T10:06:02.094Z"}},{"type":"Public","name":"FASeg","owner":"ziplab","isFork":false,"description":"[CVPR 2023] This is the official PyTorch implementation for \"Dynamic Focus-aware Positional Queries for Semantic Segmentation\".","allTopics":["semantic-segmentation","positional-encoding","detr"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":1,"starsCount":54,"forksCount":2,"license":"Other","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-03-04T23:42:55.383Z"}},{"type":"Public","name":"EcoFormer","owner":"ziplab","isFork":false,"description":"[NeurIPS 2022 Spotlight] This is the official PyTorch implementation of \"EcoFormer: Energy-Saving Attention with Linear 
Complexity\"","allTopics":["classification","vision-transformer","efficient-transformers","neurips-2022","pytorch"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":66,"forksCount":1,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2022-11-15T04:08:04.952Z"}},{"type":"Public","name":"STPT","owner":"ziplab","isFork":false,"description":"","allTopics":[],"primaryLanguage":null,"pullRequestCount":0,"issueCount":0,"starsCount":3,"forksCount":0,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2022-07-22T01:06:58.671Z"}},{"type":"Public","name":"LIT","owner":"ziplab","isFork":false,"description":"[AAAI 2022] This is the official PyTorch implementation of \"Less is More: Pay Less Attention in Vision Transformers\"","allTopics":["transformers","image-recognition","mlps"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":90,"forksCount":10,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2022-06-19T03:06:56.440Z"}},{"type":"Public","name":"HVT","owner":"ziplab","isFork":false,"description":"[ICCV 2021] Official implementation of \"Scalable Vision Transformers with Hierarchical Pooling\"","allTopics":["transformers","pytorch"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":30,"forksCount":5,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2021-12-30T02:50:41.291Z"}},{"type":"Public","name":"Mesa","owner":"ziplab","isFork":false,"description":"This is the official PyTorch implementation for \"Mesa: A Memory-saving Training Framework for Transformers\".","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":1,"starsCount":119,"forksCount":8,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2021-12-12T06:42:18.694Z"}},{"type":"Public","name":"SAQ","owner":"ziplab","isFork":false,"description":"This is the official PyTorch implementation for \"Sharpness-aware Quantization for Deep Neural Networks\".","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":4,"starsCount":40,"forksCount":7,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2021-11-25T01:32:41.081Z"}},{"type":"Public","name":"QTool","owner":"ziplab","isFork":false,"description":"Collections of model quantization algorithms. 
Any issues, please contact Peng Chen (blueardour@gmail.com)","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":5,"starsCount":68,"forksCount":16,"license":"Other","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2021-10-07T07:27:52.365Z"}},{"type":"Public","name":"One-shot-Human-Parsing","owner":"ziplab","isFork":true,"description":"(AAAI 2021) Progressive One-shot Human Parsing","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":8,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2021-06-23T07:07:06.912Z"}}],"repositoryCount":23,"userInfo":null,"searchable":true,"definitions":[],"typeFilters":[{"id":"all","text":"All"},{"id":"public","text":"Public"},{"id":"source","text":"Sources"},{"id":"fork","text":"Forks"},{"id":"archived","text":"Archived"},{"id":"template","text":"Templates"}],"compactMode":false},"title":"ziplab repositories"}
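For readers who want to regenerate a listing like the one above, the minimal sketch below queries GitHub's public REST API for the organization's repositories. The endpoint (GET /orgs/ziplab/repos) and the field names (stargazers_count, forks_count, pushed_at) are GitHub's own; the counts and dates it prints reflect the time of the request, so they will drift from the snapshot recorded here.

```python
# Minimal sketch: list ziplab's public repositories via GitHub's REST API.
# Unauthenticated requests are rate-limited but sufficient for a one-off query.
import json
import urllib.request

URL = "https://api.github.com/orgs/ziplab/repos?type=public&sort=pushed&per_page=100"

request = urllib.request.Request(URL, headers={"Accept": "application/vnd.github+json"})
with urllib.request.urlopen(request) as response:
    repos = json.load(response)

for repo in repos:
    license_name = repo["license"]["name"] if repo["license"] else "no license"
    print(repo["name"] + (" (fork)" if repo["fork"] else ""))
    print(f"    {repo['description'] or 'No description.'}")
    print(f"    {repo['language'] or 'n/a'} | {repo['stargazers_count']} stars | "
          f"{repo['forks_count']} forks | {license_name} | last pushed {repo['pushed_at'][:10]}")
```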