{"payload":{"pageCount":1,"repositories":[{"type":"Public","name":"pytorch-lightning","owner":"Lightning-AI","isFork":false,"description":"Pretrain, finetune and deploy AI models on multiple GPUs, TPUs with zero code changes.","allTopics":["python","data-science","machine-learning","ai","deep-learning","pytorch","artificial-intelligence"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":55,"issueCount":762,"starsCount":27259,"forksCount":3270,"license":"Apache License 2.0","participation":[41,13,33,25,27,18,38,29,36,25,31,17,28,33,24,41,32,11,17,19,39,17,35,18,13,19,19,15,3,17,13,19,16,24,25,20,21,11,9,36,11,3,1,8,2,8,2,4,13,5,16,9],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-13T02:27:32.821Z"}},{"type":"Public","name":"litdata","owner":"Lightning-AI","isFork":false,"description":"Streamline data pipelines for AI. Process datasets across 1000s of machines, and optimize data for blazing fast model training.","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":5,"issueCount":31,"starsCount":224,"forksCount":21,"license":"Apache License 2.0","participation":[0,0,0,1,1,0,2,1,0,0,0,0,1,0,2,0,2,2,4,5,15,3,8,1,6,2,2,3,1,1,3,9,7,9,9,21,30,11,7,6,1,11,1,3,4,3,4,4,2,4,6,4],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-13T00:40:05.600Z"}},{"type":"Public","name":"LitServe","owner":"Lightning-AI","isFork":false,"description":"Deploy AI models at scale. High-throughput serving engine for AI/ML models that uses the latest state-of-the-art model deployment techniques.","allTopics":["api","ai","serving"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":1,"issueCount":13,"starsCount":115,"forksCount":11,"license":"Apache License 2.0","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,1,0,0,0,0,11,0,0,0,18,0,1,3,3,1,2,5,17,23,9,5,8,16,6,23,6],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-13T00:33:19.653Z"}},{"type":"Public","name":"litgpt","owner":"Lightning-AI","isFork":false,"description":"Pretrain, finetune, deploy 20+ LLMs on your own data. Uses state-of-the-art techniques: flash attention, FSDP, 4-bit, LoRA, and more.","allTopics":["ai","deep-learning","artificial-intelligence","large-language-models","llm","llms","llm-inference"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":24,"issueCount":180,"starsCount":7830,"forksCount":786,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-13T05:00:11.296Z"}},{"type":"Public","name":"torchmetrics","owner":"Lightning-AI","isFork":false,"description":"Torchmetrics - Machine learning metrics for distributed, scalable PyTorch applications.","allTopics":["python","data-science","machine-learning","deep-learning","metrics","pytorch","analyses"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":18,"issueCount":74,"starsCount":1989,"forksCount":388,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-12T22:33:38.554Z"}},{"type":"Public","name":"lightning-thunder","owner":"Lightning-AI","isFork":false,"description":"Make PyTorch models up to 40% faster! Thunder is a source to source compiler for PyTorch. 
- lightning-thunder (Python, Apache-2.0): 1,056 stars · 59 forks · 133 open issues · 19 open PRs · last pushed 2024-06-13
  Make PyTorch models up to 40% faster. Thunder is a source-to-source compiler for PyTorch that can use different hardware executors at once, across one or thousands of GPUs.
- lightning-Habana (Python, Apache-2.0): 20 stars · 5 forks · 1 open issue · 3 open PRs · last pushed 2024-06-12
  Lightning support for Intel Habana accelerators.
  Topics: python, machine-learning, deep-learning, pytorch
- utilities (Python, Apache-2.0): 46 stars · 13 forks · 3 open issues · 3 open PRs · last pushed 2024-06-10
  Common Python utilities and GitHub Actions used across the Lightning ecosystem.
  Topics: ci-cd, developer-tools, github-actions
- ecosystem-ci (Python, Apache-2.0): 45 stars · 14 forks · 6 open issues · 9 open PRs · last pushed 2024-06-13
  Automate issue discovery for your projects against Lightning nightly builds and releases.
  Topics: lightweight, deep-learning, integration-testing, ci-cd, compatibility-testing, python
- tutorials (Python, Apache-2.0): 272 stars · 74 forks · 20 open issues · 27 open PRs · last pushed 2024-04-06
  Collection of PyTorch Lightning tutorials written as rich scripts and automatically converted to IPython notebooks.
  Topics: machine-learning, lightning, deep-learning, jupyter-notebook, tutorials, notebooks, python-scripts
- lightning-Graphcore (Python, Apache-2.0, archived): 7 stars · 4 forks · 2 open issues · 6 open PRs · last pushed 2024-02-01
  No description.
- lit-llama (Python, Apache-2.0): 5,860 stars · 505 forks · 107 open issues · 13 open PRs · last pushed 2024-01-05
  Implementation of the LLaMA language model based on nanoGPT. Supports flash attention, Int8 and GPTQ 4-bit quantization, LoRA and LLaMA-Adapter fine-tuning, and pre-training. Apache 2.0 licensed.
- Lightning-multinode-templates (Python, Apache-2.0): 6 stars · 1 fork · 1 open issue · 1 open PR · last pushed 2023-11-26
  Multi-node training templates for PyTorch Lightning.
- deep-learning-project-template (Python, Apache-2.0, template): 1,216 stars · 267 forks · 11 open issues · 3 open PRs · last pushed 2023-09-20
  PyTorch Lightning code guideline for conferences.
- lm-evaluation-harness (Python, MIT, fork): 3 stars · 1,454 forks · 0 open issues · 0 open PRs · last pushed 2023-06-05
  A framework for few-shot evaluation of autoregressive language models.
- forked-pdb (Python, Apache-2.0, archived): 28 stars · 6 forks · 0 open issues · 0 open PRs · last pushed 2022-11-05
  Python pdb for multiple processes.
- engineering-class (Python, no license): 129 stars · 13 forks · 0 open issues · 0 open PRs · last pushed 2022-08-29
  Lightning Bits: Engineering for Researchers course repository.
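To make the pytorch-lightning entry concrete, here is a minimal, hedged sketch of the Trainer workflow its description refers to: the model code stays the same and hardware choices are passed as Trainer arguments. It assumes Lightning 2.x (`import lightning as L`); the tiny regression module and random tensors are illustrative assumptions, not taken from the repository.

```python
# Minimal sketch, assuming lightning >= 2.x is installed.
# The model and random data are illustrative only.
import torch
from torch import nn
from torch.utils.data import DataLoader, TensorDataset
import lightning as L


class LitRegressor(L.LightningModule):
    def __init__(self):
        super().__init__()
        self.model = nn.Linear(32, 1)

    def training_step(self, batch, batch_idx):
        x, y = batch
        loss = nn.functional.mse_loss(self.model(x), y)
        self.log("train_loss", loss)
        return loss

    def configure_optimizers(self):
        return torch.optim.Adam(self.parameters(), lr=1e-3)


if __name__ == "__main__":
    data = TensorDataset(torch.randn(256, 32), torch.randn(256, 1))
    loader = DataLoader(data, batch_size=32)
    # Hardware is a Trainer argument, not a model change:
    # e.g. accelerator="gpu", devices=4 would train the same module on 4 GPUs.
    trainer = L.Trainer(max_epochs=1, accelerator="auto", devices="auto")
    trainer.fit(LitRegressor(), loader)
```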
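Similarly, a minimal sketch of the update/compute pattern behind the torchmetrics entry, assuming torchmetrics 1.x; the five-class setup and random predictions are illustrative assumptions. Metrics accumulate state over batches and aggregate on compute, which is what makes them usable in distributed training.

```python
# Minimal sketch, assuming torchmetrics >= 1.x; data is random for illustration.
import torch
from torchmetrics.classification import MulticlassAccuracy

metric = MulticlassAccuracy(num_classes=5)

# Accumulate per-batch statistics, then compute the aggregate value;
# in a DDP job the same update/compute pattern syncs state across processes.
for _ in range(10):
    preds = torch.randn(16, 5).softmax(dim=-1)
    target = torch.randint(0, 5, (16,))
    metric.update(preds, target)

print(metric.compute())  # accuracy aggregated over all 10 batches
metric.reset()
```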