{"payload":{"header_redesign_enabled":false,"results":[{"id":"502482803","archived":false,"color":"#3572A5","followers":8752,"has_funding_file":false,"hl_name":"bigscience-workshop/petals","hl_trunc_description":"🌸 Run LLMs at home, BitTorrent-style. Fine-tuning and inference up to 10x faster than offloading","language":"Python","mirror":false,"owned_by_organization":true,"public":true,"repo":{"repository":{"id":502482803,"name":"petals","owner_id":82455566,"owner_login":"bigscience-workshop","updated_at":"2024-04-29T20:13:42.990Z","has_issues":true}},"sponsorable":false,"topics":["nlp","bloom","distributed-systems","machine-learning","deep-learning","chatbot","pytorch","falcon","transformer","neural-networks","llama","gpt","pretrained-models","language-models","volunteer-computing","pipeline-parallelism","guanaco","tensor-parallelism","large-language-models","llama2"],"type":"Public","help_wanted_issues_count":9,"good_first_issue_issues_count":5,"starred_by_current_user":false}],"type":"repositories","page":1,"page_count":1,"elapsed_millis":61,"errors":[],"result_count":1,"facets":[],"protected_org_logins":[],"topics":null,"query_id":"","logged_in":false,"sign_up_path":"/signup?source=code_search_results","sign_in_path":"/login?return_to=https%3A%2F%2Fgithub.com%2Fsearch%3Fq%3Drepo%253Abigscience-workshop%252Fpetals%2B%2Blanguage%253APython","metadata":null,"csrf_tokens":{"/bigscience-workshop/petals/star":{"post":"WQ5u8I5ueEUWVr4pfBTrGuOEGaolJ8jjDHxwAg60y7aIbSNixgWIzDzfeHNf8uBC9tOWnWZGHmtZeM3-9O2Uzg"},"/bigscience-workshop/petals/unstar":{"post":"9ie9nIvTGKT-DNjnlEU9IsTOIkpUf18ZW9BBDz7S_QCcX-awjQC3cpSmRDcudgPPgf9OD8g7nC3sWD5VZAAFDQ"},"/sponsors/batch_deferred_sponsor_buttons":{"post":"NNncVy724QndLxuAc1o_4g0EMwzl9cbLW9vZsaNkYEQfNV3PrBj5418F3_9Cpvzz8pMosNn5agAeeOFOC-QgMg"}}},"title":"Repository search results"}