{"payload":{"header_redesign_enabled":false,"results":[{"id":"180572200","archived":false,"color":"#3572A5","followers":2921,"has_funding_file":false,"hl_name":"dbiir/UER-py","hl_trunc_description":"Open Source Pre-training Model Framework in PyTorch & Pre-trained Model Zoo","language":"Python","mirror":false,"owned_by_organization":true,"public":true,"repo":{"repository":{"id":180572200,"name":"UER-py","owner_id":13671736,"owner_login":"dbiir","updated_at":"2024-05-09T11:12:55.984Z","has_issues":true}},"sponsorable":false,"topics":["natural-language-processing","model-zoo","pytorch","classification","bart","chinese","gpt","pegasus","ner","clue","albert","bert","fine-tuning","roberta","elmo","pre-training","gpt-2","t5","unilm","xlm-roberta"],"type":"Public","help_wanted_issues_count":3,"good_first_issue_issues_count":0,"starred_by_current_user":false}],"type":"repositories","page":1,"page_count":1,"elapsed_millis":61,"errors":[],"result_count":1,"facets":[],"protected_org_logins":[],"topics":null,"query_id":"","logged_in":false,"sign_up_path":"/signup?source=code_search_results","sign_in_path":"/login?return_to=https%3A%2F%2Fgithub.com%2Fsearch%3Fq%3Drepo%253Adbiir%252FUER-py%2B%2Blanguage%253APython","metadata":null,"csrf_tokens":{"/dbiir/UER-py/star":{"post":"F8vMpj0I8o9P7l_D_t6qRdVWLnuukpb-AXf0rMrCHYo6UzQVnyI5xBplajzKl2uH8K93k_EL7_yqKa0dpPCZ2A"},"/dbiir/UER-py/unstar":{"post":"g2RZEmc4wiuot8r_Dti4iLP2wYu1pRHc1IHkLVyjJJ0bf7N4umB_mVAw0jY2Wo670u7FlY6URrz3gc4aeWBIaQ"},"/sponsors/batch_deferred_sponsor_buttons":{"post":"qP964AXBq7GggRQA2FPFPPawUhYpe5SWQGMBN43Vc-f8yGqT3zYnUU4iwGBsPQD90BW3oa3JKU1MZ6TkFWNdiA"}}},"title":"Repository search results"}