{"payload":{"header_redesign_enabled":false,"results":[{"id":"465794584","archived":false,"color":"#DA5B0B","followers":710,"has_funding_file":false,"hl_name":"Denis2054/Transformers-for-NLP-2nd-Edition","hl_trunc_description":"Transformer models from BERT to GPT-4, environments from Hugging Face to OpenAI. Fine-tuning, training, and prompt engineering examples. …","language":"Jupyter Notebook","mirror":false,"owned_by_organization":false,"public":true,"repo":{"repository":{"id":465794584,"name":"Transformers-for-NLP-2nd-Edition","owner_id":30811222,"owner_login":"Denis2054","updated_at":"2024-01-04T09:54:36.111Z","has_issues":true}},"sponsorable":false,"topics":["python","nlp","machine-learning","natural-language-processing","deep-learning","transformers","pytorch","openai","bert","trax","huggingface-transformers","roberta-model","dall-e","gpt-4","chatgpt","dall-e-api","chatgpt-api","gpt-3-5-turbo","gpt-4-api"],"type":"Public","help_wanted_issues_count":0,"good_first_issue_issues_count":0,"starred_by_current_user":false}],"type":"repositories","page":1,"page_count":1,"elapsed_millis":91,"errors":[],"result_count":1,"facets":[],"protected_org_logins":[],"topics":null,"query_id":"","logged_in":false,"sign_up_path":"/signup?source=code_search_results","sign_in_path":"/login?return_to=https%3A%2F%2Fgithub.com%2Fsearch%3Fq%3Drepo%253ADenis2054%252FTransformers-for-NLP-2nd-Edition%2B%2Blanguage%253A%2522Jupyter%2BNotebook%2522","metadata":null,"csrf_tokens":{"/Denis2054/Transformers-for-NLP-2nd-Edition/star":{"post":"MrDl89rG8JneZ5RdrygnRvIHEPwVYEVWEujG089ua3F1xkTynVFwAQxUggQ4iK0XHBguFcVmHsDER6lAC-9lqA"},"/Denis2054/Transformers-for-NLP-2nd-Edition/unstar":{"post":"7pxN9B-3ASNIKhZn0YezAJVOnEY2Z5DPPIOkUtuOdwse_4WyJ7Czo0rQ8DuNDYHkETWGno1IbAzUnBaCEwSrTQ"},"/sponsors/batch_deferred_sponsor_buttons":{"post":"3zwtxriY2qp6LAc_HKExv14sd5iY-r837QGGhW_yBMmVmKT2vyp7TYgc3b_UfKghaHfHzqG8Xb48lYGNzrKOTA"}}},"title":"Repository search results"}