{"payload":{"header_redesign_enabled":false,"results":[{"id":"698174643","archived":false,"color":"#3572A5","followers":88,"has_funding_file":false,"hl_name":"PKU-YuanGroup/Hallucination-Attack","hl_trunc_description":"Attack to induce LLMs within hallucinations","language":"Python","mirror":false,"owned_by_organization":true,"public":true,"repo":{"repository":{"id":698174643,"name":"Hallucination-Attack","owner_id":135824553,"owner_login":"PKU-YuanGroup","updated_at":"2024-05-17T08:48:15.526Z","has_issues":true}},"sponsorable":false,"topics":["nlp","machine-learning","deep-learning","ai-safety","adversarial-attacks","hallucinations","llm","llm-safety"],"type":"Public","help_wanted_issues_count":0,"good_first_issue_issues_count":0,"starred_by_current_user":false}],"type":"repositories","page":1,"page_count":1,"elapsed_millis":61,"errors":[],"result_count":1,"facets":[],"protected_org_logins":[],"topics":null,"query_id":"","logged_in":false,"sign_up_path":"/signup?source=code_search_results","sign_in_path":"/login?return_to=https%3A%2F%2Fgithub.com%2Fsearch%3Fq%3Drepo%253APKU-YuanGroup%252FHallucination-Attack%2B%2Blanguage%253APython","metadata":null,"csrf_tokens":{"/PKU-YuanGroup/Hallucination-Attack/star":{"post":"WzqMgEfGwXyHV_PTAfoRE5rWTAm22bQj35O44Hs8LOd2okK8H41QhrX-wg6gfXc2dUj8e-ojAyosNJZ6vuxkxg"},"/PKU-YuanGroup/Hallucination-Attack/unstar":{"post":"I5Rxg82VcMVamT3WHHgHYZL7F8w2pEjYYuDQn62v0kWxwdf9hA7vZfILrs5m6E_4lE3IoDh843BYDkdJGp9ZyA"},"/sponsors/batch_deferred_sponsor_buttons":{"post":"KHrg80380IX5VfFi83HBGtKJSdqD68ECyRtJaGpQElXJHicXw-0YP9ruD6kqpadgZDmuxb-ogdVVQaPh7YBD-Q"}}},"title":"Repository search results"}