{"package":"akto-llm-guard","ecosystem":"pypi","exists":true,"latest_version":"0.3.17","repository":"","license":"MIT","description":"LLM-Guard is a comprehensive tool designed to fortify the security of Large Language Models (LLMs). By offering sanitization, detection of harmful language, prevention of data leakage, and resistance against prompt injection attacks, LLM-Guard ensures that your interactions with LLMs remain safe and secure.","downloads_weekly":125,"deprecated":false,"health":{"score":43},"_cache":"db_only_bot","_partial":true,"_response_ms":1,"_powered_by":"depscope.dev — bot fast path (DB-only)","recommendation":{"action":"review"}}