{"package":"flash-attn-layer-norm","ecosystem":"conda","latest_version":"2.8.3","description":"Flash Attention: Fast and Memory-Efficient Exact Attention","license":"BSD-3-Clause","license_risk":"permissive","commercial_use_notes":"Permissive: commercial closed-source use OK; preserve the copyright notice.","homepage":"https://github.com/Dao-AILab/flash-attention","repository":"","downloads_weekly":1258,"health":{"score":59,"risk":"high","breakdown":{"maintenance":20,"popularity":6,"security":25,"maturity":6,"community":2},"deprecated":false,"max_score":100},"vulnerabilities":{"count":0,"critical":0,"high":0,"medium":0,"low":0,"details":[]},"versions":{"latest":"2.8.3","total_count":6,"recent":["2.6.3","2.7.0.post2","2.7.2","2.7.4","2.8.2","2.8.3"]},"metadata":{"deprecated":false,"deprecated_message":null,"maintainers_count":1,"first_published":"2024-11-19 00:22:28.802000+00:00","last_published":"2026-02-04 04:43:37.042000+00:00","dependencies_count":0,"dependencies":[]},"github_stats":null,"bundle":null,"typescript":null,"known_issues":{"bugs_count":0,"bugs_severity":{},"status_breakdown":{},"link":null,"scope":"none"},"historical_compromise":null,"recommendation":{"action":"use_with_caution","issues":["Moderate health score (59/100) — verify manually"],"use_version":"2.8.3","version_hint":null,"summary":"flash-attn-layer-norm@2.8.3 low health (59/100) — consider alternatives"},"version_scoped":null,"requested_version":null,"_cache":"miss","_response_ms":494,"_powered_by":"depscope.dev — free package intelligence for AI agents","typosquat":{"is_suspected":false},"maintainer_trust":{"available":false},"malicious":{"is_malicious":false},"scorecard":{"available":false},"quality":{"available":false},"version_history_summary":{"total_versions":6,"first_release_age_days":529,"last_release_days_ago":87,"avg_days_between_releases":106,"release_velocity":"active"}}