{"package":"bit-ttt-engine","ecosystem":"pypi","latest_version":"0.8.0","description":"Fast local LLM inference with TTT (Test-Time Training) and LoRA — the model that learns while it runs","license":"MIT","license_risk":"permissive","commercial_use_notes":"Permissive: commercial closed-source use OK; preserve the copyright notice.","homepage":"https://github.com/imonoonoko/Bit-TTT-Engine","repository":"https://github.com/imonoonoko/Bit-TTT-Engine","downloads_weekly":3,"health":{"score":50,"risk":"high","breakdown":{"maintenance":20,"popularity":0,"security":25,"maturity":3,"community":2},"deprecated":false,"max_score":100},"vulnerabilities":{"count":0,"critical":0,"high":0,"medium":0,"low":0,"details":[]},"versions":{"latest":"0.8.0","total_count":5,"recent":["0.6.0","0.6.1","0.6.2","0.7.0","0.8.0"]},"metadata":{"deprecated":false,"deprecated_message":null,"maintainers_count":1,"first_published":null,"last_published":"2026-02-06T11:01:39.335422Z","dependencies_count":6,"dependencies":["tokenizers>=0.19","huggingface-hub>=0.20","bit-ttt-engine[server]; extra == \"all\"","fastapi>=0.100; extra == \"server\"","uvicorn>=0.20; extra == \"server\"","sse-starlette>=1.6; extra == \"server\""]},"github_stats":null,"bundle":null,"typescript":null,"known_issues":{"bugs_count":0,"bugs_severity":{},"status_breakdown":{},"link":null,"scope":"none"},"historical_compromise":null,"recommendation":{"action":"safe_to_use","issues":[],"use_version":"0.8.0","version_hint":null,"summary":"bit-ttt-engine@0.8.0 is safe to use (health: 50/100)"},"version_scoped":null,"requested_version":null,"_cache":"miss","_response_ms":797,"_powered_by":"depscope.dev — free package intelligence for AI agents","typosquat":{"is_suspected":false},"maintainer_trust":{"available":false},"malicious":{"is_malicious":false},"scorecard":{"available":false},"quality":{"available":false},"version_history_summary":{"total_versions":5,"first_release_age_days":null,"last_release_days_ago":84,"avg_days_between_releases":null,"release_velocity":"active"}}