{"package":"arbis-llmwrap","ecosystem":"pypi","latest_version":"0.3.7","description":"Decorator to wrap LLM calls for production use with flexible prompt binding.","license":"MIT","license_risk":"permissive","commercial_use_notes":"Permissive: commercial closed-source use OK; preserve the copyright notice.","homepage":"https://pypi.org/project/arbis-llmwrap/","repository":"","downloads_weekly":302,"health":{"score":59,"risk":"high","breakdown":{"maintenance":25,"popularity":3,"security":25,"maturity":6,"community":0},"deprecated":false,"max_score":100},"vulnerabilities":{"count":0,"critical":0,"high":0,"medium":0,"low":0,"details":[]},"versions":{"latest":"0.3.7","total_count":7,"recent":["0.3.1","0.3.2","0.3.3","0.3.4","0.3.5","0.3.6","0.3.7"]},"metadata":{"deprecated":false,"deprecated_message":null,"maintainers_count":0,"first_published":null,"last_published":"2026-04-08T02:23:07.546928Z","dependencies_count":6,"dependencies":["requests>=2.31","cryptography>=42.0","cython>=3.0; extra == \"dev\"","wheel; extra == \"dev\"","openai<2.31.0,>=2.30.0; extra == \"integration\"","jiter<0.14.0,>=0.13.0; extra == \"integration\""]},"github_stats":null,"bundle":null,"typescript":null,"known_issues":{"bugs_count":0,"bugs_severity":{},"status_breakdown":{},"link":null,"scope":"none"},"historical_compromise":null,"recommendation":{"action":"use_with_caution","issues":["Moderate health score (59/100) — verify manually"],"use_version":"0.3.7","version_hint":null,"summary":"arbis-llmwrap@0.3.7 low health (59/100) — consider alternatives"},"version_scoped":null,"_meta":{"endpoint":"check","tier":"full","philosophy":"DepScope is free. Use the cheapest endpoint that answers your real question.","cheaper_alternatives":[{"endpoint":"/api/exists/pypi/arbis-llmwrap","tokens_estimated":12,"use_when":"you only need to know if the package exists (hallucination guard)"},{"endpoint":"/api/health/pypi/arbis-llmwrap","tokens_estimated":80,"use_when":"you only need a 0-100 score for go/no-go (>=70 = safe)"},{"endpoint":"/api/prompt/pypi/arbis-llmwrap","tokens_estimated":280,"use_when":"you want a plain-text LLM-friendly brief instead of JSON"},{"endpoint":"POST /api/check_bulk","tokens_estimated":60,"use_when":"you have 5+ packages to check; sends one round-trip instead of N"}],"docs":"https://depscope.dev/integrate","hint_bulk":"You've called /api/check 54 times in 60s. Save bandwidth + tokens with POST /api/check_bulk (1 round-trip for N pkgs)."},"requested_version":null,"_cache":"miss","_response_ms":643,"_powered_by":"depscope.dev — free package intelligence for AI agents","typosquat":{"is_suspected":false},"maintainer_trust":{"available":false},"malicious":{"is_malicious":false},"scorecard":{"available":false},"quality":{"available":false},"version_history_summary":{"total_versions":7,"first_release_age_days":null,"last_release_days_ago":25,"avg_days_between_releases":null,"release_velocity":"active"}}