{"package":"llm-dialog-manager","ecosystem":"pypi","latest_version":"0.5.3","description":"A Python package for managing LLM chat conversation history","license":"MIT","license_risk":"permissive","commercial_use_notes":"Permissive: commercial closed-source use OK; preserve the copyright notice.","homepage":"https://pypi.org/project/llm-dialog-manager/","repository":"","downloads_weekly":5185,"health":{"score":56,"risk":"high","breakdown":{"maintenance":10,"popularity":6,"security":25,"maturity":15,"community":0},"deprecated":false,"max_score":100},"vulnerabilities":{"count":0,"critical":0,"high":0,"medium":0,"low":0,"details":[]},"versions":{"latest":"0.5.3","total_count":2496,"recent":["0.1.2480","0.2.1","0.2.2","0.2.4","0.2.6","0.2.7","0.2.14","0.3.1","0.3.2","0.3.4","0.3.5","0.4.1","0.4.2","0.4.3","0.4.4","0.4.5","0.4.6","0.4.7","0.5.0","0.5.3"]},"metadata":{"deprecated":false,"deprecated_message":null,"maintainers_count":0,"first_published":null,"last_published":"2025-05-17T17:52:12.250011Z","dependencies_count":21,"dependencies":["openai>=1.54.2","anthropic>=0.39.0","google-generativeai>=0.1.0","python-dotenv>=1.0.0","typing-extensions>=4.0.0","uuid>=1.30","pytest>=8.0.0; extra == \"dev\"","pytest-asyncio>=0.21.1; extra == \"dev\"","pytest-cov>=4.1.0; extra == \"dev\"","black>=23.9.1; extra == \"dev\"","isort>=5.12.0; extra == \"dev\"","pytest>=6.0; extra == \"test\"","pytest-asyncio>=0.14.0; extra == \"test\"","pytest-cov>=2.0; extra == \"test\"","black>=22.0; extra == \"lint\"","isort>=5.0; extra == \"lint\"","pytest>=8.0.0; extra == \"all\"","pytest-asyncio>=0.21.1; extra == \"all\"","pytest-cov>=4.1.0; extra == \"all\"","black>=23.9.1; extra == \"all\"","isort>=5.12.0; extra == \"all\""]},"github_stats":null,"bundle":null,"typescript":null,"known_issues":{"bugs_count":0,"bugs_severity":{},"status_breakdown":{},"link":null,"scope":"none"},"historical_compromise":null,"recommendation":{"action":"use_with_caution","issues":["Moderate health score (56/100) — verify manually"],"use_version":"0.5.3","version_hint":null,"summary":"llm-dialog-manager@0.5.3 low health (56/100) — consider alternatives"},"version_scoped":null,"_meta":{"endpoint":"check","tier":"full","philosophy":"DepScope is free. Use the cheapest endpoint that answers your real question.","cheaper_alternatives":[{"endpoint":"/api/exists/pypi/llm-dialog-manager","tokens_estimated":12,"use_when":"you only need to know if the package exists (hallucination guard)"},{"endpoint":"/api/health/pypi/llm-dialog-manager","tokens_estimated":80,"use_when":"you only need a 0-100 score for go/no-go (>=70 = safe)"},{"endpoint":"/api/prompt/pypi/llm-dialog-manager","tokens_estimated":280,"use_when":"you want a plain-text LLM-friendly brief instead of JSON"},{"endpoint":"POST /api/check_bulk","tokens_estimated":60,"use_when":"you have 5+ packages to check; sends one round-trip instead of N"}],"docs":"https://depscope.dev/integrate","hint_bulk":"You've called /api/check 98 times in 60s. Save bandwidth + tokens with POST /api/check_bulk (1 round-trip for N pkgs)."},"requested_version":null,"_cache":"hit","_response_ms":0,"_powered_by":"depscope.dev — free package intelligence for AI agents","typosquat":{"is_suspected":false},"maintainer_trust":{"available":false},"malicious":{"is_malicious":false},"scorecard":{"available":false},"quality":{"available":false},"version_history_summary":{"total_versions":20,"first_release_age_days":null,"last_release_days_ago":351,"avg_days_between_releases":null,"release_velocity":"moderate"}}