{"package":"any-llm-sdk","ecosystem":"pypi","latest_version":"1.13.0","description":null,"license":"Apache-2.0","homepage":"https://pypi.org/project/any-llm-sdk/","repository":"https://github.com/mozilla-ai/any-llm","downloads_weekly":74478,"health":{"score":75,"risk":"moderate","breakdown":{"maintenance":25,"popularity":10,"security":25,"maturity":15,"community":0},"deprecated":false,"max_score":100},"vulnerabilities":{"count":0,"critical":0,"high":0,"medium":0,"low":0,"details":[]},"versions":{"latest":"1.13.0","total_count":66,"recent":["1.4.2","1.4.3","1.5.0","1.6.0","1.6.1","1.6.2","1.7.0","1.8.0","1.8.1","1.8.2","1.8.3","1.8.4","1.8.5","1.8.6","1.9.0","1.10.0","1.11.0","1.11.1","1.12.1","1.13.0"]},"metadata":{"deprecated":false,"deprecated_message":null,"maintainers_count":0,"first_published":null,"last_published":"2026-03-23T10:27:28.306341Z","dependencies_count":38,"dependencies":["pydantic<3,>2","openai>=1.99.3","openresponses-types>=2.3.0.post1","anthropic>=0.83.0","rich","httpx","typing_extensions>=4.4.0","any-llm-sdk[anthropic,azure,azureanthropic,azureopenai,bedrock,cerebras,cohere,databricks,deepseek,fireworks,gateway,gemini,groq,huggingface,inception,llama,llamacpp,llamafile,lmstudio,minimax,mistral,moonshot,mzai,nebius,ollama,openai,openrouter,perplexity,platform,portkey,sagemaker,sambanova,together,vertexai,vertexaianthropic,vllm,voyage,watsonx,xai,zai]; extra == \"all\"","any-llm-platform-client>=0.3.0; extra == \"platform\"","opentelemetry-sdk>=1.40.0; extra == \"platform\"","opentelemetry-exporter-otlp-proto-http>=1.40.0; extra == \"platform\"","mistralai>=1.9.3; extra == \"mistral\"","google-genai; extra == \"gemini\"","google-genai; extra == \"vertexai\"","anthropic[vertex]>=0.83.0; extra == \"vertexaianthropic\"","anthropic>=0.83.0; extra == \"azureanthropic\"","huggingface-hub; extra == \"huggingface\"","cohere; extra == \"cohere\"","cerebras_cloud_sdk>=1.23.0; extra == \"cerebras\"","groq; extra == \"groq\"","boto3; extra == \"bedrock\"","azure-ai-inference; extra == \"azure\"","ibm-watsonx-ai; python_version < \"3.14\" and extra == \"watsonx\"","together>=1.5.34; extra == \"together\"","ollama>=0.5.1; extra == \"ollama\"","voyageai; python_version < \"3.14\" and extra == \"voyage\"","xai-sdk>=1.0.1; extra == \"xai\"","boto3; extra == \"sagemaker\"","fastapi>=0.115.0; extra == \"gateway\"","uvicorn[standard]>=0.30.0; extra == \"gateway\"","sqlalchemy>=2.0.0; extra == \"gateway\"","alembic>=1.13.0; extra == \"gateway\"","pydantic-settings>=2.0.0; extra == \"gateway\"","pyyaml>=6.0; extra == \"gateway\"","click>=8.1.0; extra == \"gateway\"","python-dotenv>=1.0.0; extra == \"gateway\"","psycopg2-binary>=2.9.9; extra == \"gateway\"","prometheus-client>=0.20.0; extra == \"gateway\""]},"recommendation":{"action":"safe_to_use","issues":[],"use_version":"1.13.0","version_hint":null,"summary":"any-llm-sdk@1.13.0 is safe to use (health: 75/100)"},"requested_version":null,"_cache":"miss","_response_ms":459,"_powered_by":"depscope.dev — free package intelligence for AI agents"}