{"package":"pyspark","ecosystem":"pypi","latest_version":"4.1.1","description":"Apache Spark Python API","license":"Apache-2.0","homepage":"https://github.com/apache/spark/tree/master/python","repository":"https://github.com/apache/spark/tree/master/python","downloads_weekly":0,"health":{"score":57,"risk":"high","breakdown":{"maintenance":15,"popularity":0,"security":25,"maturity":15,"community":2},"deprecated":false,"max_score":100},"vulnerabilities":{"count":0,"critical":0,"high":0,"medium":0,"low":0,"details":[]},"versions":{"latest":"4.1.1","total_count":67,"recent":["3.5.4","3.5.5","3.5.6","3.5.7","3.5.8","4.0.0.dev1","4.0.0.dev2","4.0.0","4.0.1","4.0.2","4.1.0.dev1","4.1.0.dev2","4.1.0.dev3","4.1.0.dev4","4.1.0","4.1.1","4.2.0.dev1","4.2.0.dev2","4.2.0.dev3","4.2.0.dev4"]},"metadata":{"deprecated":false,"deprecated_message":null,"maintainers_count":1,"first_published":null,"last_published":"2026-01-09T09:38:38.280630Z","dependencies_count":24,"dependencies":["py4j<0.10.9.10,>=0.10.9.7","numpy>=1.21; extra == \"ml\"","numpy>=1.21; extra == \"mllib\"","pandas>=2.2.0; extra == \"sql\"","pyarrow>=15.0.0; extra == \"sql\"","numpy>=1.21; extra == \"sql\"","pandas>=2.2.0; extra == \"pandas-on-spark\"","pyarrow>=15.0.0; extra == \"pandas-on-spark\"","numpy>=1.21; extra == \"pandas-on-spark\"","pandas>=2.2.0; extra == \"connect\"","pyarrow>=15.0.0; extra == \"connect\"","grpcio>=1.76.0; extra == \"connect\"","grpcio-status>=1.76.0; extra == \"connect\"","googleapis-common-protos>=1.71.0; extra == \"connect\"","zstandard>=0.25.0; extra == \"connect\"","numpy>=1.21; extra == \"connect\"","pandas>=2.2.0; extra == \"pipelines\"","pyarrow>=15.0.0; extra == \"pipelines\"","numpy>=1.21; extra == \"pipelines\"","grpcio>=1.76.0; extra == \"pipelines\"","grpcio-status>=1.76.0; extra == \"pipelines\"","googleapis-common-protos>=1.71.0; extra == \"pipelines\"","zstandard>=0.25.0; extra == \"pipelines\"",
"pyyaml>=3.11; extra == \"pipelines\""]},"recommendation":{"action":"safe_to_use","issues":[],"use_version":"4.1.1","version_hint":null,"summary":"pyspark@4.1.1 is safe to use (health: 57/100)"},"requested_version":null,"_cache":"miss","_response_ms":351,"_powered_by":"depscope.dev — free package intelligence for AI agents"}