{"package":"AIBias","ecosystem":"cran","latest_version":"0.1.1","description":"Longitudinal Bias Auditing for Sequential Decision Systems. Provides tools for detecting, quantifying, and visualizing algorithmic bias as a longitudinal process in repeated decision systems. Existing fairness metrics treat bias as a single-period snapshot; this package operationalizes the view that bias in sequential systems must be measured over time.","license":"MIT + file LICENSE","license_risk":"unknown","commercial_use_notes":"verify manually — license not parseable / not declared.","homepage":"https://github.com/causalfragility-lab/AIBias","repository":"https://github.com/causalfragility-lab/AIBias","downloads_weekly":153,"health":{"score":55,"risk":"high","breakdown":{"maintenance":25,"popularity":3,"security":25,"maturity":0,"community":2},"deprecated":false,"max_score":100},"vulnerabilities":{"count":0,"critical":0,"high":0,"medium":0,"low":0,"details":[]},"versions":{"latest":"0.1.1","total_count":0,"recent":[]},"metadata":{"deprecated":false,"deprecated_message":null,"maintainers_count":1,"first_published":"2026-04-06T23:08:14","last_published":"2026-04-06T21:30:16+00:00","dependencies_count":7,"dependencies":["dplyr","tidyr","ggplot2","rlang","cli","purrr","tibble"]},"github_stats":null,"bundle":null,"typescript":null,"known_issues":{"bugs_count":0,"bugs_severity":{},"status_breakdown":{},"link":null,"scope":"none"},"historical_compromise":null,"recommendation":{"action":"safe_to_use","issues":[],"use_version":"0.1.1","version_hint":null,"summary":"AIBias@0.1.1 is safe to use (health: 55/100)"},"version_scoped":null,"requested_version":null,"_cache":"miss","_response_ms":475,"_powered_by":"depscope.dev — free package intelligence for AI agents","typosquat":{"is_suspected":false},"maintainer_trust":{"available":false},"malicious":{"is_malicious":false},"scorecard":{"available":false},"quality":{"available":false},"co_used_with":[{"package":"attempt","occurrences":3}]}