Path: blob/master/drivers/android/binder/range_alloc/mod.rs
29537 views
// SPDX-License-Identifier: GPL-2.012// Copyright (C) 2025 Google LLC.34use kernel::{page::PAGE_SIZE, prelude::*, seq_file::SeqFile, task::Pid};56mod tree;7use self::tree::{FromArrayAllocs, ReserveNewTreeAlloc, TreeRangeAllocator};89mod array;10use self::array::{ArrayRangeAllocator, EmptyArrayAlloc};1112enum DescriptorState<T> {13Reserved(Reservation),14Allocated(Allocation<T>),15}1617impl<T> DescriptorState<T> {18fn new(is_oneway: bool, debug_id: usize, pid: Pid) -> Self {19DescriptorState::Reserved(Reservation {20debug_id,21is_oneway,22pid,23})24}2526fn pid(&self) -> Pid {27match self {28DescriptorState::Reserved(inner) => inner.pid,29DescriptorState::Allocated(inner) => inner.reservation.pid,30}31}3233fn is_oneway(&self) -> bool {34match self {35DescriptorState::Reserved(inner) => inner.is_oneway,36DescriptorState::Allocated(inner) => inner.reservation.is_oneway,37}38}39}4041#[derive(Clone)]42struct Reservation {43debug_id: usize,44is_oneway: bool,45pid: Pid,46}4748impl Reservation {49fn allocate<T>(self, data: Option<T>) -> Allocation<T> {50Allocation {51data,52reservation: self,53}54}55}5657struct Allocation<T> {58reservation: Reservation,59data: Option<T>,60}6162impl<T> Allocation<T> {63fn deallocate(self) -> (Reservation, Option<T>) {64(self.reservation, self.data)65}6667fn debug_id(&self) -> usize {68self.reservation.debug_id69}7071fn take(&mut self) -> Option<T> {72self.data.take()73}74}7576/// The array implementation must switch to the tree if it wants to go beyond this number of77/// ranges.78const TREE_THRESHOLD: usize = 8;7980/// Represents a range of pages that have just become completely free.81#[derive(Copy, Clone)]82pub(crate) struct FreedRange {83pub(crate) start_page_idx: usize,84pub(crate) end_page_idx: usize,85}8687impl FreedRange {88fn interior_pages(offset: usize, size: usize) -> FreedRange {89FreedRange {90// Divide round up91start_page_idx: offset.div_ceil(PAGE_SIZE),92// Divide round down93end_page_idx: (offset + size) / PAGE_SIZE,94}95}96}9798struct Range<T> {99offset: usize,100size: usize,101state: DescriptorState<T>,102}103104impl<T> Range<T> {105fn endpoint(&self) -> usize {106self.offset + self.size107}108}109110pub(crate) struct RangeAllocator<T> {111inner: Impl<T>,112}113114enum Impl<T> {115Empty(usize),116Array(ArrayRangeAllocator<T>),117Tree(TreeRangeAllocator<T>),118}119120impl<T> RangeAllocator<T> {121pub(crate) fn new(size: usize) -> Self {122Self {123inner: Impl::Empty(size),124}125}126127pub(crate) fn free_oneway_space(&self) -> usize {128match &self.inner {129Impl::Empty(size) => size / 2,130Impl::Array(array) => array.free_oneway_space(),131Impl::Tree(tree) => tree.free_oneway_space(),132}133}134135pub(crate) fn count_buffers(&self) -> usize {136match &self.inner {137Impl::Empty(_size) => 0,138Impl::Array(array) => array.count_buffers(),139Impl::Tree(tree) => tree.count_buffers(),140}141}142143pub(crate) fn debug_print(&self, m: &SeqFile) -> Result<()> {144match &self.inner {145Impl::Empty(_size) => Ok(()),146Impl::Array(array) => array.debug_print(m),147Impl::Tree(tree) => tree.debug_print(m),148}149}150151/// Try to reserve a new buffer, using the provided allocation if necessary.152pub(crate) fn reserve_new(&mut self, mut args: ReserveNewArgs<T>) -> Result<ReserveNew<T>> {153match &mut self.inner {154Impl::Empty(size) => {155let empty_array = match args.empty_array_alloc.take() {156Some(empty_array) => ArrayRangeAllocator::new(*size, empty_array),157None => {158return Ok(ReserveNew::NeedAlloc(ReserveNewNeedAlloc {159args,160need_empty_array_alloc: true,161need_new_tree_alloc: false,162need_tree_alloc: false,163}))164}165};166167self.inner = Impl::Array(empty_array);168self.reserve_new(args)169}170Impl::Array(array) if array.is_full() => {171let allocs = match args.new_tree_alloc {172Some(ref mut allocs) => allocs,173None => {174return Ok(ReserveNew::NeedAlloc(ReserveNewNeedAlloc {175args,176need_empty_array_alloc: false,177need_new_tree_alloc: true,178need_tree_alloc: true,179}))180}181};182183let new_tree =184TreeRangeAllocator::from_array(array.total_size(), &mut array.ranges, allocs);185186self.inner = Impl::Tree(new_tree);187self.reserve_new(args)188}189Impl::Array(array) => {190let offset =191array.reserve_new(args.debug_id, args.size, args.is_oneway, args.pid)?;192Ok(ReserveNew::Success(ReserveNewSuccess {193offset,194oneway_spam_detected: false,195_empty_array_alloc: args.empty_array_alloc,196_new_tree_alloc: args.new_tree_alloc,197_tree_alloc: args.tree_alloc,198}))199}200Impl::Tree(tree) => {201let alloc = match args.tree_alloc {202Some(alloc) => alloc,203None => {204return Ok(ReserveNew::NeedAlloc(ReserveNewNeedAlloc {205args,206need_empty_array_alloc: false,207need_new_tree_alloc: false,208need_tree_alloc: true,209}));210}211};212let (offset, oneway_spam_detected) =213tree.reserve_new(args.debug_id, args.size, args.is_oneway, args.pid, alloc)?;214Ok(ReserveNew::Success(ReserveNewSuccess {215offset,216oneway_spam_detected,217_empty_array_alloc: args.empty_array_alloc,218_new_tree_alloc: args.new_tree_alloc,219_tree_alloc: None,220}))221}222}223}224225/// Deletes the allocations at `offset`.226pub(crate) fn reservation_abort(&mut self, offset: usize) -> Result<FreedRange> {227match &mut self.inner {228Impl::Empty(_size) => Err(EINVAL),229Impl::Array(array) => array.reservation_abort(offset),230Impl::Tree(tree) => {231let freed_range = tree.reservation_abort(offset)?;232if tree.is_empty() {233self.inner = Impl::Empty(tree.total_size());234}235Ok(freed_range)236}237}238}239240/// Called when an allocation is no longer in use by the kernel.241///242/// The value in `data` will be stored, if any. A mutable reference is used to avoid dropping243/// the `T` when an error is returned.244pub(crate) fn reservation_commit(&mut self, offset: usize, data: &mut Option<T>) -> Result {245match &mut self.inner {246Impl::Empty(_size) => Err(EINVAL),247Impl::Array(array) => array.reservation_commit(offset, data),248Impl::Tree(tree) => tree.reservation_commit(offset, data),249}250}251252/// Called when the kernel starts using an allocation.253///254/// Returns the size of the existing entry and the data associated with it.255pub(crate) fn reserve_existing(&mut self, offset: usize) -> Result<(usize, usize, Option<T>)> {256match &mut self.inner {257Impl::Empty(_size) => Err(EINVAL),258Impl::Array(array) => array.reserve_existing(offset),259Impl::Tree(tree) => tree.reserve_existing(offset),260}261}262263/// Call the provided callback at every allocated region.264///265/// This destroys the range allocator. Used only during shutdown.266pub(crate) fn take_for_each<F: Fn(usize, usize, usize, Option<T>)>(&mut self, callback: F) {267match &mut self.inner {268Impl::Empty(_size) => {}269Impl::Array(array) => array.take_for_each(callback),270Impl::Tree(tree) => tree.take_for_each(callback),271}272}273}274275/// The arguments for `reserve_new`.276#[derive(Default)]277pub(crate) struct ReserveNewArgs<T> {278pub(crate) size: usize,279pub(crate) is_oneway: bool,280pub(crate) debug_id: usize,281pub(crate) pid: Pid,282pub(crate) empty_array_alloc: Option<EmptyArrayAlloc<T>>,283pub(crate) new_tree_alloc: Option<FromArrayAllocs<T>>,284pub(crate) tree_alloc: Option<ReserveNewTreeAlloc<T>>,285}286287/// The return type of `ReserveNew`.288pub(crate) enum ReserveNew<T> {289Success(ReserveNewSuccess<T>),290NeedAlloc(ReserveNewNeedAlloc<T>),291}292293/// Returned by `reserve_new` when the reservation was successul.294pub(crate) struct ReserveNewSuccess<T> {295pub(crate) offset: usize,296pub(crate) oneway_spam_detected: bool,297298// If the user supplied an allocation that we did not end up using, then we return it here.299// The caller will kfree it outside of the lock.300_empty_array_alloc: Option<EmptyArrayAlloc<T>>,301_new_tree_alloc: Option<FromArrayAllocs<T>>,302_tree_alloc: Option<ReserveNewTreeAlloc<T>>,303}304305/// Returned by `reserve_new` to request the caller to make an allocation before calling the method306/// again.307pub(crate) struct ReserveNewNeedAlloc<T> {308args: ReserveNewArgs<T>,309need_empty_array_alloc: bool,310need_new_tree_alloc: bool,311need_tree_alloc: bool,312}313314impl<T> ReserveNewNeedAlloc<T> {315/// Make the necessary allocations for another call to `reserve_new`.316pub(crate) fn make_alloc(mut self) -> Result<ReserveNewArgs<T>> {317if self.need_empty_array_alloc && self.args.empty_array_alloc.is_none() {318self.args.empty_array_alloc = Some(EmptyArrayAlloc::try_new(TREE_THRESHOLD)?);319}320if self.need_new_tree_alloc && self.args.new_tree_alloc.is_none() {321self.args.new_tree_alloc = Some(FromArrayAllocs::try_new(TREE_THRESHOLD)?);322}323if self.need_tree_alloc && self.args.tree_alloc.is_none() {324self.args.tree_alloc = Some(ReserveNewTreeAlloc::try_new()?);325}326Ok(self.args)327}328}329330331