trinity.buffer.storage.queue module#

Ray Queue storage

trinity.buffer.storage.queue.is_database_url(path: str) bool[source]#
trinity.buffer.storage.queue.is_json_file(path: str) bool[source]#
class trinity.buffer.storage.queue.PriorityFunction[source]#

Bases: ABC

Base class for priority functions. Each priority_fn computes a priority for an item.
Args:

item: List[Experience], assume that all experiences in it have the same model_version and use_count.
priority_fn_args: Dict, the arguments for priority_fn.

Returns:

priority: float, the computed priority of the item.
put_into_queue: bool, decide whether to put item into queue.

Note that put_into_queue takes effect both for new item from the explorer and for item sampled from the buffer.

abstract classmethod default_config() Dict[source]#

Return the default config.

class trinity.buffer.storage.queue.LinearDecayPriority(decay: float = 2.0)[source]#

Bases: PriorityFunction

Calculate priority by linear decay.

Priority is calculated as model_version - decay * use_count. The item is always put back into the queue for reuse (as long as `reuse_cooldown_time` is not None).

__init__(decay: float = 2.0)[source]#
classmethod default_config() Dict[source]#

Return the default config.

class trinity.buffer.storage.queue.LinearDecayUseCountControlPriority(decay: float = 2.0, use_count_limit: int = 3, sigma: float = 0.0)[source]#

Bases: PriorityFunction

Calculate priority by linear decay, use count control, and randomization.

Priority is calculated as model_version - decay * use_count; if sigma is non-zero, priority is further perturbed by random Gaussian noise with standard deviation sigma. The item will be put back into the queue only if use count does not exceed use_count_limit.

__init__(decay: float = 2.0, use_count_limit: int = 3, sigma: float = 0.0)[source]#
classmethod default_config() Dict[source]#

Return the default config.

class trinity.buffer.storage.queue.QueueBuffer[source]#

Bases: ABC

abstract async put(exps: List[Experience]) None[source]#

Put a list of experiences into the queue.

abstract async get() List[Experience][source]#

Get a list of experiences from the queue.

abstract qsize() int[source]#

Get the current size of the queue.

abstract async close() None[source]#

Close the queue.

abstract stopped() bool[source]#

Check if there is no more data to read.

classmethod get_queue(config: StorageConfig) QueueBuffer[source]#

Get a queue instance based on the storage configuration.

class trinity.buffer.storage.queue.AsyncQueue(capacity: int)[source]#

Bases: Queue, QueueBuffer

__init__(capacity: int)[source]#

Initialize the async queue with a specified capacity.

Parameters:

capacity (int) – The maximum number of items the queue can hold.

async close() None[source]#

Close the queue.

stopped() bool[source]#

Check if there is no more data to read.

class trinity.buffer.storage.queue.AsyncPriorityQueue(capacity: int, reuse_cooldown_time: float | None = None, priority_fn: str = 'linear_decay', priority_fn_args: dict | None = None)[source]#

Bases: QueueBuffer

An asynchronous priority queue that manages a fixed-size buffer of experience items. Items are prioritized using a user-defined function and reinserted after a cooldown period.

capacity#

Maximum number of items the queue can hold. This value is automatically adjusted to be at most twice the read batch size.

Type:

int

reuse_cooldown_time#

Delay before reusing an item (set to infinity to disable).

Type:

float

priority_fn#

Function used to determine the priority of an item.

Type:

callable

priority_groups#

Maps priorities to deques of items with the same priority.

Type:

SortedDict

__init__(capacity: int, reuse_cooldown_time: float | None = None, priority_fn: str = 'linear_decay', priority_fn_args: dict | None = None)[source]#

Initialize the async priority queue.

Parameters:
  • capacity (int) – The maximum number of items the queue can store.

  • reuse_cooldown_time (float) – Time to wait before reusing an item. Set to None to disable reuse.

  • priority_fn (str) – Name of the function to use for determining item priority.

  • priority_fn_args (dict) – Additional keyword arguments for the priority function.

async put(item: List[Experience]) None[source]#

Put a list of experiences into the queue.

async get() List[Experience][source]#

Retrieve the highest-priority item from the queue.

Returns:

The highest-priority item (list of experiences).

Return type:

List[Experience]

Notes

  • After retrieval, the item is optionally reinserted after a cooldown period.

qsize()[source]#

Get the current size of the queue.

async close() None[source]#

Close the queue.

stopped() bool[source]#

Check if there is no more data to read.

class trinity.buffer.storage.queue.QueueStorage(config: StorageConfig)[source]#

Bases: object

A wrapper of an async queue.

__init__(config: StorageConfig) None[source]#
async acquire() int[source]#
async release() int[source]#

Release the queue.

length() int[source]#

The length of the queue.

async put_batch(exp_list: List) None[source]#

Put a batch of experiences.

async get_batch(batch_size: int, timeout: float) List[source]#

Get a batch of experiences.

classmethod get_wrapper(config: StorageConfig)[source]#

Get the queue actor.