Closed — Giovannibriglia closed this issue 2 months ago.
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
from typing import List

import hydra
from hydra.core.hydra_config import HydraConfig
from omegaconf import DictConfig, OmegaConf, open_dict
from tensordict import TensorDictBase

from benchmarl.experiment import Callback
from benchmarl.hydra_config import load_experiment_from_hydra
from benchmarl.models import Model
def get_model_from_policy(policy):
    """Unwrap a BenchMARL policy down to its innermost ``Model`` instance.

    The policy wraps the model in nested container modules; the model
    itself is reached by repeatedly taking the first sub-module until an
    object of type ``Model`` is found.
    """
    candidate = policy.module[0]
    # Descend through nested wrappers until the actual Model surfaces.
    while True:
        if isinstance(candidate, Model):
            return candidate
        candidate = candidate[0]
class TaskNameModelAdder(Callback):
    """Experiment callback that tags the agents' model with the task name.

    ``on_setup`` runs after the experiment is initialized but before
    training starts; it stores ``experiment.task_name`` on the policy's
    inner model so the model can later use it (e.g. for a task-specific
    action filter). ``on_batch_collected`` merely demonstrates that the
    attribute is still visible on the model during training.
    """

    def on_setup(self):
        # NOTE(review): the group name is hard-coded; this assumes a single
        # "agents" group — confirm for multi-group tasks.
        self.group = "agents"
        group_policy = self.experiment.group_policies[self.group]
        inner_model = get_model_from_policy(group_policy)
        inner_model.task_name = self.experiment.task_name

    def on_batch_collected(self, rollouts: List[TensorDictBase]):
        group_policy = self.experiment.group_policies[self.group]
        print(get_model_from_policy(group_policy).task_name)
@hydra.main(version_base=None, config_path="conf", config_name="config")
def hydra_experiment(cfg: DictConfig) -> None:
    """Runs an experiment loading its config from hydra.

    This function is decorated as ``@hydra.main`` and is called by running

    .. code-block:: console

       python benchmarl/run.py algorithm=mappo task=vmas/balance

    Args:
        cfg (DictConfig): the hydra config dictionary
    """
    # Hydra records which config-group options were selected on the CLI.
    choices = HydraConfig.get().runtime.choices
    task_name = choices.task
    algorithm_name = choices.algorithm

    print(f"\nAlgorithm: {algorithm_name}, Task: {task_name}")
    print("\nLoaded config:\n")
    print(OmegaConf.to_yaml(cfg))

    # The callback attaches task_name to the model right after setup.
    experiment = load_experiment_from_hydra(
        cfg, task_name=task_name, callbacks=[TaskNameModelAdder()]
    )
    experiment.run()
# Entry point: Hydra parses the CLI overrides and calls hydra_experiment.
if __name__ == "__main__":
    hydra_experiment()
This will add it after init, but before training starts.
You could then have something in the model's forward that is like:
if hasattr(self, "task_name") and self.task_name_not_init:
    self.init_task_name()
    self.task_name_not_init = False
the best solution for having it at init might be to have it as one of the model parameters.
That way you could automatically populate it before running
@hydra.main(version_base=None, config_path="conf", config_name="config")
def hydra_experiment(cfg: DictConfig) -> None:
    """Runs an experiment loading its config from hydra.

    This function is decorated as ``@hydra.main`` and is called by running

    .. code-block:: console

       python benchmarl/run.py algorithm=mappo task=vmas/balance

    It injects the selected task name into the model config before the
    experiment is built, so models can read ``task_name`` at init time.

    Args:
        cfg (DictConfig): the hydra config dictionary
    """
    hydra_choices = HydraConfig.get().runtime.choices
    task_name = hydra_choices.task
    algorithm_name = hydra_choices.algorithm
    print(f"\nAlgorithm: {algorithm_name}, Task: {task_name}")
    print("\nLoaded config:\n")
    print(OmegaConf.to_yaml(cfg))
    # Hydra configs are in "struct" mode: assigning a key that is not
    # declared in the model schema raises ConfigAttributeError. open_dict
    # temporarily lifts struct mode so task_name can be injected into the
    # model config before the experiment (and its models) are constructed.
    with open_dict(cfg.model):
        cfg.model.task_name = task_name
    experiment = load_experiment_from_hydra(
        cfg, task_name=task_name, callbacks=[TaskNameModelAdder()]
    )
    experiment.run()
# Entry point: Hydra parses the CLI overrides and calls hydra_experiment.
if __name__ == "__main__":
    hydra_experiment()
I wanted to confirm if it's okay that I only run experiments where I get task_name
with Hydra using the terminal command python benchmarl/run.py algorithm=causaliql task=vmas/flocking
. It's fine, just for asking :)
Additionally, I have already implemented and tested my action filter, but I had to apply a 'hack' related to the self.action_mask_spec
variable. Specifically, I found a problem (self.action_mask_spec = None
) in these lines:(https://github.com/facebookresearch/BenchMARL/blob/dc793b5dce2eeae94be03e7ca72b10b4b16c66db/benchmarl/algorithms/iql.py#L103)
https://github.com/facebookresearch/BenchMARL/blob/dc793b5dce2eeae94be03e7ca72b10b4b16c66db/benchmarl/algorithms/iql.py#L125
I initialized self.action_mask_spec
as True
because it otherwise remains None
and doesn't update correctly. Did I miss any necessary configurations?
In my MLP, in forward, I pass actions_mask
like this:
...
tensordict.set(self.out_key, res)
actions_mask = # computation
tensordict['agents'].set('action_mask', actions_mask)
return tensordict
Is this related to this issue?
action_mask_spec
is a TensorSpec
(not a bool) and should be defined in your task class
Is this related to this issue?
action_mask_spec
is a TensorSpec
(not a bool) and should be defined in your task class.
No, I should open another one, you're right.
Is this related to this issue?
action_mask_spec
is a TensorSpec
(not a bool) and should be defined in your task class. No, I should open another one, you're right.
were you able to solve it?
Hi,
Thank you for your fantastic work!
I’m currently looking to access task_name within the models, as I need this variable to implement a task-specific action filter. Ideally, I’d like to retrieve task_name in the model’s init, for example in the MLP model here.
Could you provide guidance on how to achieve this?
Thank you :)