It seems the 0.2.9 update broke the model.generator wrapper. You can reproduce this by running e.g. the cross-prompt intervention tutorial in a Google Colab:
!pip install nnsight==0.2.9
from nnsight import LanguageModel
model = LanguageModel('openai-community/gpt2', device_map='cpu')
with model.generate(max_new_tokens=3) as tracer:
with tracer.invoke("Madison square garden is located in the city of New") as invoker:
embeddings = model.transformer.wte.output.save()
original = model.generator.output.save()
with tracer.invoke("_ _ _ _ _ _ _ _ _ _") as invoker:
model.transformer.wte.output = embeddings
intervened = model.generator.output.save()
print(model.tokenizer.batch_decode(original))
print(model.tokenizer.batch_decode(intervened))
AttributeError Traceback (most recent call last)
in <cell line: 7>()
5 model = LanguageModel('openai-community/gpt2', device_map='cpu')
6
----> 7 with model.generate(max_new_tokens=3) as tracer:
8
9 with tracer.invoke("Madison square garden is located in the city of New") as invoker:
4 frames
/usr/local/lib/python3.10/dist-packages/nnsight/contexts/Runner.py in __exit__(self, exc_type, exc_val, exc_tb)
39 """On exit, run and generate using the model whether locally or on the server."""
40 if isinstance(exc_val, BaseException):
---> 41 raise exc_val
42
43 if self.remote:
in <cell line: 7>()
7 with model.generate(max_new_tokens=3) as tracer:
8
----> 9 with tracer.invoke("Madison square garden is located in the city of New") as invoker:
10
11 embeddings = model.transformer.wte.output.save()
in <cell line: 7>()
10
11 embeddings = model.transformer.wte.output.save()
---> 12 original = model.generator.output.save()
13
14 with tracer.invoke("") as invoker:
/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py in __getattr__(self, name)
1686 if name in modules:
1687 return modules[name]
-> 1688 raise AttributeError(f"'{type(self).__name__}' object has no attribute '{name}'")
1689
1690 def __setattr__(self, name: str, value: Union[Tensor, 'Module']) -> None:
AttributeError: 'WrapperModule' object has no attribute 'output'
It seems the 0.2.9 update broke the
model.generator
wrapper. You can reproduce this by running e.g. the cross-prompt intervention tutorial in a Google Colab:

AttributeError Traceback (most recent call last)
4 frames
/usr/local/lib/python3.10/dist-packages/nnsight/contexts/Runner.py in __exit__(self, exc_type, exc_val, exc_tb) 39 """On exit, run and generate using the model whether locally or on the server.""" 40 if isinstance(exc_val, BaseException): ---> 41 raise exc_val 42 43 if self.remote:
/usr/local/lib/python3.10/dist-packages/nnsight/contexts/Invoker.py in __exit__(self, exc_type, exc_val, exc_tb) 82 def __exit__(self, exc_type, exc_val, exc_tb) -> None: 83 if isinstance(exc_val, BaseException): ---> 84 raise exc_val 85 86 self.tracer._invoker = None
/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py in __getattr__(self, name) 1686 if name in modules: 1687 return modules[name] -> 1688 raise AttributeError(f"'{type(self).__name__}' object has no attribute '{name}'") 1689 1690 def __setattr__(self, name: str, value: Union[Tensor, 'Module']) -> None:
AttributeError: 'WrapperModule' object has no attribute 'output'