openai / gym

A toolkit for developing and comparing reinforcement learning algorithms.
https://www.gymlibrary.dev

Creating GL Context #468

Closed · melrobin closed this issue 7 years ago

melrobin commented 7 years ago

I am having problems getting started with gym. A very simple program fails when trying to create the GL context, and I really do not know how to fix it:

```
[melrobin@scorpion ~]$ python test_gym.py
[2017-01-23 01:36:51,002] Making new env: CartPole-v0
Traceback (most recent call last):
  File "test_gym.py", line 5, in <module>
    env.render()
  File "/home/melrobin/packages/gym/gym/core.py", line 174, in render
    return self._render(mode=mode, close=close)
  File "/home/melrobin/packages/gym/gym/envs/classic_control/cartpole.py", line 113, in _render
    from gym.envs.classic_control import rendering
  File "/home/melrobin/packages/gym/gym/envs/classic_control/rendering.py", line 23, in <module>
    from pyglet.gl import *
  File "/usr/lib/python2.7/site-packages/pyglet/gl/__init__.py", line 236, in <module>
    import pyglet.window
  File "/usr/lib/python2.7/site-packages/pyglet/window/__init__.py", line 1817, in <module>
    gl._create_shadow_window()
  File "/usr/lib/python2.7/site-packages/pyglet/gl/__init__.py", line 205, in _create_shadow_window
    _shadow_window = Window(width=1, height=1, visible=False)
  File "/usr/lib/python2.7/site-packages/pyglet/window/xlib/__init__.py", line 163, in __init__
    super(XlibWindow, self).__init__(*args, **kwargs)
  File "/usr/lib/python2.7/site-packages/pyglet/window/__init__.py", line 516, in __init__
    context = config.create_context(gl.current_context)
  File "/usr/lib/python2.7/site-packages/pyglet/gl/xlib.py", line 188, in create_context
    return XlibContext13(self, share)
  File "/usr/lib/python2.7/site-packages/pyglet/gl/xlib.py", line 296, in __init__
    super(XlibContext13, self).__init__(config, share)
  File "/usr/lib/python2.7/site-packages/pyglet/gl/xlib.py", line 199, in __init__
    raise gl.ContextException('Could not create GL context')
pyglet.gl.ContextException: Could not create GL context
```

The program is a simple one that I cut and pasted:

```python
import gym

env = gym.make('CartPole-v0')
env.reset()
for _ in range(1000):
    env.render()
    env.step(env.action_space.sample())  # take a random action
```

My graphics card is an NVIDIA GTX 980.

tlbtlbtlb commented 7 years ago

Your X server isn't configured to run OpenGL. You can test by running glxgears or glxinfo at the command prompt (the first one should show spinning gears, the second will give a dump of OpenGL details). If you're using Ubuntu, see https://en.wikibooks.org/wiki/OpenGL_Programming/Installation/Linux
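
For a quick check from Python itself, a minimal sketch along these lines (assuming pyglet is installed, which gym's classic_control rendering requires) attempts the same hidden-window creation that fails in the traceback above; if it fails, the X/OpenGL setup is at fault rather than gym:

```python
# Sketch only: reproduce the GL context creation that gym's renderer
# triggers through pyglet, independent of any gym environment.
try:
    import pyglet
    import pyglet.gl  # importing this already creates a hidden "shadow" window
    window = pyglet.window.Window(width=1, height=1, visible=False)
    print("GL context created:", pyglet.gl.current_context is not None)
    window.close()
except Exception as exc:  # typically pyglet.gl.ContextException when X/GL is misconfigured
    print("Could not create a GL context:", exc)
```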

melrobin commented 7 years ago

OK, this was indeed the problem. I ran the program on the console with no issues.

livey commented 6 years ago

Using `nvidia-xconfig` solved the problem: http://uk.download.nvidia.com/XFree86/Linux-x86_64/256.44/README/editxconfig.html

codars commented 6 years ago

@livey Could you be more specific on how you solved the problem?

livey commented 6 years ago

@CodArs-van Just use the command `nvidia-xconfig`.

JobJob commented 6 years ago

Mac users: see https://github.com/openai/gym/issues/462#issuecomment-432259137

pachiko commented 5 years ago

Hi, sorry to reopen this issue, but I am still seeing this error even though I run this from a terminal.

```python
import click
import numpy as np
import gym
from simplepg.simple_utils import include_bias, weighted_sample


def point_get_action(theta, ob, rng=np.random):
    ob_1 = include_bias(ob)
    mean = theta.dot(ob_1)
    return rng.normal(loc=mean, scale=1.)


def cartpole_get_action(theta, ob, rng=np.random):
    ob_1 = include_bias(ob)
    logits = ob_1.dot(theta.T)
    return weighted_sample(logits, rng=rng)


@click.command()
@click.argument("env_id", type=str, default="Point-v0")
def main(env_id):
    # Register the environment
    rng = np.random.RandomState(42)

    if env_id == 'CartPole-v0':
        env = gym.make('CartPole-v0')
        get_action = cartpole_get_action
        obs_dim = env.observation_space.shape[0]
        action_dim = env.action_space.n
    elif env_id == 'Point-v0':
        from simplepg import point_env
        env = gym.make('Point-v0')
        get_action = point_get_action
        obs_dim = env.observation_space.shape[0]
        action_dim = env.action_space.shape[0]
    else:
        raise ValueError(
            "Unsupported environment: must be one of 'CartPole-v0', 'Point-v0'")

    env.seed(42)

    # Initialize parameters
    theta = rng.normal(scale=0.01, size=(action_dim, obs_dim + 1))

    while True:
        ob = env.reset()
        done = False
        # Only render the first trajectory
        # Collect a new trajectory
        rewards = []
        while not done:
            action = get_action(theta, ob, rng=rng)
            next_ob, rew, done, _ = env.step(action)
            ob = next_ob
            env.render()
            rewards.append(rew)

        print("Episode reward: %.2f" % np.sum(rewards))


if __name__ == "__main__":
    main()
```

Here is the output:

```
Traceback (most recent call last):
  File "simplepg/rollout.py", line 80, in <module>
    main()
  File "/opt/conda/envs/deeprlbootcamp/lib/python3.5/site-packages/click/core.py", line 722, in __call__
    return self.main(*args, **kwargs)
  File "/opt/conda/envs/deeprlbootcamp/lib/python3.5/site-packages/click/core.py", line 697, in main
    rv = self.invoke(ctx)
  File "/opt/conda/envs/deeprlbootcamp/lib/python3.5/site-packages/click/core.py", line 895, in invoke
    return ctx.invoke(self.callback, **ctx.params)
  File "/opt/conda/envs/deeprlbootcamp/lib/python3.5/site-packages/click/core.py", line 535, in invoke
    return callback(*args, **kwargs)
  File "simplepg/rollout.py", line 72, in main
    env.render()
  File "/opt/conda/envs/deeprlbootcamp/lib/python3.5/site-packages/gym/core.py", line 150, in render
    return self._render(mode=mode, close=close)
  File "/opt/conda/envs/deeprlbootcamp/lib/python3.5/site-packages/gym/core.py", line 286, in _render
    return self.env.render(mode, close)
  File "/opt/conda/envs/deeprlbootcamp/lib/python3.5/site-packages/gym/core.py", line 150, in render
    return self._render(mode=mode, close=close)
  File "/root/code/bootcamp_pg/simplepg/point_env.py", line 69, in _render
    from gym.envs.classic_control import rendering
  File "/opt/conda/envs/deeprlbootcamp/lib/python3.5/site-packages/gym/envs/classic_control/rendering.py", line 23, in <module>
    from pyglet.gl import *
  File "/opt/conda/envs/deeprlbootcamp/lib/python3.5/site-packages/pyglet/gl/__init__.py", line 236, in <module>
    import pyglet.window
  File "/opt/conda/envs/deeprlbootcamp/lib/python3.5/site-packages/pyglet/window/__init__.py", line 1816, in <module>
    gl._create_shadow_window()
  File "/opt/conda/envs/deeprlbootcamp/lib/python3.5/site-packages/pyglet/gl/__init__.py", line 205, in _create_shadow_window
    _shadow_window = Window(width=1, height=1, visible=False)
  File "/opt/conda/envs/deeprlbootcamp/lib/python3.5/site-packages/pyglet/window/xlib/__init__.py", line 163, in __init__
    super(XlibWindow, self).__init__(*args, **kwargs)
  File "/opt/conda/envs/deeprlbootcamp/lib/python3.5/site-packages/pyglet/window/__init__.py", line 515, in __init__
    context = config.create_context(gl.current_context)
  File "/opt/conda/envs/deeprlbootcamp/lib/python3.5/site-packages/pyglet/gl/xlib.py", line 186, in create_context
    return XlibContextARB(self, share)
  File "/opt/conda/envs/deeprlbootcamp/lib/python3.5/site-packages/pyglet/gl/xlib.py", line 296, in __init__
    super(XlibContext13, self).__init__(config, share)
  File "/opt/conda/envs/deeprlbootcamp/lib/python3.5/site-packages/pyglet/gl/xlib.py", line 199, in __init__
    raise gl.ContextException('Could not create GL context')
pyglet.gl.ContextException: Could not create GL context
```

FYI, this code was taken from https://drive.google.com/open?id=0B1BwaUH2mk-EbktzeURrNktQVjQ

JonathanLehner commented 2 years ago

After running `nvidia-xconfig` you need to reboot or reload the X configuration.

Touutae-lab commented 2 years ago

> Your X server isn't configured to run OpenGL. You can test by running glxgears or glxinfo at the command prompt (the first one should show spinning gears, the second will give a dump of OpenGL details). If you're using Ubuntu, see https://en.wikibooks.org/wiki/OpenGL_Programming/Installation/Linux

I still have the same problem (`Could not create GL context`) even though I can run that command.