This repository has been archived by the owner on Feb 22, 2020. It is now read-only.

Commit

fix(encoder): fix gpu limitation in inception
Larryjianfeng committed Jul 25, 2019
1 parent 89d8b70 commit e644e39
Showing 2 changed files with 5 additions and 4 deletions.
gnes/encoder/image/inception.py: 5 changes (3 additions, 2 deletions)
@@ -19,7 +19,7 @@
 from PIL import Image
 
 from ..base import BaseImageEncoder
-from ...helper import batching, batch_iterator
+from ...helper import batching, batch_iterator, get_first_available_gpu
 
 
 class TFInceptionEncoder(BaseImageEncoder):
@@ -42,7 +42,8 @@ def post_init(self):
         import tensorflow as tf
         from .inception_cores.inception_v4 import inception_v4
         from .inception_cores.inception_utils import inception_arg_scope
-
+        import os
+        os.environ['CUDA_VISIBLE_DEVICES'] = get_first_available_gpu()
         g = tf.Graph()
         with g.as_default():
             arg_scope = inception_arg_scope()
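For context on why the added lines work: CUDA reads CUDA_VISIBLE_DEVICES when the runtime is first initialised, so the variable has to be set before TensorFlow actually touches a GPU, and its value must be a string ('-1' hides every device and forces CPU execution). A minimal standalone sketch of the pattern, not the encoder's actual code:

import os

# Pin this process to a single physical GPU before the CUDA runtime starts.
# os.environ only accepts strings, so an integer index has to be cast;
# '-1' hides every GPU and falls back to CPU.
gpu_index = 0  # e.g. whatever get_first_available_gpu() reports
os.environ['CUDA_VISIBLE_DEVICES'] = str(gpu_index)

import tensorflow as tf  # GPU devices are initialised lazily, so the setting takes effect

g = tf.Graph()
with g.as_default():
    x = tf.constant([1.0, 2.0])  # the process now only sees the chosen device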
gnes/helper.py: 4 changes (2 additions, 2 deletions)
@@ -55,9 +55,9 @@ def get_first_available_gpu():
             return r[0]
         raise ValueError
     except ImportError:
-        return 0
+        return -1
     except ValueError:
-        return 0
+        return -1
 
 
 class FileLock:
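The switch from 0 to -1 matters because the encoder above now writes this return value straight into CUDA_VISIBLE_DEVICES: returning 0 silently claimed the first GPU even when detection failed, while -1 hides all GPUs so the process falls back to CPU. The full helper body is not part of this diff; the sketch below is only a hedged reconstruction consistent with the visible lines (return r[0], raise ValueError, and the two except clauses), and the GPUtil query is an assumption, not something this commit shows:

def get_first_available_gpu():
    # Hedged reconstruction; the GPUtil dependency and query are assumptions.
    try:
        import GPUtil  # ImportError here means no GPU tooling is installed
        r = GPUtil.getAvailable(order='memory', limit=1)  # least-used GPU first
        if r:
            return r[0]
        raise ValueError
    except ImportError:
        return -1  # no GPU library available -> signal "run on CPU"
    except ValueError:
        return -1  # no free GPU found -> signal "run on CPU"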

0 comments on commit e644e39
