chore: remove clip_hg (#786)
* chore: remove clip_hg

* fix: add dependency for m-clip

* fix: transformers version

* fix: modify dependency key

* fix: update requirements

Co-authored-by: felix-wang <[email protected]>

* fix: update requirements

Co-authored-by: felix-wang <[email protected]>

* fix: update requirements

Co-authored-by: felix-wang <[email protected]>

* fix: tmp set grpcio version

Co-authored-by: felix-wang <[email protected]>
ZiniuYu and numb3r3 authored Jul 29, 2022
1 parent 1db43b4 commit c458dd6
Showing 7 changed files with 8 additions and 307 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/cd.yml
@@ -42,7 +42,7 @@ jobs:
python -m pip install wheel
pip install --no-cache-dir "client/[test]"
pip install --no-cache-dir "server/[onnx]"
pip install --no-cache-dir "server/[huggingface]"
pip install --no-cache-dir "server/[transformers]"
- name: Test
id: test
run: |
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
@@ -114,7 +114,7 @@ jobs:
python -m pip install wheel pytest pytest-cov
pip install --no-cache-dir "client/[test]"
pip install --no-cache-dir "server/[onnx]"
pip install --no-cache-dir "server/[huggingface]"
pip install --no-cache-dir "server/[transformers]"
- name: Test
id: test
run: |
241 changes: 0 additions & 241 deletions server/clip_server/executors/clip_hg.py

This file was deleted.

12 changes: 0 additions & 12 deletions server/clip_server/hg-flow.yml

This file was deleted.

5 changes: 4 additions & 1 deletion server/setup.py
@@ -50,6 +50,9 @@
'jina>=3.6.0',
'prometheus-client',
'open_clip_torch>=1.3.0',
+ 'grpcio>=1.46.0,<1.48.0', # tmp requires
+ 'grpcio-reflection>=1.46.0,<1.48.0', # tmp requires
+ 'grpcio-health-checking>=1.46.0,<1.48.0', # tmp requires
],
extras_require={
'onnx': [
@@ -58,7 +61,7 @@
]
+ (['onnxruntime-gpu>=1.8.0'] if sys.platform != 'darwin' else []),
'tensorrt': ['nvidia-tensorrt'],
- 'huggingface': ['transformers>=4.16.2'],
+ 'transformers': ['transformers>=4.16.2'],
},
classifiers=[
'Development Status :: 5 - Production/Stable',
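Taken together, the setup.py hunks pin grpcio temporarily in install_requires and rename the Hugging Face extra. A minimal sketch of the resulting dependency block, reconstructed from the lines above; the setup() wrapper, the package name, and the elided 'onnx' entries are assumptions, not part of this diff:

    import sys
    from setuptools import setup

    setup(
        name='clip-server',  # assumption: not shown in this hunk
        install_requires=[
            # entries before this hunk are omitted
            'jina>=3.6.0',
            'prometheus-client',
            'open_clip_torch>=1.3.0',
            'grpcio>=1.46.0,<1.48.0',  # tmp requires
            'grpcio-reflection>=1.46.0,<1.48.0',  # tmp requires
            'grpcio-health-checking>=1.46.0,<1.48.0',  # tmp requires
        ],
        extras_require={
            'onnx': [
                # ONNX runtime entries elided in the hunk above
            ]
            + (['onnxruntime-gpu>=1.8.0'] if sys.platform != 'darwin' else []),
            'tensorrt': ['nvidia-tensorrt'],
            'transformers': ['transformers>=4.16.2'],  # renamed from 'huggingface'
        },
    )

With the extra renamed, the workflows above install it from source as "server/[transformers]" rather than "server/[huggingface]"; an install of the published package would presumably select the same new extras key.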
28 changes: 2 additions & 26 deletions tests/conftest.py
@@ -16,15 +16,13 @@ def random_port():
return random_port


- @pytest.fixture(scope='session', params=['onnx', 'torch', 'hg', 'onnx_custom'])
+ @pytest.fixture(scope='session', params=['onnx', 'torch', 'onnx_custom'])
def make_flow(port_generator, request):
if request.param != 'onnx_custom':
if request.param == 'onnx':
from clip_server.executors.clip_onnx import CLIPEncoder
- elif request.param == 'torch':
- from clip_server.executors.clip_torch import CLIPEncoder
else:
- from clip_server.executors.clip_hg import CLIPEncoder
+ from clip_server.executors.clip_torch import CLIPEncoder

f = Flow(port=port_generator()).add(name=request.param, uses=CLIPEncoder)
else:
@@ -56,25 +54,3 @@ def make_trt_flow(port_generator, request):
f = Flow(port=port_generator()).add(name=request.param, uses=CLIPEncoder)
with f:
yield f


- @pytest.fixture(scope='session', params=['hg'])
- def make_hg_flow(port_generator, request):
- from clip_server.executors.clip_hg import CLIPEncoder
-
- f = Flow(port=port_generator()).add(name=request.param, uses=CLIPEncoder)
- with f:
- yield f
-
-
- @pytest.fixture(scope='session', params=['hg'])
- def make_hg_flow_no_default(port_generator, request):
- from clip_server.executors.clip_hg import CLIPEncoder
-
- f = Flow(port=port_generator()).add(
- name=request.param,
- uses=CLIPEncoder,
- uses_with={'preprocessing': False},
- )
- with f:
- yield f
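For context, the shared make_flow fixture that remains after this hunk boils down to the following. This is a sketch assuming the conftest keeps its existing pytest/jina imports and the port_generator fixture defined above, with the onnx_custom branch omitted for brevity:

    import pytest
    from jina import Flow

    @pytest.fixture(scope='session', params=['onnx', 'torch'])  # 'onnx_custom' branch omitted
    def make_flow(port_generator, request):
        # with clip_hg removed, anything that is not ONNX falls back to the torch executor
        if request.param == 'onnx':
            from clip_server.executors.clip_onnx import CLIPEncoder
        else:
            from clip_server.executors.clip_torch import CLIPEncoder

        f = Flow(port=port_generator()).add(name=request.param, uses=CLIPEncoder)
        with f:
            yield f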
25 changes: 0 additions & 25 deletions tests/test_hg.py

This file was deleted.
