
fix: upgrade jina (#788)
* fix: update in codes

* fix: cicd

* docs: update the pymodules

* fix: disable tokenizer parallelism

* fix: bump grpcio

* fix: revert tokenizer change

* fix: conflict
numb3r3 authored Jul 29, 2022
1 parent c458dd6 commit ab00c2a
Showing 11 changed files with 30 additions and 38 deletions.
3 changes: 1 addition & 2 deletions .github/workflows/force-docker-build.yml
@@ -67,8 +67,7 @@ jobs:
echo "TAG_ALIAS=\
jinaai/clip_executor:latest${PIP_TAG}${ENGINE_TAG}, \
jinaai/clip_executor:${CAS_VERSION}${PIP_TAG}${ENGINE_TAG}, \
jinaai/clip_executor:${CAS_MINOR_VERSION}${PIP_TAG}${ENGINE_TAG}, \
jinaai/clip_executor:${CAS_MAJOR_VERSION}${PIP_TAG}${ENGINE_TAG} \
jinaai/clip_executor:${CAS_MINOR_VERSION}${PIP_TAG}${ENGINE_TAG} \
" >> $GITHUB_ENV
elif [[ "${{ github.event.inputs.triggered_by }}" == "MANUAL" ]]; then
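For readers skimming the workflow change: only the major-only image tag is dropped. A minimal sketch of how the trimmed `TAG_ALIAS` expands, assuming hypothetical values `CAS_VERSION=0.5.0`, `CAS_MINOR_VERSION=0.5`, and empty `PIP_TAG`/`ENGINE_TAG` (the real values are computed in earlier workflow steps):

```bash
# Hypothetical values; in the workflow these are derived in earlier steps.
CAS_VERSION=0.5.0
CAS_MINOR_VERSION=0.5
PIP_TAG=""
ENGINE_TAG=""

# After this change the alias list stops at the minor version:
# latest, 0.5.0 and 0.5 -- a major-only tag such as jinaai/clip_executor:0 is no longer pushed.
echo "TAG_ALIAS=\
jinaai/clip_executor:latest${PIP_TAG}${ENGINE_TAG}, \
jinaai/clip_executor:${CAS_VERSION}${PIP_TAG}${ENGINE_TAG}, \
jinaai/clip_executor:${CAS_MINOR_VERSION}${PIP_TAG}${ENGINE_TAG} \
"
```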
2 changes: 0 additions & 2 deletions .github/workflows/force-hub-push.yml
@@ -54,13 +54,11 @@ jobs:
-t latest \
-t ${CAS_VERSION} \
-t ${CAS_MINOR_VERSION} \
-t ${CAS_MAJOR_VERSION} \
" >> $GITHUB_ENV
echo "GPU_TAG_ALIAS=\
-t latest-gpu \
-t ${CAS_VERSION}-gpu \
-t ${CAS_MINOR_VERSION}-gpu \
-t ${CAS_MAJOR_VERSION}-gpu \
" >> $GITHUB_ENV
elif [[ "${{ github.event.inputs.triggered_by }}" == "MANUAL" ]]; then
7 changes: 3 additions & 4 deletions Dockerfiles/base.Dockerfile
@@ -1,5 +1,5 @@
# !!! An ARG declared before a FROM is outside of a build stage, so it can’t be used in any instruction after a FROM
ARG JINA_VERSION=3.6.0
ARG JINA_VERSION=3.7.0

FROM jinaai/jina:${JINA_VERSION}-py38-standard

@@ -24,12 +24,11 @@ WORKDIR /cas
RUN if [ "${BACKEND_TAG}" != "torch" ]; then python3 -m pip install --no-cache-dir "./[${BACKEND_TAG}]" ; fi \
&& python3 -m pip install --no-cache-dir .

RUN CLIP_PATH=$(python -c "import clip_server;print(clip_server.__path__[0])") \
&& echo "\
RUN echo "\
jtype: CLIPEncoder\n\
metas:\n\
py_modules:\n\
- $CLIP_PATH/executors/clip_$BACKEND_TAG.py\n\
- clip_server.executors.clip_$BACKEND_TAG\n\
" > /tmp/config.yml


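The effect of the base-image change above: `py_modules` now points at the installed `clip_server` package by dotted module path, so the `CLIP_PATH` lookup is gone. A rough sketch of verifying the result inside the built image, assuming the default `BACKEND_TAG=torch`:

```bash
# Inside the built image (assumption: BACKEND_TAG=torch was used at build time).
# The dotted path is importable because `pip install .` put clip_server on sys.path.
python3 -c "import clip_server.executors.clip_torch"

cat /tmp/config.yml
# Expected shape (roughly):
#   jtype: CLIPEncoder
#   metas:
#     py_modules:
#       - clip_server.executors.clip_torch
```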
7 changes: 2 additions & 5 deletions Dockerfiles/cuda.Dockerfile
@@ -3,7 +3,7 @@ ARG CUDA_VERSION=11.4.2
FROM nvcr.io/nvidia/cuda:${CUDA_VERSION}-cudnn8-runtime-ubuntu20.04
ENV DEBIAN_FRONTEND=noninteractive

ARG JINA_VERSION=3.6.0
ARG JINA_VERSION=3.7.0
ARG BACKEND_TAG=torch

# constant, wont invalidate cache
@@ -32,14 +32,11 @@ RUN if [ "${BACKEND_TAG}" != "torch" ]; then python3 -m pip install --no-cache-d

RUN echo "\
jtype: CLIPEncoder\n\
with:\n\
device: cuda\n\
metas:\n\
py_modules:\n\
- clip_server/executors/clip_$BACKEND_TAG.py\n\
- clip_server.executors.clip_$BACKEND_TAG\n\
" > /tmp/config.yml


ENTRYPOINT ["jina", "executor", "--uses", "/tmp/config.yml"]


11 changes: 5 additions & 6 deletions Dockerfiles/tensorrt.Dockerfile
@@ -4,7 +4,7 @@ ARG TENSORRT_VERSION=22.04

FROM nvcr.io/nvidia/tensorrt:${TENSORRT_VERSION}-py3

ARG JINA_VERSION=3.6.0
ARG JINA_VERSION=3.7.0
ARG BACKEND_TAG=tensorrt

# constant, wont invalidate cache
@@ -25,14 +25,13 @@ WORKDIR /cas

RUN python3 -m pip install --no-cache-dir "./[$BACKEND_TAG]"

RUN CLIP_PATH=$(python -c "import clip_server;print(clip_server.__path__[0])") \
&& echo '\

RUN echo "\
jtype: CLIPEncoder\n\
metas:\n\
py_modules:\n\
- $CLIP_PATH/executors/clip_$BACKEND_TAG.py\n\
' > /tmp/config.yml

- clip_server.executors.clip_$BACKEND_TAG\n\
" > /tmp/config.yml


ENTRYPOINT ["jina", "executor", "--uses", "/tmp/config.yml"]
2 changes: 1 addition & 1 deletion client/setup.py
@@ -41,7 +41,7 @@
long_description_content_type='text/markdown',
zip_safe=False,
setup_requires=['setuptools>=18.0', 'wheel'],
install_requires=['jina>=3.3.21', 'docarray[common]>=0.10.3', 'packaging'],
install_requires=['jina>=3.7.0', 'docarray[common]>=0.13.0', 'packaging'],
extras_require={
'test': [
'pytest',
22 changes: 11 additions & 11 deletions docs/user-guides/server.md
@@ -129,7 +129,7 @@ executors:
jtype: CLIPEncoder
metas:
py_modules:
- executors/clip_torch.py
- clip_server.executors.clip_torch
```
````

@@ -146,7 +146,7 @@ executors:
jtype: CLIPEncoder
metas:
py_modules:
- executors/clip_onnx.py
- clip_server.executors.clip_onnx
```
````

@@ -164,7 +164,7 @@ executors:
jtype: CLIPEncoder
metas:
py_modules:
- executors/clip_tensorrt.py
- clip_server.executors.clip_tensorrt
```
````

@@ -192,7 +192,7 @@ executors:
with:
metas:
py_modules:
- executors/clip_torch.py
- clip_server.executors.clip_torch
```
````
@@ -215,7 +215,7 @@ executors:
with:
metas:
py_modules:
- executors/clip_torch.py
- clip_server.executors.clip_torch
```
````
@@ -238,7 +238,7 @@ executors:
with:
metas:
py_modules:
- executors/clip_torch.py
- clip_server.executors.clip_torch
```
````
@@ -293,7 +293,7 @@ executors:
device: cpu
metas:
py_modules:
- executors/clip_torch.py
- clip_server.executors.clip_torch
```

To use a custom model in the ONNX runtime, one can do:
@@ -316,7 +316,7 @@ executors:
model_path: 'custom-model'
metas:
py_modules:
- executors/clip_onnx.py
- clip_server.executors.clip_onnx
```

```{warning}
@@ -344,7 +344,7 @@ executors:
jtype: CLIPEncoder
metas:
py_modules:
- executors/clip_torch.py
- clip_server.executors.clip_torch
```

(flow-config)=
@@ -379,7 +379,7 @@ executors:
jtype: CLIPEncoder
metas:
py_modules:
- executors/clip_torch.py
- clip_server.executors.clip_torch
```

## Environment variables
@@ -461,7 +461,7 @@ executors:
jtype: CLIPEncoder
metas:
py_modules:
- executors/clip_torch.py
- clip_server.executors.clip_torch
monitoring: true
port_monitoring: 9091
```
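The documentation snippets above all switch from file paths to the dotted module path. A minimal sketch of a custom flow YAML using the new form and starting the server with it (hypothetical file name and port; assumes the `clip_server` package is installed):

```bash
# Hypothetical custom flow config using the new dotted py_modules entry.
cat > my-flow.yml <<'EOF'
jtype: Flow
with:
  port: 51000
executors:
  - name: clip_t
    uses:
      jtype: CLIPEncoder
      metas:
        py_modules:
          - clip_server.executors.clip_torch
EOF

# Start the server with the custom config.
python3 -m clip_server my-flow.yml
```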
2 changes: 1 addition & 1 deletion server/clip_server/onnx-flow.yml
@@ -8,5 +8,5 @@ executors:
jtype: CLIPEncoder
metas:
py_modules:
- executors/clip_onnx.py
- clip_server.executors.clip_onnx
replicas: 1
2 changes: 1 addition & 1 deletion server/clip_server/tensorrt-flow.yml
@@ -8,6 +8,6 @@ executors:
jtype: CLIPEncoder
metas:
py_modules:
- executors/clip_tensorrt.py
- clip_server.executors.clip_tensorrt
timeout_ready: 3000000
replicas: 1
2 changes: 1 addition & 1 deletion server/clip_server/torch-flow.yml
@@ -8,5 +8,5 @@ executors:
jtype: CLIPEncoder
metas:
py_modules:
- executors/clip_torch.py
- clip_server.executors.clip_torch
replicas: 1
8 changes: 4 additions & 4 deletions server/setup.py
@@ -43,16 +43,16 @@
setup_requires=['setuptools>=18.0', 'wheel'],
install_requires=[
'protobuf>=3.13.0,<=3.20.1',
'grpcio>=1.46.0,<1.48.0',
'grpcio-reflection>=1.46.0,<1.48.0',
'grpcio-health-checking>=1.46.0,<1.48.0',
'ftfy',
'torch',
'regex',
'torchvision',
'jina>=3.6.0',
'jina>=3.7.0',
'prometheus-client',
'open_clip_torch>=1.3.0',
'grpcio>=1.46.0,<1.48.0', # tmp requires
'grpcio-reflection>=1.46.0,<1.48.0', # tmp requires
'grpcio-health-checking>=1.46.0,<1.48.0', # tmp requires
],
extras_require={
'onnx': [
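The gRPC packages are temporarily capped below 1.48.0 alongside the jina 3.7.0 bump. A quick way to confirm the resolved versions land in that range after installing the server package (a sketch using standard pip commands, not part of this diff):

```bash
# Install the server package from a checkout (installing the published
# clip-server wheel should behave the same), then confirm the pinned gRPC range.
python3 -m pip install ./server
python3 -m pip show grpcio grpcio-reflection grpcio-health-checking | grep -E '^(Name|Version)'
# Each reported Version should satisfy >=1.46.0,<1.48.0.
```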
