
Commit

fix: provide default value for tracing_context
Girish Chandrashekar committed Nov 28, 2022
1 parent 784a7c2 commit 971c2b2
Showing 3 changed files with 29 additions and 17 deletions.
14 changes: 9 additions & 5 deletions server/clip_server/executors/clip_onnx.py
@@ -1,20 +1,20 @@
 import os
 import warnings
-from multiprocessing.pool import ThreadPool
-from typing import Optional, Dict
 from functools import partial
+from multiprocessing.pool import ThreadPool
+from typing import Dict, Optional

 import onnxruntime as ort
 from clip_server.executors.helper import (
-    split_img_txt_da,
     preproc_image,
     preproc_text,
     set_rank,
+    split_img_txt_da,
 )
 from clip_server.model import clip
 from clip_server.model.clip_onnx import CLIPOnnxModel
 from clip_server.model.tokenization import Tokenizer
-from jina import Executor, requests, DocumentArray
+from jina import DocumentArray, Executor, requests
 from opentelemetry.trace import NoOpTracer, Span

@@ -135,7 +135,11 @@ async def rank(self, docs: 'DocumentArray', parameters: Dict, **kwargs):

     @requests
     async def encode(
-        self, docs: 'DocumentArray', tracing_context, parameters: Dict = {}, **kwargs
+        self,
+        docs: 'DocumentArray',
+        tracing_context=None,
+        parameters: Dict = {},
+        **kwargs,
     ):
         with self.tracer.start_as_current_span(
             'encode', context=tracing_context
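The same change is applied to all three executors. As a rough illustration of the pattern it enables, here is a minimal sketch, assuming a stripped-down executor; the class and names below are hypothetical, not the repository's code. With tracing_context defaulting to None, encode can be called without an explicit OpenTelemetry context and start_as_current_span simply attaches to whatever context is currently active.

# A minimal sketch, assuming a simplified executor; the class and names here
# are illustrative only, not the repository's code.
from opentelemetry.trace import NoOpTracer


class SketchEncoder:
    def __init__(self):
        # NoOpTracer keeps the span calls valid even when no real tracer is configured.
        self.tracer = NoOpTracer()

    async def encode(self, docs, tracing_context=None, parameters: dict = {}, **kwargs):
        # context=None lets start_as_current_span attach to the active context,
        # so callers are no longer forced to pass tracing_context explicitly.
        with self.tracer.start_as_current_span('encode', context=tracing_context):
            return docs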
16 changes: 10 additions & 6 deletions server/clip_server/executors/clip_tensorrt.py
@@ -1,19 +1,19 @@
 import warnings
-from multiprocessing.pool import ThreadPool
-from typing import Optional, Dict
 from functools import partial
+from multiprocessing.pool import ThreadPool
+from typing import Dict, Optional

 import numpy as np
 from clip_server.executors.helper import (
-    split_img_txt_da,
     preproc_image,
     preproc_text,
     set_rank,
+    split_img_txt_da,
 )
 from clip_server.model import clip
-from clip_server.model.tokenization import Tokenizer
 from clip_server.model.clip_trt import CLIPTensorRTModel
-from jina import Executor, requests, DocumentArray
+from clip_server.model.tokenization import Tokenizer
+from jina import DocumentArray, Executor, requests
 from opentelemetry.trace import NoOpTracer, Span

@@ -109,7 +109,11 @@ async def rank(self, docs: 'DocumentArray', parameters: Dict, **kwargs):

     @requests
     async def encode(
-        self, docs: 'DocumentArray', tracing_context, parameters: Dict = {}, **kwargs
+        self,
+        docs: 'DocumentArray',
+        tracing_context=None,
+        parameters: Dict = {},
+        **kwargs,
     ):
         with self.tracer.start_as_current_span(
             'encode', context=tracing_context
16 changes: 10 additions & 6 deletions server/clip_server/executors/clip_torch.py
@@ -1,22 +1,22 @@
 import os
 import warnings
-from multiprocessing.pool import ThreadPool
-from typing import Optional, Dict
 from functools import partial
-from opentelemetry.trace import NoOpTracer, Span
+from multiprocessing.pool import ThreadPool
+from typing import Dict, Optional

 import numpy as np
 import torch
 from clip_server.executors.helper import (
-    split_img_txt_da,
     preproc_image,
     preproc_text,
     set_rank,
+    split_img_txt_da,
 )
 from clip_server.model import clip
 from clip_server.model.clip_model import CLIPModel
 from clip_server.model.tokenization import Tokenizer
-from jina import Executor, requests, DocumentArray
+from jina import DocumentArray, Executor, requests
+from opentelemetry.trace import NoOpTracer, Span


 class CLIPEncoder(Executor):
@@ -120,7 +120,11 @@ async def rank(self, docs: 'DocumentArray', parameters: Dict, **kwargs):

     @requests
     async def encode(
-        self, docs: 'DocumentArray', tracing_context, parameters: Dict = {}, **kwargs
+        self,
+        docs: 'DocumentArray',
+        tracing_context=None,
+        parameters: Dict = {},
+        **kwargs,
     ):
         with self.tracer.start_as_current_span(
             'encode', context=tracing_context
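For completeness, a hedged usage sketch of the two call styles the new default allows, assuming a stripped-down encode like the one above (illustrative only; in the real executors Jina invokes encode with a DocumentArray and preprocessing plus model inference run inside the span):

import asyncio

from opentelemetry import context as otel_context
from opentelemetry.trace import NoOpTracer

tracer = NoOpTracer()


async def encode(docs, tracing_context=None, parameters: dict = {}, **kwargs):
    # Same shape as the patched signature: tracing_context is now optional.
    with tracer.start_as_current_span('encode', context=tracing_context):
        return docs


# With the old signature (no default), a call like this raised a TypeError for
# the missing tracing_context argument; with the default it simply runs under
# the currently active (possibly empty) context.
asyncio.run(encode(docs=[]))

# Callers that do propagate tracing can still hand over an explicit context.
asyncio.run(encode(docs=[], tracing_context=otel_context.get_current()))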
