Commit

fix: remove ls_init_value
OrangeSodahub committed Nov 16, 2022
1 parent 21f2bc6 commit 1ebe483
Showing 2 changed files with 1 addition and 11 deletions.
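In short, this commit deletes the dead ls_init_value plumbing (the unused keyword arguments and the commented-out pass-throughs marked TODO) from both the text and vision towers in model.py, fixes one spot where LayerNorm was hardcoded instead of the injected norm_layer, and drops a redundant import in pretrained_text_encoder.py. Short sketches follow the relevant hunks below.

For context, ls_init_value is the knob that would configure LayerScale: a learnable per-channel scaling of each residual branch, introduced in CaiT and adopted by open_clip. A minimal sketch of such a module, assuming the open_clip-style design (illustrative only, not code from this repository):

import torch
import torch.nn as nn

class LayerScale(nn.Module):
    # Illustrative sketch: scale a residual branch by a learnable
    # per-channel gamma initialized to a small ls_init_value.
    def __init__(self, dim: int, init_values: float = 1e-5):
        super().__init__()
        self.gamma = nn.Parameter(init_values * torch.ones(dim))

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return x * self.gamma

# A transformer block would typically wrap its residual branches:
#   x = x + LayerScale(d_model, ls_init_value)(attn_out)
# and use nn.Identity() instead when ls_init_value is None.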
11 changes: 1 addition & 10 deletions server/clip_server/model/model.py
@@ -387,7 +387,7 @@ def __init__(
and self.flash_attention
else nn.MultiheadAttention(d_model, n_head)
)
- self.ln_attn = LayerNorm(d_model) if scale_attn else nn.Identity()
+ self.ln_attn = norm_layer(d_model) if scale_attn else nn.Identity()

self.ln_2 = norm_layer(d_model)
mlp_width = int(d_model * mlp_ratio)
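The one-line change above replaces a hardcoded LayerNorm with the norm_layer argument already used for ln_2 below it, so a caller-supplied normalization class applies consistently across the block. A minimal sketch of the pattern (a simplified stand-in, not the actual class from model.py):

import torch.nn as nn

class Block(nn.Module):
    # Sketch: take the norm class as a parameter instead of hardcoding
    # nn.LayerNorm, so every norm in the block follows the caller's
    # choice (e.g. an fp32 LayerNorm variant for mixed precision).
    def __init__(self, d_model: int, scale_attn: bool = False,
                 norm_layer=nn.LayerNorm):
        super().__init__()
        self.ln_attn = norm_layer(d_model) if scale_attn else nn.Identity()
        self.ln_2 = norm_layer(d_model)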
@@ -456,7 +456,6 @@ def __init__(
width: int = 512,
heads: int = 8,
layers: int = 12,
- ls_init_value: float = None,
output_dim: int = 512,
act_layer: Callable = nn.GELU,
norm_layer: Callable = LayerNorm,
@@ -475,8 +474,6 @@ def __init__(
width=width,
layers=layers,
heads=heads,
- # TODO: adapt this
- # ls_init_value=ls_init_value,
act_layer=act_layer,
norm_layer=norm_layer,
)
@@ -544,7 +541,6 @@ def __init__(
layers: int,
heads: int,
mlp_ratio: float,
- ls_init_value: float = None,
output_dim: int = 512,
act_layer: Callable = nn.GELU,
norm_layer: Callable = LayerNorm,
@@ -576,8 +572,6 @@ def __init__(
layers,
heads,
mlp_ratio,
- # TODO: adapt this
- # ls_init_value=ls_init_value,
act_layer=act_layer,
norm_layer=norm_layer,
)
@@ -744,8 +738,6 @@ def _build_vision_tower(
layers=vision_cfg.layers,
heads=vision_heads,
mlp_ratio=vision_cfg.mlp_ratio,
- # TODO: adapt this
- # ls_init_value=vision_cfg.ls_init_value,
output_dim=embed_dim,
act_layer=act_layer,
norm_layer=norm_layer,
@@ -784,7 +776,6 @@ def _build_text_tower(
width=text_cfg.width,
heads=text_cfg.heads,
layers=text_cfg.layers,
- ls_init_value=text_cfg.ls_init_value,
output_dim=embed_dim,
act_layer=act_layer,
norm_layer=norm_layer,
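Dropping the keyword at this call site (and the commented-out ones above) is required rather than cosmetic: once ls_init_value is removed from the constructor signatures, any leftover use would break tower construction. A hypothetical repro if this line had been kept:

# Transformer(width=512, heads=8, layers=12, ls_init_value=None)
# TypeError: __init__() got an unexpected keyword argument 'ls_init_value'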
1 change: 0 additions & 1 deletion server/clip_server/model/pretrained_text_encoder.py
@@ -4,7 +4,6 @@
import torch.nn as nn
from torch import TensorType

- import transformers
from transformers import AutoModel, AutoTokenizer, AutoConfig, PretrainedConfig
from transformers.modeling_outputs import (
BaseModelOutput,
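The bare import transformers deleted above is redundant as long as the module only references names brought in by the explicit from-imports that remain (an assumption based on the visible context):

from transformers import AutoModel, AutoTokenizer, AutoConfig, PretrainedConfig
from transformers.modeling_outputs import BaseModelOutput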
