Merge branch 'data-index-creation-container' into pulumi
rchan26 committed Jan 3, 2024
2 parents 5cf74ca + 0552862 commit 9ee8c68
Showing 5 changed files with 40 additions and 21 deletions.
2 changes: 1 addition & 1 deletion reginald/models/models/chat_completion.py
@@ -36,7 +36,7 @@ def __init__(
         """
         logging.info(f"Setting up AzureOpenAI LLM (model {model_name})")
         super().__init__(*args, **kwargs)
-        self.api_base = get_env_var("OPENAI_AZURE_API_BASE")
+        self.api_base = get_env_var("OPENAI_AZURE_API_BASE", secret_value=False)
         self.api_key = get_env_var("OPENAI_AZURE_API_KEY")
         self.api_type = "azure"
         self.api_version = "2023-03-15-preview"
2 changes: 1 addition & 1 deletion reginald/models/models/llama_index.py
@@ -845,7 +845,7 @@ def __init__(
 
         # deployment name can be found in the Azure AI Studio portal
         self.deployment_name = model_name
-        self.openai_api_base = get_env_var("OPENAI_AZURE_API_BASE")
+        self.openai_api_base = get_env_var("OPENAI_AZURE_API_BASE", secret_value=False)
         self.openai_api_key = get_env_var("OPENAI_AZURE_API_KEY")
         self.openai_api_version = "2023-09-15-preview"
         self.temperature = 0.7
40 changes: 26 additions & 14 deletions reginald/parser_utils.py
@@ -32,7 +32,7 @@ def __init__(self, create_index_only: bool = False, *args, **kwargs):
             "-m",
             type=str,
             help=("Select which type of model to use " "Default is 'hello'."),
-            default=lambda: get_env_var("REGINALD_MODEL"),
+            default=lambda: get_env_var("REGINALD_MODEL", secret_value=False),
             choices=MODELS,
         )
         self.add_argument(
@@ -49,7 +49,7 @@ def __init__(self, create_index_only: bool = False, *args, **kwargs):
                 "to the model name on OpenAI."
                 "(ignored if using 'hello' model types)."
             ),
-            default=lambda: get_env_var("REGINALD_MODEL_NAME"),
+            default=lambda: get_env_var("REGINALD_MODEL_NAME", secret_value=False),
         )
         self.add_argument(
             "--mode",
@@ -59,7 +59,7 @@ def __init__(self, create_index_only: bool = False, *args, **kwargs):
                 "(ignored if not using llama-index). "
                 "Default is 'chat'."
             ),
-            default=lambda: get_env_var("LLAMA_INDEX_MODE"),
+            default=lambda: get_env_var("LLAMA_INDEX_MODE", secret_value=False),
             choices=["chat", "query"],
         )
         self.add_argument(
@@ -71,7 +71,9 @@ def __init__(self, create_index_only: bool = False, *args, **kwargs):
                 "Default is False."
             ),
             action=argparse.BooleanOptionalAction,
-            default=lambda: get_env_var("LLAMA_INDEX_FORCE_NEW_INDEX"),
+            default=lambda: get_env_var(
+                "LLAMA_INDEX_FORCE_NEW_INDEX", secret_value=False
+            ),
         )
         self.add_argument(
             "--is-path",
@@ -82,7 +84,7 @@ def __init__(self, create_index_only: bool = False, *args, **kwargs):
                 "Default is False."
             ),
             action=argparse.BooleanOptionalAction,
-            default=lambda: get_env_var("LLAMA_INDEX_IS_PATH"),
+            default=lambda: get_env_var("LLAMA_INDEX_IS_PATH", secret_value=False),
         )
         self.add_argument(
             "--n-gpu-layers",
@@ -93,7 +95,9 @@ def __init__(self, create_index_only: bool = False, *args, **kwargs):
                 "(ignored if not using llama-index-llama-cpp). "
                 "Default is 0."
             ),
-            default=lambda: int(get_env_var("LLAMA_INDEX_N_GPU_LAYERS")),
+            default=lambda: int(
+                get_env_var("LLAMA_INDEX_N_GPU_LAYERS", secret_value=False)
+            ),
         )
         self.add_argument(
             "--device",
@@ -104,7 +108,7 @@ def __init__(self, create_index_only: bool = False, *args, **kwargs):
                 "(ignored if not using llama-index-hf model). "
                 "Default is 'auto'."
             ),
-            default=lambda: get_env_var("LLAMA_INDEX_DEVICE"),
+            default=lambda: get_env_var("LLAMA_INDEX_DEVICE", secret_value=False),
         )
 
         # data index arguments
@@ -116,7 +120,7 @@ def __init__(self, create_index_only: bool = False, *args, **kwargs):
                 "Location for data (ignored if not using llama-index). "
                 "Default is 'data' in the root of the repo."
             ),
-            default=lambda: get_env_var("LLAMA_INDEX_DATA_DIR")
+            default=lambda: get_env_var("LLAMA_INDEX_DATA_DIR", secret_value=False)
             or (pathlib.Path(__file__).parent.parent / "data").resolve(),
         )
         self.add_argument(
@@ -129,7 +133,7 @@ def __init__(self, create_index_only: bool = False, *args, **kwargs):
                 "Currently supports 'handbook', 'wikis', 'public', or 'all_data'. "
                 "Default is 'all_data'."
             ),
-            default=lambda: get_env_var("LLAMA_INDEX_WHICH_INDEX"),
+            default=lambda: get_env_var("LLAMA_INDEX_WHICH_INDEX", secret_value=False),
             choices=["handbook", "wikis", "public", "all_data"],
         )
         self.add_argument(
@@ -141,7 +145,9 @@ def __init__(self, create_index_only: bool = False, *args, **kwargs):
                 "(ignored if not using llama-index). "
                 "Default is 4096."
             ),
-            default=lambda: int(get_env_var("LLAMA_INDEX_MAX_INPUT_SIZE")),
+            default=lambda: int(
+                get_env_var("LLAMA_INDEX_MAX_INPUT_SIZE", secret_value=False)
+            ),
         )
         self.add_argument(
             "--k",
@@ -151,7 +157,7 @@ def __init__(self, create_index_only: bool = False, *args, **kwargs):
                 "(ignored if not using llama-index). "
                 "Default is 3."
             ),
-            default=lambda: int(get_env_var("LLAMA_INDEX_K")),
+            default=lambda: int(get_env_var("LLAMA_INDEX_K", secret_value=False)),
         )
         self.add_argument(
             "--chunk-size",
@@ -162,7 +168,9 @@ def __init__(self, create_index_only: bool = False, *args, **kwargs):
                 "(ignored if not using llama-index). "
                 "Default is computed by ceil(max_input_size / k)."
             ),
-            default=lambda: int(get_env_var("LLAMA_INDEX_CHUNK_SIZE")),
+            default=lambda: int(
+                get_env_var("LLAMA_INDEX_CHUNK_SIZE", secret_value=False)
+            ),
         )
         self.add_argument(
             "--chunk-overlap-ratio",
@@ -173,7 +181,9 @@ def __init__(self, create_index_only: bool = False, *args, **kwargs):
                 "(ignored if not using llama-index). "
                 "Default is 0.1."
             ),
-            default=lambda: float(get_env_var("LLAMA_INDEX_CHUNK_OVERLAP_RATIO")),
+            default=lambda: float(
+                get_env_var("LLAMA_INDEX_CHUNK_OVERLAP_RATIO", secret_value=False)
+            ),
         )
         self.add_argument(
             "--num-output",
@@ -184,7 +194,9 @@ def __init__(self, create_index_only: bool = False, *args, **kwargs):
                 "(ignored if not using llama-index). "
                 "Default is 512."
             ),
-            default=lambda: int(get_env_var("LLAMA_INDEX_NUM_OUTPUT")),
+            default=lambda: int(
+                get_env_var("LLAMA_INDEX_NUM_OUTPUT", secret_value=False)
+            ),
         )


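A note on the pattern used throughout this file: argparse stores a callable default as-is rather than invoking it, so wrapping get_env_var in a lambda defers the environment lookup (and any logging of its value) until the defaults are explicitly resolved after parsing, presumably inside this repo's get_args helper. A minimal sketch of such a resolver, under that assumption (resolve_callable_defaults and the trimmed get_env_var below are hypothetical stand-ins, not this repo's code):

import argparse
import os


def get_env_var(var: str, secret_value: bool = True) -> str | None:
    # hypothetical stand-in for reginald.utils.get_env_var (logging omitted)
    return os.getenv(var)


def resolve_callable_defaults(args: argparse.Namespace) -> argparse.Namespace:
    # argparse leaves callable defaults uncalled; invoke them after parsing
    for name, value in vars(args).items():
        if callable(value):
            setattr(args, name, value())
    return args


parser = argparse.ArgumentParser()
parser.add_argument(
    "--model",
    "-m",
    type=str,
    # the env var is only read (and logged) when the flag is absent
    default=lambda: get_env_var("REGINALD_MODEL", secret_value=False),
)
args = resolve_callable_defaults(parser.parse_args([]))
print(args.model)  # value of REGINALD_MODEL, or None if unset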
2 changes: 1 addition & 1 deletion reginald/slack_bot/setup_bot.py
@@ -142,7 +142,7 @@ async def main():
             "Select the emoji for the model. By default, looks for the REGINALD_EMOJI "
             "environment variable or uses the rocket emoji"
         ),
-        default=lambda: get_env_var("REGINALD_EMOJI") or "rocket",
+        default=lambda: get_env_var("REGINALD_EMOJI", secret_value=False) or "rocket",
     )
     args = get_args(parser)
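One detail of the emoji default worth noting: the or "rocket" fallback applies not only when REGINALD_EMOJI is unset (get_env_var returns None) but also when it is set to an empty string, since both are falsy:

emoji = get_env_var("REGINALD_EMOJI", secret_value=False) or "rocket"
# REGINALD_EMOJI unset       -> "rocket"
# REGINALD_EMOJI=""          -> "rocket"
# REGINALD_EMOJI="sparkles"  -> "sparkles"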
15 changes: 11 additions & 4 deletions reginald/utils.py
@@ -2,7 +2,7 @@
 import os
 
 
-def get_env_var(var: str, log: bool = True) -> str | None:
+def get_env_var(var: str, log: bool = True, secret_value: bool = True) -> str | None:
     """
     Get environment variable. Logs provided if log is True.
@@ -12,6 +12,10 @@ def get_env_var(var: str, log: bool = True) -> str | None:
         Name of environment variable
     log : bool, optional
         Whether or not to log if reading was successful, by default True
+    secret_value : bool, optional
+        Whether or not the value is a secret, by default True.
+        If True, the value will not be logged.
+        Ignored if log is False.
 
     Returns
     -------
@@ -23,11 +23,14 @@ def get_env_var(var: str, log: bool = True) -> str | None:
     value = os.getenv(var)
 
     if log:
-        if value is None:
+        if value is not None:
+            if secret_value:
+                logging.info(f"Got environment variable '{var}' successfully")
+            else:
+                logging.info(f"Got environment variable '{var}' successfully: {value}")
+        else:
             logging.warn(
                 f"Environment variable '{var}' not found. Can ignore if using default values."
             )
-        else:
-            logging.info(f"Got environment variable '{var}' successfully")
 
     return value
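The effect of the new flag is easiest to see side by side. A quick usage sketch with made-up values (assuming the package is importable as reginald.utils and the default logging configuration):

import logging
import os

from reginald.utils import get_env_var

logging.basicConfig(level=logging.INFO)

# illustrative values only, not real credentials
os.environ["OPENAI_AZURE_API_BASE"] = "https://example.openai.azure.com"
os.environ["OPENAI_AZURE_API_KEY"] = "not-a-real-key"

# non-secret: the value itself is included in the log line
get_env_var("OPENAI_AZURE_API_BASE", secret_value=False)
# INFO:root:Got environment variable 'OPENAI_AZURE_API_BASE' successfully: https://example.openai.azure.com

# secret (the default): only the success message is logged, never the value
get_env_var("OPENAI_AZURE_API_KEY")
# INFO:root:Got environment variable 'OPENAI_AZURE_API_KEY' successfully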
