class POEAPI(LLM):
    """Poe API LLM backend for PandasAI.

    Wraps the (unofficial) ``poe-api`` Python client so PandasAI can talk to
    the bots hosted on poe.com (e.g. ``"chinchilla"`` = ChatGPT,
    ``"beaver"`` = GPT-4, ``"a2"`` = Claude-instant) instead of a paid API.
    Authentication uses the value of the ``p-b`` cookie from a logged-in
    poe.com browser session as the token.

    Example:
        >>> from pandasai import PandasAI
        >>> from pandasai.llm.poe_api import POEAPI
        >>> llm = POEAPI(bot_name="chinchilla", token="<p-b cookie value>")
        >>> pandas_ai = PandasAI(llm)
    """

    def __init__(
        self,
        bot_name: str,
        token: str,
        **kwargs,
    ) -> None:
        """Create a Poe API client for PandasAI.

        Args:
            bot_name: Codename of the Poe bot to query (e.g. ``"chinchilla"``
                for ChatGPT).
            token: poe.com ``p-b`` cookie value used to authenticate.
            **kwargs: Accepted for forward compatibility; currently unused.

        Raises:
            ImportError: If the optional ``poe-api`` package is not installed.
        """
        self.bot_name = bot_name
        self.token = token

        try:
            # Imported lazily so pandasai keeps working without the optional
            # poe-api dependency unless this backend is actually used.
            import poe
        except ImportError as exc:
            raise ImportError(
                "Unable to import poe-api python package "
                "Please install it with `pip install -U poe-api`"
            ) from exc

        self.poe_api_bot = poe.Client(token=self.token)

    @property
    def type(self) -> str:
        """Identifier PandasAI uses to tag this LLM backend."""
        return "POEAPI"

    def call(self, instruction: Prompt, value: str, suffix: str = "") -> str:
        """Send the rendered prompt to the configured Poe bot and return its reply.

        Args:
            instruction: Prompt template; its ``str()`` rendering is the base prompt.
            value: Text appended to the rendered prompt.
            suffix: Optional trailing text appended after ``value``.

        Returns:
            The bot's reply text. ``send_message`` streams chunks whose
            ``"text"`` field grows cumulatively, so the final chunk carries
            the complete answer — assumes poe-api's documented streaming
            contract; verify against the installed client version.

        Raises:
            RuntimeError: If the bot streams no chunks at all.
        """
        prompt = str(instruction) + value + suffix
        chunk = None
        for chunk in self.poe_api_bot.send_message(self.bot_name, prompt):
            pass  # drain the stream; each chunk supersedes the previous one
        if chunk is None:
            # Previously this fell through to an unbound-name error; fail loudly
            # with a clear message instead.
            raise RuntimeError("Poe API returned no response chunks")
        return chunk["text"]
class TestPOEAPI(unittest.TestCase):
    """Unit tests for the POEAPI LLM wrapper."""

    def setUp(self):
        """Build a POEAPI instance without the optional dependency or network I/O."""
        self.bot_name = "chinchilla"
        self.token = ""
        # Inject a stub ``poe`` module so POEAPI.__init__ can import it and
        # construct its client without contacting poe.com.
        with patch.dict("sys.modules", {"poe": MagicMock()}):
            # POEAPI takes bot_name and token (there is no model_name kwarg).
            self.poe_api_bot = POEAPI(bot_name=self.bot_name, token=self.token)

    def test_type(self):
        """``type`` must report the backend identifier PandasAI dispatches on."""
        assert self.poe_api_bot.type == "POEAPI"
However - it might not work as chatGPT when it comes to performance, hence for now users using - this module have to tune the existing prompts to get better results. + + """ - temp: Optional[float] = 0 - top_p: Optional[float] = 0.1 - top_k: Optional[int] = 40 - n_batch: Optional[int] = 8 - n_threads: Optional[int] = 4 - n_predict: Optional[int] = 256 - max_tokens: Optional[int] = 200 - repeat_last_n: Optional[int] = 64 - repeat_penalty: Optional[float] = 1.18 - - # _model_repo_url = "https://POEAPI.io/models/models.json" - # _supported_models = [ - # metadata["filename"] for metadata in requests.get(_model_repo_url).json() - # ] + def __init__( self, @@ -64,16 +42,11 @@ def __init__( """ POEAPI client for using Pandas AI Args: - model_name: The name of the model. - model_folder_path: The folder inside the model weights are present - allow_download: If True will trigger download the specified model - n_threads: Number of CPU threads to be used while running the model - download_chunk_size: The chunk size set for downloading the model + bot_name: The name of the model. + token: The token of the Poe API. + """ - - - # automatically create the default folder and download the model - + try: import poe @@ -87,19 +60,6 @@ def __init__( "Please install it with `pip install -U poe-api`" ) - self.default_parameters = { - "max_tokens": self.max_tokens, - "n_predict": self.n_predict, - "top_k": self.top_k, - "top_p": self.top_p, - "temp": self.temp, - "n_batch": self.n_batch, - "repeat_penalty": self.repeat_penalty, - "repeat_last_n": self.repeat_last_n, - } - - # this will override all the parameters with all the pre-existing ones - self.params = {**self.default_parameters, **kwargs}