Added Poe-api as LLM reference #425

Closed
wants to merge 7 commits into from
36 changes: 36 additions & 0 deletions README.md
@@ -152,6 +152,42 @@ pandas_ai.generate_features(df)
pandas_ai.plot_histogram(df, column="gdp")
```

### POE-API as LLM Reference

To use PandasAI without the OpenAI API, you can use Poe-api instead.

Get your token from [poe.com](https://poe.com) by creating an account; no billing details are required.

To find the token, open the browser developer tools on poe.com and copy the value of the `p-b` cookie (Inspect -> Application -> Cookies -> `p-b`).

Supported bots:

- `chinchilla`: ChatGPT
- `a2`: Claude-instant
- `capybara`: Assistant
- `a2_100k`: Claude-instant-100k
- `llama_2_7b_chat`: Llama-2-7b
- `llama_2_13b_chat`: Llama-2-13b
- `a2_2`: Claude-2-100k
- `llama_2_70b_chat`: Llama-2-70b
- `agouti`: ChatGPT-16k
- `beaver`: GPT-4
- `vizcacha`: GPT-4-32k
- `acouchy`: Google-PaLM

For example, to use ChatGPT, pass the bot name `chinchilla`.

Example usage:

```python
from pandasai import PandasAI
from pandasai.llm.poe_api import POEAPI

llm = POEAPI(bot_name="chinchilla", token="<your p-b cookie value>")
pandas_ai = PandasAI(llm)
```
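A minimal end-to-end sketch building on the snippet above; the dataframe and prompt are illustrative, and the token placeholder must be replaced with your own `p-b` cookie value:

```python
import pandas as pd

from pandasai import PandasAI
from pandasai.llm.poe_api import POEAPI

df = pd.DataFrame({
    "country": ["United States", "United Kingdom", "Japan"],
    "gdp": [21400000, 2830000, 5060000],
})

# token is a placeholder; use the p-b cookie value from poe.com
llm = POEAPI(bot_name="chinchilla", token="<your p-b cookie value>")
pandas_ai = PandasAI(llm)

pandas_ai(df, prompt="Which country has the highest GDP?")
```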


Learn more about the shortcuts [here](https://pandas-ai.readthedocs.io/en/latest/shortcuts/).

## 🔒 Privacy & Security
2 changes: 2 additions & 0 deletions docs/requirements.txt
@@ -64,3 +64,5 @@ watchdog==2.1.9
# via mkdocs
zipp==3.8.0
# via importlib-metadata

poe-api
115 changes: 115 additions & 0 deletions pandasai/llm/poe_api.py
@@ -0,0 +1,115 @@
"""Poe-Api LLMs

This module provides a family of commercially / non-commercially available
LLMs maintained by Quora
Example:
Use below example to call POEAPI supporrted models
>>> from pandasai.llm.poe_api import POEAPI
"""
from typing import Optional

from pandasai.prompts.base import Prompt
from .base import LLM


class POEAPI(LLM):
    """Poe-api LLM.

    The base LLM class is extended to support bots served through Poe via the
    unofficial poe-api client. Additional generation parameters such as temp,
    top_p and top_k can be passed as **kwargs when instantiating the class if
    the user wants to override the defaults below.

    Example:
        >>> import pandas as pd
        >>> from pandasai import PandasAI
        >>> from pandasai.llm.poe_api import POEAPI
        >>> llm = POEAPI(bot_name="chinchilla", token="<p-b cookie value>")
        >>> df_ai = PandasAI(llm)
        >>> response = df_ai(df, prompt="What is the sum of the GDP in this table?")

    Note: bots accessed through Poe may not match the OpenAI API in
    performance, so users of this module may have to tune the existing prompts
    to get better results.
    """

temp: Optional[float] = 0
top_p: Optional[float] = 0.1
top_k: Optional[int] = 40
n_batch: Optional[int] = 8
n_threads: Optional[int] = 4
n_predict: Optional[int] = 256
max_tokens: Optional[int] = 200
repeat_last_n: Optional[int] = 64
repeat_penalty: Optional[float] = 1.18

    def __init__(
        self,
        bot_name: str,
        token: str,
        **kwargs,
    ) -> None:
        """
        Poe-api client for use with Pandas AI.

        Args:
            bot_name: The codename of the Poe bot to chat with
                (e.g. "chinchilla" for ChatGPT).
            token: The `p-b` cookie value from poe.com.
            **kwargs: Optional overrides for the default generation parameters.
        """
        self.bot_name = bot_name
        self.token = token

        try:
            import poe

            self.poe_api_bot = poe.Client(token=self.token)
        except ImportError:
            raise ImportError(
                "Unable to import the poe-api python package. "
                "Please install it with `pip install -U poe-api`."
            )

        self.default_parameters = {
            "max_tokens": self.max_tokens,
            "n_predict": self.n_predict,
            "top_k": self.top_k,
            "top_p": self.top_p,
            "temp": self.temp,
            "n_batch": self.n_batch,
            "repeat_penalty": self.repeat_penalty,
            "repeat_last_n": self.repeat_last_n,
        }

        # any keyword arguments passed in override the defaults above
        self.params = {**self.default_parameters, **kwargs}

    @property
    def type(self) -> str:
        return "POEAPI"

    def call(self, instruction: Prompt, value: str, suffix: str = "") -> str:
        prompt = str(instruction) + value + suffix
        # send_message streams the reply in chunks; each chunk's "text" field
        # holds the cumulative reply so far, so the last chunk has the full answer
        chunk = None
        for chunk in self.poe_api_bot.send_message(self.bot_name, prompt):
            pass
        return chunk["text"] if chunk else ""
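For context on the loop in `call()`: the poe-api client's `send_message` yields partial message dicts as the bot streams its reply, and the `"text"` field of each dict carries the cumulative reply so far, so draining the generator and reading the final chunk returns the full answer. A minimal stand-alone sketch of the same pattern (the chunks below are invented for illustration):

```python
from typing import Dict, Iterator


def last_chunk_text(chunks: Iterator[Dict[str, str]]) -> str:
    """Drain a stream of Poe-style message chunks and return the final cumulative text."""
    text = ""
    for chunk in chunks:
        text = chunk["text"]  # each chunk holds the full reply so far
    return text


# illustrative stand-in for client.send_message(bot_name, prompt)
fake_stream = iter([{"text": "The sum of"}, {"text": "The sum of the GDP is 42."}])
print(last_chunk_text(fake_stream))  # -> "The sum of the GDP is 42."
```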
1 change: 1 addition & 0 deletions pyproject.toml
@@ -19,6 +19,7 @@ google-generativeai = { version = "^0.1.0rc2", optional = true }
google-cloud-aiplatform = { version = "^1.26.1", optional = true }
langchain = { version = "^0.0.199", optional = true}
beautifulsoup4 = { version = "^4.12.2", optional = true }
poe-api = "0.5.2"

[tool.poetry.group.dev.dependencies]
black = "^23.3.0"
18 changes: 18 additions & 0 deletions tests/llms/test_poeapi.py
@@ -0,0 +1,18 @@
import unittest
from unittest.mock import patch

from pandasai.llm.poe_api import POEAPI


class TestPOEAPI(unittest.TestCase):
    """Unit tests for the POEAPI LLM class"""

    def setUp(self):
        self.bot_name = "chinchilla"
        self.token = ""

        # patch the poe client so constructing the LLM makes no network call
        with patch("poe.Client"):
            self.poe_api_bot = POEAPI(
                bot_name=self.bot_name,
                token=self.token,
            )

    def test_type(self):
        assert self.poe_api_bot.type == "POEAPI"
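As a possible follow-up test (not part of the diff above), a sketch that mocks the client so `call()` can be checked without a network round trip; it assumes poe-api is installed, and the chunk values are invented for illustration:

```python
from unittest.mock import MagicMock, patch

from pandasai.llm.poe_api import POEAPI


def test_call_returns_last_chunk_text():
    # avoid a real connection to poe.com when the class is constructed
    with patch("poe.Client"):
        llm = POEAPI(bot_name="chinchilla", token="")

    # stub the streamed response: each chunk carries the cumulative text so far
    llm.poe_api_bot = MagicMock()
    llm.poe_api_bot.send_message.return_value = iter(
        [{"text": "The answer is"}, {"text": "The answer is 42."}]
    )

    assert llm.call("What is 6 * 7?", "", suffix="") == "The answer is 42."
```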