This repository has been archived by the owner on Sep 25, 2024. It is now read-only.

feat: Add service provider Gemini #95

Merged · 4 commits · Dec 21, 2023
src/options.tsx (78 changes: 57 additions & 21 deletions)
@@ -4,7 +4,7 @@ import "./options.css";
import { getStorage, setStorage } from "./utils";
import Switch from "./components/Switch";
import FilterRules from "./components/FilterRules";
import { FilterRuleItem } from "./types";
import { FilterRuleItem, ServiceProvider } from "./types";
import { DEFAULT_PROMPT } from "./const";

const TABS = [
@@ -16,6 +16,9 @@ const TABS = [

function BasicSettings() {
const [model, setModel] = useState<string | undefined>("gpt-3.5-turbo");
const [serviceProvider, setServiceProvider] = useState<ServiceProvider>(
"GPT"
);
const [apiURL, setApiURL] = useState<string | undefined>(
"https://api.openai.com/v1/chat/completions"
);
@@ -24,6 +27,11 @@ function BasicSettings() {
]);
useEffect(() => {
getStorage<string>("model").then(setModel);
getStorage<ServiceProvider>("serviceProvider").then((value) => {
if (value) {
setServiceProvider(value);
}
});
getStorage<string>("apiURL").then(setApiURL);
getStorage<FilterRuleItem[]>("filterRules").then(setFilterRules);
}, []);
@@ -33,6 +41,14 @@ function BasicSettings() {
setStorage("model", e.target.value);
}, []);

const updateServiceProvider = useCallback(
(e: ChangeEvent<HTMLSelectElement>) => {
setServiceProvider(e.target.value as ServiceProvider);
setStorage("serviceProvider", e.target.value);
},
[]
);

const updateApiURL = useCallback((e: ChangeEvent<HTMLInputElement>) => {
setApiURL(e.target.value);
setStorage("apiURL", e.target.value);
@@ -47,38 +63,58 @@ function BasicSettings() {
<div className="flex flex-col gap-y-8 p-4">
<div className="flex flex-col gap-y-2">
<label htmlFor="models" className="text-xl font-medium">
Choose an model
Choose a service provider
</label>

<select
value={model}
onChange={updateModel}
value={serviceProvider}
onChange={updateServiceProvider}
id="models"
className="bg-gray-50 border w-64 border-gray-300 text-gray-900 text-sm rounded-lg
focus:ring-blue-500 focus:border-blue-500 block"
>
<option selected>Choose a model</option>
<option value="gpt-4">GPT 4</option>
<option value="gpt-4-32k">GPT 4 32k</option>
<option value="gpt-3.5-turbo-1106">GPT 3.5 turbo 1106</option>
<option value="gpt-3.5-turbo">GPT 3.5 turbo</option>
<option value="GPT">OpenAI GPT</option>
<option value="Gemini">Google Gemini</option>
</select>
</div>

<div className="flex flex-col gap-y-2">
<label htmlFor="api_url" className="text-xl font-medium">
API URL
</label>

<input
className="bg-gray-50 border w-64 border-gray-300 text-gray-900 text-sm rounded-lg
{serviceProvider === "GPT" && (
<>
<div className="flex flex-col gap-y-2">
<label htmlFor="models" className="text-xl font-medium">
Choose a model
</label>

<select
value={model}
onChange={updateModel}
id="models"
className="bg-gray-50 border w-64 border-gray-300 text-gray-900 text-sm rounded-lg
focus:ring-blue-500 focus:border-blue-500 block"
value={apiURL}
onChange={updateApiURL}
id="api_url"
/>
</div>
>
<option selected>Choose a model</option>
<option value="gpt-4">GPT 4</option>
<option value="gpt-4-32k">GPT 4 32k</option>
<option value="gpt-3.5-turbo-1106">GPT 3.5 turbo 1106</option>
<option value="gpt-3.5-turbo">GPT 3.5 turbo</option>
</select>
</div>

<div className="flex flex-col gap-y-2">
<label htmlFor="api_url" className="text-xl font-medium">
API URL
</label>

<input
className="bg-gray-50 border w-64 border-gray-300 text-gray-900 text-sm rounded-lg
focus:ring-blue-500 focus:border-blue-500 block"
value={apiURL}
onChange={updateApiURL}
id="api_url"
/>
</div>
</>
)}
<div className="flex flex-col gap-y-2">
<label htmlFor="api_url" className="text-xl font-medium">
Filter Rule
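Note: `ServiceProvider` is imported from `./types`, but src/types.ts is not part of this diff view. Judging from the string literals the new code compares against, the type is presumably a simple union along these lines (a sketch, not the actual file contents):

```ts
// Hypothetical sketch: src/types.ts is not shown in this PR.
// The options and popup code only ever compare against "GPT" and "Gemini",
// so a two-member string union is the likely shape.
export type ServiceProvider = "GPT" | "Gemini";
```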
src/popup.tsx (96 changes: 68 additions & 28 deletions)
@@ -9,8 +9,17 @@ import Switch from "./components/Switch";
import { ColorPicker } from "./components/ColorPicker";
import { Color, DEFAULT_GROUP, TabColorConfig } from "./const";
import { toast } from "./components/toast";
import { ServiceProvider } from "./types";

const getApiKeyHrefMap = {
Gemini: "https://ai.google.dev/",
GPT: "https://platform.openai.com/api-keys",
};

const Popup = () => {
const [serviceProvider, setServiceProvider] = useState<ServiceProvider>(
"GPT"
);
const [openAIKey, setOpenAIKey] = useState<string | undefined>("");
const [types, setTypes] = useState<string[]>([]);
const [isOn, setIsOn] = useState<boolean | undefined>(true);
@@ -44,6 +53,11 @@ const Popup = () => {
getStorage<boolean>("colorsEnabled").then((colorsEnabled) => {
if (colorsEnabled !== undefined) setColorsEnabled(colorsEnabled);
});
getStorage<ServiceProvider>("serviceProvider").then((value) => {
if (value) {
setServiceProvider(value);
}
});
}, []);

const updateOpenAIKey = useCallback((e: ChangeEvent<HTMLInputElement>) => {
Expand Down Expand Up @@ -105,34 +119,60 @@ const Popup = () => {

setIsValidating(true);
try {
const response = await fetch(
"https://api.openai.com/v1/engines/davinci/completions",
{
method: "POST",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${openAIKey}`,
},
body: JSON.stringify({
prompt: "This is a test",
max_tokens: 5,
temperature: 0.5,
top_p: 1,
frequency_penalty: 0,
presence_penalty: 0,
stop: ["\n"],
}),
if (serviceProvider === "Gemini") {
const response = await fetch(
"https://generativelanguage.googleapis.com/v1beta3/models/text-bison-001:generateText?key=" +
openAIKey,
{
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify({
prompt: { text: "This is a test" },
}),
}
);
if (response.status === 200) {
setIsValidated(true);
} else {
setIsValidated(false);
const txt = await response.text();
toast({
type: "error",
message: "Invalid Genmini Key: " + response.status + " " + txt,
});
}
);
if (response.status === 200) {
setIsValidated(true);
} else {
setIsValidated(false);
const txt = await response.text();
toast({
type: "error",
message: "Invalid OpenAI Key: " + response.status + " " + txt,
});
const response = await fetch(
"https://api.openai.com/v1/engines/davinci/completions",
{
method: "POST",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${openAIKey}`,
},
body: JSON.stringify({
prompt: "This is a test",
max_tokens: 5,
temperature: 0.5,
top_p: 1,
frequency_penalty: 0,
presence_penalty: 0,
stop: ["\n"],
}),
}
);
if (response.status === 200) {
setIsValidated(true);
} else {
setIsValidated(false);
const txt = await response.text();
toast({
type: "error",
message: "Invalid OpenAI Key: " + response.status + " " + txt,
});
}
}
} catch (error) {
setIsValidated(false);
@@ -176,7 +216,7 @@ const Popup = () => {
className="absolute -top-2 left-2 inline-block bg-white px-1 text-xs font-medium text-gray-900"
htmlFor="openai-key"
>
OpenAI Key
API Key
</label>

<div className="flex items-center gap-x-2">
@@ -212,7 +252,7 @@ const Popup = () => {
<div className="text-sm text-gray-500 mb-2">
You can get your key from{" "}
<a
href="https://platform.openai.com/api-keys"
href={getApiKeyHrefMap[serviceProvider] || getApiKeyHrefMap.GPT}
target="_blank"
rel="noreferrer"
className="text-primary/lg underline underline-offset-2 hover:text-primary"
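Note: the key-validation request above hits the legacy v1beta3 `text-bison-001` endpoint, while the new src/service-provider/gemini.ts module below calls the v1beta `gemini-pro:generateContent` endpoint. For consistency, validation could target that same endpoint; a sketch under that assumption (not part of this PR):

```ts
// Sketch only (not part of this PR). Validates an API key against the same
// v1beta gemini-pro endpoint that src/service-provider/gemini.ts uses,
// rather than the older v1beta3 text-bison-001 endpoint used above.
const validateGeminiKey = async (apiKey: string): Promise<boolean> => {
  const response = await fetch(
    "https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:generateContent?key=" +
      apiKey,
    {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({
        contents: [{ role: "user", parts: [{ text: "This is a test" }] }],
      }),
    }
  );
  return response.status === 200;
};
```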
src/service-provider/gemini.ts (66 changes: 66 additions & 0 deletions)
@@ -0,0 +1,66 @@
import { TabInfo } from "../types";
import Mustache from "mustache";
import { getStorage } from "../utils";
import { DEFAULT_PROMPT } from "../const";

const renderPromptForGemini = async (
tab: TabInfo,
types: string[]
): Promise<{ role: string; parts: [{ text: string }] }[]> => {
const prompt: string = (await getStorage("prompt")) || DEFAULT_PROMPT;
return [
{
role: "user",
parts: [
{
text: "",
},
],
},
{
role: "model",
parts: [
{
text: "You are a brwoser tab group classificator",
},
],
},
{
role: "user",
parts: [
{
text: Mustache.render(prompt, {
tabURL: tab.url,
tabTitle: tab.title,
types: types.join(", "),
}),
},
],
},
];
};

export const fetchGemini = async (
apiKey: string,
tabInfo: TabInfo,
types: string[]
) => {
const response = await fetch(
"https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:generateContent?key=" +
apiKey,
{
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify({
contents: await renderPromptForGemini(tabInfo, types),
}),
}
);

const data = await response.json();

const type = data.candidates[0].content.parts[0].text;
return type;
};
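For reference, a usage sketch of the new `fetchGemini` helper (not part of the PR; the tab object and group names are hypothetical, and only `url` and `title` matter here since those are the only fields the prompt template reads):

```ts
// Usage sketch (not part of the PR): classify a single tab with the new Gemini provider.
// The tab object and group names are hypothetical illustration values.
import { fetchGemini } from "./service-provider/gemini";
import { TabInfo } from "./types";

async function classifyExampleTab(apiKey: string): Promise<string> {
  const tab = {
    url: "https://news.ycombinator.com/",
    title: "Hacker News",
  } as TabInfo;

  // Returns the raw text of the first Gemini candidate, e.g. "News".
  return fetchGemini(apiKey, tab, ["Work", "News", "Social", "Docs"]);
}
```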
src/service-provider/gpt.ts (56 changes: 56 additions & 0 deletions)
@@ -0,0 +1,56 @@
import { TabInfo } from "../types";
import Mustache from "mustache";
import { getStorage } from "../utils";
import { DEFAULT_PROMPT } from "../const";

const renderPromptForOpenAI = async (
tab: TabInfo,
types: string[]
): Promise<
[{ role: string; content: string }, { role: string; content: string }]
> => {
const prompt: string = (await getStorage("prompt")) || DEFAULT_PROMPT;
return [
{
role: "system",
content: "You are a brwoser tab group classificator",
},
{
role: "user",
content: Mustache.render(prompt, {
tabURL: tab.url,
tabTitle: tab.title,
types: types.join(", "),
}),
},
];
};

export const fetchGpt = async (
apiKey: string,
tabInfo: TabInfo,
types: string[]
) => {
const apiURL =
(await getStorage("apiURL")) ||
"https://api.openai.com/v1/chat/completions";

const model = (await getStorage("model")) || "gpt-3.5-turbo";

const response = await fetch(apiURL, {
method: "POST",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${apiKey}`,
"Api-Key": apiKey,
},
body: JSON.stringify({
messages: await renderPromptForOpenAI(tabInfo, types),
model,
}),
});

const data = await response.json();
const type = data.choices[0].message.content;
return type;
};
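The diff does not show the call site that picks between the two providers; presumably it reads the stored `serviceProvider` value. A sketch under that assumption (not code from this PR):

```ts
// Dispatch sketch (assumption: the real call site is not shown in this diff).
// Choose a provider based on the "serviceProvider" value the options page stores,
// defaulting to GPT exactly as the options UI does.
import { fetchGemini } from "./service-provider/gemini";
import { fetchGpt } from "./service-provider/gpt";
import { getStorage } from "./utils";
import { ServiceProvider, TabInfo } from "./types";

export const classifyTab = async (
  apiKey: string,
  tab: TabInfo,
  types: string[]
): Promise<string> => {
  const provider =
    (await getStorage<ServiceProvider>("serviceProvider")) || "GPT";
  return provider === "Gemini"
    ? fetchGemini(apiKey, tab, types)
    : fetchGpt(apiKey, tab, types);
};
```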