Merge pull request #19 from noelzappy/update-002
Update 002
noelzappy authored Jan 31, 2024
2 parents d107a16 + 47651ee commit 61b3fde
Showing 7 changed files with 954 additions and 639 deletions.
2 changes: 1 addition & 1 deletion .env.example
@@ -1,4 +1,4 @@

# OpennAI credentials
OPENAI_API_KEY= # your OpenAI API key
OPENAI_ORG_ID= # your OpenAI organization ID

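For reference, a filled-in .env follows the same shape as the example file. The placeholder values below are illustrative only; the sk-/org- prefixes reflect OpenAI's usual key format and are an assumption, not values from this repository:

OPENAI_API_KEY=sk-xxxxxxxxxxxxxxxxxxxx   # example placeholder
OPENAI_ORG_ID=org-xxxxxxxxxxxx           # example placeholder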
40 changes: 20 additions & 20 deletions package.json
@@ -6,23 +6,23 @@
     "node": ">= 18.12 <19"
   },
   "devDependencies": {
-    "@types/jest": "^29.5.1",
-    "@types/node": "^20.2.5",
-    "@types/qrcode-terminal": "^0.12.0",
-    "@typescript-eslint/eslint-plugin": "^5.59.8",
-    "@typescript-eslint/parser": "^5.59.8",
-    "eslint": "^8.41.0",
-    "eslint-config-prettier": "^8.8.0",
-    "eslint-plugin-jest": "^27.2.1",
-    "jest": "^29.5.0",
-    "nodemon": "^2.0.22",
-    "pm2": "^5.3.0",
-    "prettier": "^2.8.8",
-    "rimraf": "^5.0.1",
-    "ts-jest": "^29.1.0",
-    "ts-node": "^10.9.1",
+    "@types/jest": "^29.5.11",
+    "@types/node": "^20.11.13",
+    "@types/qrcode-terminal": "^0.12.2",
+    "@typescript-eslint/eslint-plugin": "^6.20.0",
+    "@typescript-eslint/parser": "^6.20.0",
+    "eslint": "^8.56.0",
+    "eslint-config-prettier": "^9.1.0",
+    "eslint-plugin-jest": "^27.6.3",
+    "jest": "^29.7.0",
+    "nodemon": "^3.0.3",
+    "pm2": "^5.3.1",
+    "prettier": "^3.2.4",
+    "rimraf": "^5.0.5",
+    "ts-jest": "^29.1.2",
+    "ts-node": "^10.9.2",
     "tsutils": "^3.21.0",
-    "typescript": "^5.0.4"
+    "typescript": "^5.3.3"
   },
   "scripts": {
     "dev": "npx nodemon src/main.ts",
@@ -40,11 +40,11 @@
   "author": "Emmanuel Yeboah <[email protected]>",
   "license": "MIT",
   "dependencies": {
-    "dotenv": "^16.0.3",
-    "openai": "^3.2.1",
+    "dotenv": "^16.4.1",
+    "openai": "^4.26.0",
     "qrcode-terminal": "^0.12.0",
-    "tslib": "^2.5.2",
-    "whatsapp-web.js": "^1.21.0"
+    "tslib": "^2.6.2",
+    "whatsapp-web.js": "^1.23.0"
   },
   "volta": {
     "node": "18.12.1"
1 change: 1 addition & 0 deletions src/@types/model.d.ts
@@ -28,4 +28,5 @@ export type ChatMessageResponse = {
 export type ChatMessage = {
   message: string;
   systemMessage?: string;
+  quotedMessage?: string;
 };
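For illustration, a ChatMessage carrying a quoted reply could be constructed as in the hypothetical snippet below; the field values are placeholders, not data from the repository:

import { ChatMessage } from './@types/model';

// Hypothetical example; the handler builds this from whatsapp-web.js message data.
const example: ChatMessage = {
  message: 'Summarize the quoted text in one sentence.',
  systemMessage: 'Answer as concisely as possible.',
  quotedMessage: 'Body of the WhatsApp message being replied to.',
};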
4 changes: 2 additions & 2 deletions src/configs/constant.ts
@@ -40,6 +40,6 @@ export const REMOVABLE_PREFIXES = [
   'ZAPPY,',
 ];
 
-export const OPENAI_MODEL = 'gpt-3.5-turbo';
+export const OPENAI_MODEL = 'gpt-3.5-turbo-1106';
 
-export const DEFAULT_SYSTEM_MESSAGE = `You are ChatGPT, a large language model trained by OpenAI. Answer as concisely as possible.\nCurrent date: ${new Date().toISOString()}`;
+export const DEFAULT_SYSTEM_MESSAGE = `You are WhatsApp bot developed by Wordnox.com; Answer as concisely as possible.`;
46 changes: 21 additions & 25 deletions src/configs/openai.ts
@@ -1,48 +1,44 @@
 import process from 'process';
 import dotenv from 'dotenv';
-import {
-  Configuration,
-  OpenAIApi,
-  ChatCompletionRequestMessage,
-  ChatCompletionResponseMessage,
-} from 'openai';
-
+import OpenAI from 'openai';
 import { ChatMessage, ChatMessageResponse } from '../@types/model';
 import { OPENAI_MODEL, DEFAULT_SYSTEM_MESSAGE } from './constant';
 
 dotenv.config();
 
-const configuration = new Configuration({
-  organization: process.env.OPENAI_ORG_ID,
+const openai = new OpenAI({
   apiKey: process.env.OPENAI_API_KEY,
 });
 
-// ChatGPT Client
-const openai = new OpenAIApi(configuration);
-
 export default openai;
 
 export const sendMessage = async (
   messagePrompt: ChatMessage,
 ): Promise<ChatMessageResponse> => {
-  const prompt: ChatCompletionRequestMessage[] = [
-    {
-      role: 'user',
-      content: messagePrompt.message,
-    },
-    {
-      role: 'system',
-      content: messagePrompt.systemMessage || DEFAULT_SYSTEM_MESSAGE,
-    },
-  ];
-
-  const { data } = await openai.createChatCompletion({
+  const prompt: OpenAI.Chat.Completions.ChatCompletionMessageParam[] = [];
+
+  if (messagePrompt.quotedMessage) {
+    prompt.push({
+      role: 'user',
+      content: messagePrompt.quotedMessage,
+    });
+  }
+  prompt.push({
+    role: 'user',
+    content: messagePrompt.message,
+  });
+  prompt.push({
+    role: 'system',
+    content: messagePrompt.systemMessage || DEFAULT_SYSTEM_MESSAGE,
+  });
+
+  const data = await openai.chat.completions.create({
     model: OPENAI_MODEL,
     messages: prompt,
   });
 
-  const message: ChatCompletionResponseMessage = data.choices[0].message;
+  console.log(prompt);
+
+  const message = data.choices[0].message;
 
   return {
     chatId: data.id,
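For context, a minimal hypothetical call site for the exported sendMessage helper is sketched below; it assumes OPENAI_API_KEY is set in the environment and that ChatMessageResponse exposes the chatId field visible in the hunk above. The prompt text is a placeholder:

import { sendMessage } from './configs/openai';

// Hypothetical usage sketch; message text and system prompt are placeholders.
(async () => {
  const reply = await sendMessage({
    message: 'What is the capital of Ghana?',
    systemMessage: 'Answer as concisely as possible.',
    // quotedMessage is optional and only set when replying to a quoted chat
  });
  console.log(reply.chatId); // the OpenAI completion id, returned as chatId
})();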
23 changes: 16 additions & 7 deletions src/handlers/message.ts
@@ -1,5 +1,5 @@
 import Logger from '../utils/logger.util';
-import { getPrefix } from '../utils/misc';
+import { countWords, getPrefix } from '../utils/misc';
 import { Chat, Message } from 'whatsapp-web.js';
 import { ChatMessage, TPrefix } from '../@types/model';
 import { sendMessage } from '../configs/openai';
@@ -10,11 +10,19 @@ const handler = async (message: Message): Promise<void> => {
 
   const chat: Chat = await message.getChat();
 
+
+
+  const quotedMessage = await message.getQuotedMessage()
+
+
+
+
   const prefix: TPrefix = getPrefix(message.body);
 
   const prompt: ChatMessage = {
     message: prefix.message.trim(),
     systemMessage: prefix.systemMessage,
+    quotedMessage: quotedMessage.body
   };
 
   if (!prefix.isPrefix && chat.isGroup) return;
@@ -23,12 +31,13 @@
 
   Logger.info(`Received prompt from ${message.from}: ${prompt.message}`);
 
-  // const promptLength = countWords(prompt);
-  // if (promptLength > 50) {
-  //   return message.reply(
-  //     'MAXIMUM OF 50 WORDS PER MESSAGE ONLY.\nFor longer messages please visit \nhttps://chat.openai.com/ \nOr contact Zappy for a custom solution.',
-  //   );
-  // }
+  const promptLength = countWords(prompt.message);
+  if (promptLength > 70) {
+    message.reply(
+      'MAXIMUM OF 70 WORDS PER MESSAGE ONLY.\nFor longer messages please visit \nhttps://chat.openai.com/ \nOr contact Wordnox.com for a custom solution.',
+    );
+    return;
+  }
 
   const response = await sendMessage(prompt);
 
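The countWords helper imported from ../utils/misc is not shown in this diff; a minimal sketch of what such a word counter might look like (an assumption, not the repository's actual implementation) is:

// Hypothetical sketch of countWords; the real helper lives in src/utils/misc.
export const countWords = (text: string): number =>
  text.trim().split(/\s+/).filter(Boolean).length;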