Skip to content

Commit 79e11f0

Browse files
committed
changes
1 parent eea5158 commit 79e11f0

File tree

8 files changed

+74
-3
lines changed

8 files changed

+74
-3
lines changed

.gitignore

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,4 +6,5 @@ out
66
stackRegistry.json
77
dist
88
.env
9-
.vscode/launch.json
9+
.vscode/launch.json
10+
test-workspace/.env

preload-dotenv.js

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
// Loads variables from .env into process.env before anything else runs —
// presumably preloaded via `node -r ./preload-dotenv.js` (verify against launch config).
require('dotenv').config();

src/stack/getCaviat.ts

Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,24 @@
1+
export function getCaviat(brief: string) {
2+
// brief contains openai or gpt-4 or gpt-3.5-turbo or gpt4 or gpt3.5turbo
3+
//convert brief to lowercase
4+
brief = brief.toLowerCase();
5+
if (brief.includes('openai') || brief.includes('gpt-4') || brief.includes('gpt4') || brief.includes('gpt-3.5-turbo') || brief.includes('gpt3.5turbo')) {
6+
return `Examples:
7+
import OpenAI from 'openai';
8+
9+
const openai = new OpenAI({
10+
apiKey: process.env.OPENAI_API_KEY
11+
});
12+
13+
async function main() {
14+
const chatCompletion = await openai.chat.completions.create({
15+
messages: [{ role: 'user', content: 'Say this is a test' }],
16+
model: 'gpt-3.5-turbo',
17+
});
18+
}
19+
20+
main();
21+
`;
22+
}
23+
return ``;
24+
}

src/stack/integrations/generic/generateFunction.ts

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@ import { BoilerplateMetadata, Message } from '../lib/types';
33
import { processBoilerplate, readFunctionToString } from '../lib/utils';
44
import { combineSkeleton } from '../../createSkeleton';
55
import { openai } from '../openai/construct';
6+
import { getCaviat } from '../../getCaviat';
67

78
export default async function generateFunction(
89
briefSkeleton: string,
@@ -44,6 +45,7 @@ ${startingBoilerplate}`,
4445
${startingBoilerplate}`,
4546
});
4647
}
48+
const caveats = getCaviat(brief);
4749

4850
try {
4951
const response = await openai.chat.completions.create({
@@ -58,7 +60,11 @@ Ensure that you keep the return type, function name, and parameters the same. Yo
5860
},
5961
{
6062
role: 'user',
61-
content: `Here is the user brief: ${brief}.
63+
content: ``,
64+
},
65+
{
66+
role: 'user',
67+
content: `${caveats}Here is the user brief: ${brief}.
6268
Respond with just what would go in the function file and nothing else. No explanation or words, just the contents of the file. Make sure that the code is runnable if I were to execute it directly.`,
6369
},
6470
],
Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,12 @@
1+
import answerQuestionUsingOpenAI from '../../stacks/answerQuestionUsingOpenAI';
12

23
const question = 'what is the meaning of life?';
34

4-
stack('answer my question using OpenAI', {in: question, out: '42'});
5+
async function main() {
6+
// console.log(await answerQuestionUsingOpenAI("question"));
7+
const response = stack('answer my question using OpenAI', {in: question, out: '42'});;
8+
console.log;
9+
}
10+
11+
main();
12+

test-workspace/package-lock.json

Lines changed: 12 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

test-workspace/package.json

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99
"author": "",
1010
"license": "ISC",
1111
"dependencies": {
12+
"dotenv": "^16.3.1",
1213
"robotjs": "^0.6.0",
1314
"ts-node": "^10.9.1",
1415
"typescript": "^5.3.2"
Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,18 @@
1+
import OpenAI from 'openai';
2+
require('dotenv').config();
3+
4+
const openai = new OpenAI({
5+
apiKey: process.env.OPENAI_API_KEY
6+
});
7+
8+
/**
9+
* Brief: answer my question using OpenAI
10+
*/
11+
export default async function answerQuestionUsingOpenAI(input: string): Promise<string> {
12+
const response = await openai.chat.completions.create({
13+
messages: [{ role: 'user', content: input }],
14+
model: 'gpt-3.5-turbo',
15+
});
16+
17+
return response.choices[0].message.content;
18+
}

0 commit comments

Comments
 (0)