-
Notifications
You must be signed in to change notification settings - Fork 14
/
Copy pathjson-output.js
62 lines (49 loc) · 1.89 KB
/
json-output.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
/**
* @file examples/json/json-output.js
* @description This example demonstrates JSON output by specifying JSON output requirements through the prompt.
*
* To run this example, you first need to install the required module by executing:
*
* npm install dotenv
*/
const { LLMInterface } = require('../../src/index.js');
const { simpleprompt } = require('../../src/utils/defaults.js');
const { prettyHeader, prettyResult } = require('../../src/utils/utils.js');
require('dotenv').config({ path: '../../.env' });
// Setup your key and interface.
// The provider id must match a key LLMInterface recognizes; the API key is
// read from the .env file loaded above (HUGGINGFACE_API_KEY).
const interfaceName = 'huggingface';
// NOTE(review): may be undefined if the env var is missing — setApiKey below
// is still called with it; verify LLMInterface's behavior for empty keys.
const apiKey = process.env.HUGGINGFACE_API_KEY;
// Example description shown by prettyHeader() in exampleUsage().
const description = `This example demonstrates JSON output by specifying JSON output requirements through the prompt.
To run this example, you first need to install the required modules by executing:
npm install dotenv`;
/**
 * Main exampleUsage() function.
 *
 * Sends a prompt instructing the model to respond with only a JSON object
 * of shape {title, reason}, parses the returned payload when it arrives as
 * a string, and pretty-prints the result. Request and parse failures are
 * caught and logged rather than crashing the example.
 *
 * @returns {Promise<void>}
 */
async function exampleUsage() {
  // `const`: the prompt is built once and never reassigned.
  const prompt = `${simpleprompt} Return 5 results.\n\nProvide the response as a JSON object.\n\nFollow this output format, only responding with the JSON object and nothing else:\n\n{title, reason}`;

  prettyHeader(
    'JSON Output (Prompt Based) Example',
    description,
    prompt,
    interfaceName,
  );

  LLMInterface.setApiKey(interfaceName, apiKey);

  try {
    console.time('Timer');
    const response = await LLMInterface.sendMessage(interfaceName, prompt, {
      max_tokens: 1024,
    });

    // Since this isn't native JSON mode and we aren't repairing the output,
    // we can't guarantee the response element will be valid JSON — parse
    // defensively and attach context instead of surfacing a bare SyntaxError.
    if (response.results && typeof response.results !== 'object') {
      try {
        response.results = JSON.parse(response.results);
      } catch (parseError) {
        throw new Error('Model response was not valid JSON', {
          cause: parseError,
        });
      }
    }

    prettyResult(response.results);
    console.log();
    console.timeEnd('Timer');
    console.log();
  } catch (error) {
    console.error('Error processing LLMInterface.sendMessage:', error);
  }
}

exampleUsage();