Commit 7f14e47

feat: Improve CI (#431)
1 parent: 6925676

5 files changed (+9, -11 lines)


e2e/utils.ts

Lines changed: 0 additions & 4 deletions
@@ -90,8 +90,6 @@ export async function runCreateLlama({
     ...dataSourceArgs,
     "--vector-db",
     vectorDb,
-    "--open-ai-key",
-    process.env.OPENAI_API_KEY,
     "--use-pnpm",
     "--port",
     port,
@@ -103,8 +101,6 @@
     tools ?? "none",
     "--observability",
     "none",
-    "--llama-cloud-key",
-    process.env.LLAMA_CLOUD_API_KEY,
   ];
 
   if (templateUI) {
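
The API-key flags disappear from the e2e helper because the generator no longer prompts for them in CI (see the isCI guards in the files below). A minimal sketch of how a test run could still supply credentials purely through the environment; spawnCreateLlama and the exact wiring are assumptions, not the helper's actual code:

```ts
import { spawn } from "node:child_process";

// Hypothetical helper: spawns create-llama the way the e2e suite might,
// passing keys via env vars instead of the removed CLI flags.
function spawnCreateLlama(args: string[]) {
  return spawn("npx", ["create-llama", ...args], {
    env: {
      ...process.env,
      // Forces the non-interactive isCI path inside the generator.
      PLAYWRIGHT_TEST: "1",
      // Keys travel through the environment now (assumption based on the
      // removed --open-ai-key / --llama-cloud-key arguments).
      OPENAI_API_KEY: process.env.OPENAI_API_KEY ?? "",
      LLAMA_CLOUD_API_KEY: process.env.LLAMA_CLOUD_API_KEY ?? "",
    },
    stdio: "inherit",
  });
}
```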

helpers/providers/openai.ts

Lines changed: 2 additions & 1 deletion
@@ -3,6 +3,7 @@ import ora from "ora";
 import { red } from "picocolors";
 import prompts from "prompts";
 import { ModelConfigParams, ModelConfigQuestionsParams } from ".";
+import { isCI } from "../../questions";
 import { questionHandlers } from "../../questions/utils";
 
 const OPENAI_API_URL = "https://api.openai.com/v1";
@@ -30,7 +31,7 @@ export async function askOpenAIQuestions({
     },
   };
 
-  if (!config.apiKey) {
+  if (!config.apiKey && !isCI) {
     const { key } = await prompts(
       {
         type: "text",

questions/index.ts

Lines changed: 3 additions & 1 deletion
@@ -4,10 +4,12 @@ import { askProQuestions } from "./questions";
 import { askSimpleQuestions } from "./simple";
 import { QuestionArgs, QuestionResults } from "./types";
 
+export const isCI = ciInfo.isCI || process.env.PLAYWRIGHT_TEST === "1";
+
 export const askQuestions = async (
   args: QuestionArgs,
 ): Promise<QuestionResults> => {
-  if (ciInfo.isCI || process.env.PLAYWRIGHT_TEST === "1") {
+  if (isCI) {
     return await getCIQuestionResults(args);
   } else if (args.pro) {
     // TODO: refactor pro questions to return a result object
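
For reference, a sketch of what the newly exported flag evaluates to. The ci-info package reports isCI when a well-known CI environment variable (for example CI or GITHUB_ACTIONS) is present, while PLAYWRIGHT_TEST=1 lets local Playwright runs take the same non-interactive path:

```ts
import ciInfo from "ci-info";

// Equivalent of the exported flag above; it is evaluated once when the
// module is first imported, so PLAYWRIGHT_TEST must be set before that.
const isCI = ciInfo.isCI || process.env.PLAYWRIGHT_TEST === "1";

console.log(isCI ? "CI question flow" : "interactive prompts");
```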

questions/questions.ts

Lines changed: 2 additions & 1 deletion
@@ -1,5 +1,6 @@
 import { blue, green } from "picocolors";
 import prompts from "prompts";
+import { isCI } from ".";
 import { COMMUNITY_OWNER, COMMUNITY_REPO } from "../helpers/constant";
 import { EXAMPLE_FILE } from "../helpers/datasources";
 import { getAvailableLlamapackOptions } from "../helpers/llama-pack";
@@ -386,7 +387,7 @@ export const askProQuestions = async (program: QuestionArgs) => {
 
   // Ask for LlamaCloud API key when using a LlamaCloud index or LlamaParse
   if (isUsingLlamaCloud || program.useLlamaParse) {
-    if (!program.llamaCloudKey) {
+    if (!program.llamaCloudKey && !isCI) {
       // if already set, don't ask again
       // Ask for LlamaCloud API key
      const { llamaCloudKey } = await prompts(

templates/types/streaming/fastapi/app/services/file.py

Lines changed: 2 additions & 4 deletions
@@ -242,13 +242,11 @@ def _add_file_to_llama_cloud_index(
     except ImportError as e:
         raise ValueError("LlamaCloudFileService is not found") from e
 
-    project_id = index._get_project_id()
-    pipeline_id = index._get_pipeline_id()
     # LlamaCloudIndex is a managed index so we can directly use the files
     upload_file = (file_name, BytesIO(file_data))
     doc_id = LLamaCloudFileService.add_file_to_pipeline(
-        project_id,
-        pipeline_id,
+        index.project.id,
+        index.pipeline.id,
         upload_file,
         custom_metadata={},
     )
