Skip to content

Commit 1efb63c

Browse files
committed
Replace runtime process.env reads with better-env config
1 parent 9f8e1e6 commit 1efb63c

6 files changed

Lines changed: 32 additions & 294 deletions

File tree

app/src/app/api/cron/luma-sync/route.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@ function isAuthorized(request: Request, cronSecret: string): boolean {
1414
}
1515

1616
export async function GET(request: Request) {
17-
const cronSecret = process.env.CRON_SECRET?.trim();
17+
const cronSecret = mainConfig.cron.secret?.trim();
1818

1919
if (!cronSecret) {
2020
return NextResponse.json(

app/src/lib/config.ts

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
import { authConfig } from "./auth/config";
2+
import { cronConfig } from "./cron/config";
23
import { databaseConfig } from "./database/config";
34
import { electricConfig } from "./electric/config";
45
import { instanceConfig } from "./instance/config";
@@ -17,6 +18,11 @@ export const mainConfig = {
1718
resend: {
1819
apiKey: integrationsConfig.resendApiKey,
1920
},
21+
ai: {
22+
gatewayApiKey: integrationsConfig.aiGatewayApiKey,
23+
vercelOidcToken: integrationsConfig.vercelOidcToken,
24+
},
25+
cron: cronConfig,
2026
luma: lumaConfig,
2127
discord: {
2228
botToken: integrationsConfig.discordBotToken,

app/src/lib/cron/config.ts

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
import { configSchema, server } from "better-env/config-schema";
2+
3+
const cronEnvConfig = configSchema("Cron", {
4+
secret: server({
5+
env: "CRON_SECRET",
6+
optional: true,
7+
}),
8+
});
9+
10+
export const cronConfig = {
11+
secret: cronEnvConfig.server.secret,
12+
};

app/src/lib/integrations/config.ts

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,14 @@ const integrationsEnvConfig = configSchema("Integrations", {
55
env: "RESEND_API_KEY",
66
optional: true,
77
}),
8+
aiGatewayApiKey: server({
9+
env: "AI_GATEWAY_API_KEY",
10+
optional: true,
11+
}),
12+
vercelOidcToken: server({
13+
env: "VERCEL_OIDC_TOKEN",
14+
optional: true,
15+
}),
816
discordBotToken: server({
917
env: "DISCORD_BOT_TOKEN",
1018
optional: true,
@@ -17,6 +25,8 @@ const integrationsEnvConfig = configSchema("Integrations", {
1725

1826
export const integrationsConfig = {
1927
resendApiKey: integrationsEnvConfig.server.resendApiKey,
28+
aiGatewayApiKey: integrationsEnvConfig.server.aiGatewayApiKey,
29+
vercelOidcToken: integrationsEnvConfig.server.vercelOidcToken,
2030
discordBotToken: integrationsEnvConfig.server.discordBotToken,
2131
discordReviewChannelId: integrationsEnvConfig.server.discordReviewChannelId,
2232
};

app/src/lib/luma.ts

Lines changed: 0 additions & 291 deletions
This file was deleted.

app/src/workflows/luma-sync/steps/ai.ts

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
import { createGateway, generateObject } from "ai";
2+
import { mainConfig } from "@/lib/config";
23
import type { LumaEvent } from "@/lib/luma";
34
import {
45
aiSuggestedEventSchema,
@@ -9,8 +10,8 @@ import {
910
const AI_MODEL = "openai/gpt-5.3-medium";
1011

1112
function getGatewayModel() {
12-
const gatewayApiKey = process.env.AI_GATEWAY_API_KEY;
13-
const oidcToken = process.env.VERCEL_OIDC_TOKEN;
13+
const gatewayApiKey = mainConfig.ai.gatewayApiKey;
14+
const oidcToken = mainConfig.ai.vercelOidcToken;
1415

1516
if (!gatewayApiKey && !oidcToken) {
1617
throw new Error(

0 commit comments

Comments (0)