OpenNext Cloudflare / Docker / Setup (#248)

* Add Cloudflare generator with Wrangler and OpenNext support

This update introduces a new Cloudflare generator to streamline configuration and deployment via Wrangler and OpenNext. It registers the necessary templates, modifies project files, and adds Cloudflare-specific scripts and dependencies to the package.json. Additionally, .hbs files are updated in .prettierignore for formatting consistency.

* Add GitHub username prompt and improve setup scripts

Introduce a prompt for GitHub username to personalize project setup. Enhance the setup scripts by adding PNPM verification, configuring `upstream` remote, and removing the `origin` remote. Adjust health check and error handling for better reliability.

* Add Dockerfile generator to turbo generators

Introduced a new generator to create Dockerfile configurations for standalone Next.js applications. This includes modifying `next.config.mjs` for standalone output, updating dependencies in `package.json`, and adding a Dockerfile template. The generator is now registered in the turbo setup.

* Add console-based logger implementation. This is required for edge environments such as Cloudflare.

* Remove deprecated Supabase client utilities

The `server-actions-client`, `route-handler-client`, and `server-component-client` utilities have been removed in favor of `getSupabaseServerClient`. This simplifies and consolidates the API, ensuring consistency across server-side usage. Version bumped to 2.9.0 to reflect breaking changes.
This commit is contained in:
Giancarlo Buomprisco
2025-04-29 09:12:08 +07:00
committed by GitHub
parent 76bfeddd32
commit 4cfb4f936f
20 changed files with 441 additions and 230 deletions

View File

@@ -1,2 +1,3 @@
database.types.ts
playwright-report
*.hbs

View File

@@ -0,0 +1,37 @@
import { NextResponse } from 'next/server';
import { getSupabaseServerAdminClient } from '@kit/supabase/server-admin-client';
/**
* Healthcheck endpoint for the web app. If this endpoint returns a 200, the web app will be considered healthy.
* If this endpoint returns a 500, the web app will be considered unhealthy.
* This endpoint can be used by Docker to determine if the web app is healthy and should be restarted.
*/
/**
 * Healthcheck endpoint handler.
 *
 * Returns a JSON report of each monitored service and sets the HTTP status
 * accordingly: 200 when every service is healthy, 500 otherwise. The status
 * code matters because container probes (e.g. Docker's `curl -f` / `wget`)
 * only look at the status, not the body — the previous implementation
 * always returned 200, so an unhealthy database was never reported.
 */
export async function GET() {
  const isDbHealthy = await getSupabaseHealthCheck();

  const services = {
    database: isDbHealthy,
    // add other services here
  };

  // Healthy only if every monitored service reports healthy.
  const isHealthy = Object.values(services).every(Boolean);

  return NextResponse.json(
    { services },
    { status: isHealthy ? 200 : 500 },
  );
}
/**
 * Probes database health by invoking the `is_set` RPC for the
 * `billing_provider` configuration field through the admin client.
 *
 * @returns `true` when the RPC completes without an error, `false` when the
 *   RPC reports an error or anything throws (client creation included).
 */
async function getSupabaseHealthCheck() {
  try {
    const adminClient = getSupabaseServerAdminClient();

    const response = await adminClient.rpc('is_set', {
      field_name: 'billing_provider',
    });

    // Healthy when no error was reported (error is an object or null).
    return response.error == null;
  } catch {
    // Any failure (network, misconfiguration) counts as unhealthy.
    return false;
  }
}

View File

@@ -1,6 +1,6 @@
{
"name": "next-supabase-saas-kit-turbo",
"version": "2.8.0",
"version": "2.9.0",
"private": true,
"sideEffects": false,
"engines": {
@@ -11,6 +11,7 @@
"name": "MakerKit"
},
"scripts": {
"turbo": "turbo",
"preinstall": "pnpm run --filter scripts requirements",
"postinstall": "manypkg fix",
"build": "turbo build --cache-dir=.turbo",

View File

@@ -0,0 +1,9 @@
/**
 * Console-backed logger implementation for environments where pino is
 * unavailable (e.g. edge runtimes such as Cloudflare Workers). Every level
 * delegates directly to the corresponding `console` method.
 */
const { info, error, warn, debug } = console;

const Logger = {
  info,
  error,
  warn,
  debug,
  // `console` has no dedicated fatal level; reuse `error`.
  fatal: error,
};

export { Logger };

View File

@@ -2,8 +2,9 @@ import { createRegistry } from '../registry';
import { Logger as LoggerInstance } from './logger';
// Define the type for the logger provider. Currently supporting 'pino'.
type LoggerProvider = 'pino';
type LoggerProvider = 'pino' | 'console';
// Use pino as the default logger provider
const LOGGER = (process.env.LOGGER ?? 'pino') as LoggerProvider;
// Create a registry for logger implementations
@@ -16,6 +17,13 @@ loggerRegistry.register('pino', async () => {
return PinoLogger;
});
// Register the 'console' logger implementation
loggerRegistry.register('console', async () => {
const { Logger: ConsoleLogger } = await import('./impl/console');
return ConsoleLogger;
});
/**
* @name getLogger
* @description Retrieves the logger implementation based on the LOGGER environment variable using the registry API.

View File

@@ -13,9 +13,6 @@
"./server-client": "./src/clients/server-client.ts",
"./server-admin-client": "./src/clients/server-admin-client.ts",
"./middleware-client": "./src/clients/middleware-client.ts",
"./server-actions-client": "./src/clients/server-actions-client.ts",
"./route-handler-client": "./src/clients/route-handler-client.ts",
"./server-component-client": "./src/clients/server-component-client.ts",
"./browser-client": "./src/clients/browser-client.ts",
"./check-requires-mfa": "./src/check-requires-mfa.ts",
"./require-user": "./src/require-user.ts",

View File

@@ -1,65 +0,0 @@
import 'server-only';
import { cookies } from 'next/headers';
import { createClient } from '@supabase/supabase-js';
import type { CookieOptions } from '@supabase/ssr';
import { createServerClient } from '@supabase/ssr';
import { Database } from '../database.types';
import {
getServiceRoleKey,
warnServiceRoleKeyUsage,
} from '../get-service-role-key';
import { getSupabaseClientKeys } from '../get-supabase-client-keys';
const serviceRoleKey = getServiceRoleKey();
const keys = getSupabaseClientKeys();
/**
 * @name getSupabaseRouteHandlerClient
 * @deprecated Use `getSupabaseServerClient` instead.
 * @description Get a Supabase client for use in the Route Handler Routes
 * @param params - pass `{ admin: true }` to obtain a service-role client;
 *   defaults to the cookie-backed anonymous client.
 */
export function getSupabaseRouteHandlerClient<GenericSchema = Database>(
  params = {
    admin: false,
  },
) {
  if (params.admin) {
    // Surface the service-role key usage warning (helper defined elsewhere in this package).
    warnServiceRoleKeyUsage();
    // Admin clients are stateless: no session persistence, refresh, or URL detection.
    return createClient<GenericSchema>(keys.url, serviceRoleKey, {
      auth: {
        persistSession: false,
        autoRefreshToken: false,
        detectSessionInUrl: false,
      },
    });
  }
  // Non-admin path: SSR client bound to the request's cookies.
  return createServerClient<GenericSchema>(keys.url, keys.anonKey, {
    cookies: getCookiesStrategy(),
  });
}
/**
 * Cookie adapter bridging the Supabase SSR client to Next.js `cookies()`.
 * Implements get/set/remove; removal overwrites the cookie with an empty
 * value rather than deleting it.
 */
function getCookiesStrategy() {
  return {
    set: async (name: string, value: string, options: CookieOptions) => {
      const cookieStore = await cookies();
      cookieStore.set({ name, value, ...options });
    },
    get: async (name: string) => {
      const cookieStore = await cookies();
      return cookieStore.get(name)?.value;
    },
    remove: async (name: string, options: CookieOptions) => {
      const cookieStore = await cookies();
      // "Remove" = overwrite with an empty value, preserving cookie options.
      cookieStore.set({ name, value: '', ...options });
    },
  };
}

View File

@@ -1,76 +0,0 @@
import 'server-only';
import { cookies } from 'next/headers';
import { createClient } from '@supabase/supabase-js';
import { createServerClient } from '@supabase/ssr';
import { Database } from '../database.types';
import {
getServiceRoleKey,
warnServiceRoleKeyUsage,
} from '../get-service-role-key';
import { getSupabaseClientKeys } from '../get-supabase-client-keys';
const keys = getSupabaseClientKeys();
const serviceRoleKey = getServiceRoleKey();
// Builds the cookie-backed SSR Supabase client used by the non-admin path below.
function createServerSupabaseClient<
  GenericSchema extends Database = Database,
>() {
  return createServerClient<GenericSchema>(keys.url, keys.anonKey, {
    cookies: getCookiesStrategy(),
  });
}
/**
 * @name getSupabaseServerActionClient
 * @deprecated Use `getSupabaseServerClient` instead.
 * @description Get a Supabase client for use in Server Actions.
 * @param params - pass `{ admin: true }` to obtain a service-role client.
 */
export function getSupabaseServerActionClient<
  GenericSchema extends Database = Database,
>(params?: { admin: boolean }) {
  // NOTE(review): shadows the module-level `keys`; both values come from
  // getSupabaseClientKeys(), so behavior is unchanged.
  const keys = getSupabaseClientKeys();
  const admin = params?.admin ?? false;
  if (admin) {
    warnServiceRoleKeyUsage();
    // Admin clients are stateless: no session persistence or auto refresh.
    return createClient<GenericSchema>(keys.url, serviceRoleKey, {
      auth: {
        persistSession: false,
        detectSessionInUrl: false,
        autoRefreshToken: false,
      },
    });
  }
  return createServerSupabaseClient();
}
/**
 * Cookie adapter bridging the Supabase SSR client to Next.js `cookies()`
 * for Server Actions. Removal overwrites the cookie with an empty value.
 */
function getCookiesStrategy() {
  return {
    get: async (name: string) => {
      const cookieStore = await cookies();
      const cookie = cookieStore.get(name);
      return cookie?.value;
    },
    set: async (name: string, value: string, options: object) => {
      const cookieStore = await cookies();
      cookieStore.set({ name, value, ...options });
    },
    remove: async (name: string, options: object) => {
      const cookieStore = await cookies();
      // "Remove" = overwrite with an empty value, preserving cookie options.
      cookieStore.set({
        name,
        value: '',
        ...options,
      });
    },
  };
}

View File

@@ -1,53 +0,0 @@
import 'server-only';
import { cookies } from 'next/headers';
import { createClient } from '@supabase/supabase-js';
import { createServerClient } from '@supabase/ssr';
import { Database } from '../database.types';
import {
getServiceRoleKey,
warnServiceRoleKeyUsage,
} from '../get-service-role-key';
import { getSupabaseClientKeys } from '../get-supabase-client-keys';
const serviceRoleKey = getServiceRoleKey();
const keys = getSupabaseClientKeys();
/**
 * @name getSupabaseServerComponentClient
 * @description Get a Supabase client for use in the Server Components
 * @param params - pass `{ admin: true }` to obtain a service-role client;
 *   defaults to the cookie-backed anonymous client.
 */
export function getSupabaseServerComponentClient<GenericSchema = Database>(
  params = {
    admin: false,
  },
) {
  if (params.admin) {
    warnServiceRoleKeyUsage();
    // Admin clients are stateless: no session persistence, refresh, or URL detection.
    return createClient<GenericSchema>(keys.url, serviceRoleKey, {
      auth: {
        persistSession: false,
        autoRefreshToken: false,
        detectSessionInUrl: false,
      },
    });
  }
  return createServerClient<GenericSchema>(keys.url, keys.anonKey, {
    cookies: getCookiesStrategy(),
  });
}
/**
 * Read-only cookie adapter: Server Components cannot mutate cookies, so
 * only `get` is implemented.
 */
function getCookiesStrategy() {
  return {
    get: async (name: string) => {
      const cookieStore = await cookies();
      return cookieStore.get(name)?.value;
    },
  };
}

View File

@@ -5,6 +5,7 @@
"scripts": {
"dev": "node ./src/dev.mjs",
"checks": "node ./src/checks.mjs",
"requirements": "node ./src/requirements.mjs"
"requirements": "node ./src/requirements.mjs",
"license": "node ./src/license.mjs"
}
}

View File

@@ -1,8 +1,6 @@
import { execSync } from 'child_process';
import { readFileSync } from 'fs';
import path from 'path';
const endpoint = 'https://makerkit.dev/api/license/check';
@@ -47,6 +45,21 @@ async function checkLicense() {
searchParams.append('email', gitEmail);
}
try {
const makerkitConfig =
JSON.parse(
readFileSync(path.resolve(process.cwd(), '../../.makerkitrc'), 'utf-8'),
) || {};
if (makerkitConfig.projectName) {
searchParams.append('projectName', makerkitConfig.projectName);
}
if (makerkitConfig.username) {
searchParams.append('projectUsername', makerkitConfig.username);
}
} catch {}
const res = await fetch(`${endpoint}?${searchParams.toString()}`);
if (res.status === 200) {

View File

@@ -1,5 +1,7 @@
import type { PlopTypes } from '@turbo/gen';
import { createCloudflareGenerator } from './templates/cloudflare/generator';
import { createDockerGenerator } from './templates/docker/generator';
import { createEnvironmentVariablesGenerator } from './templates/env/generator';
import { createKeystaticAdminGenerator } from './templates/keystatic/generator';
import { createPackageGenerator } from './templates/package/generator';
@@ -13,6 +15,8 @@ const generators = [
createEnvironmentVariablesGenerator,
createEnvironmentVariablesValidatorGenerator,
createSetupGenerator,
createCloudflareGenerator,
createDockerGenerator,
];
export default function generator(plop: PlopTypes.NodePlopAPI): void {

View File

@@ -0,0 +1,90 @@
import type { PlopTypes } from '@turbo/gen';
import { execSync } from 'node:child_process';
import packageJson from '../../../../package.json';
/**
 * Registers the `cloudflare` turbo generator.
 *
 * Scaffolds Cloudflare Workers deployment via OpenNext/Wrangler:
 * 1. adds wrangler.jsonc, open-next.config.ts and .dev.vars templates,
 * 2. wires the OpenNext dev bootstrap into next.config.mjs,
 * 3. adds wrangler/@opennextjs dev dependencies at their latest published
 *    versions and the preview/deploy/cf-typegen scripts,
 * 4. installs and formats.
 */
export function createCloudflareGenerator(plop: PlopTypes.NodePlopAPI) {
  plop.setGenerator('cloudflare', {
    description: 'Cloudflare generator',
    actions: [
      {
        type: 'add',
        templateFile: 'templates/cloudflare/wrangler.jsonc.hbs',
        path: 'apps/web/wrangler.jsonc',
        data: {
          name: packageJson.name,
        },
      },
      {
        type: 'add',
        templateFile: 'templates/cloudflare/open-next.config.ts.hbs',
        path: 'apps/web/open-next.config.ts',
      },
      {
        type: 'add',
        templateFile: 'templates/cloudflare/dev.vars.hbs',
        path: 'apps/web/.dev.vars',
      },
      {
        type: 'modify',
        path: 'apps/web/next.config.mjs',
        async transform(content) {
          // Append the OpenNext dev bootstrap so `next dev` can emulate the
          // Cloudflare runtime locally.
          content += `
import { initOpenNextCloudflareForDev } from '@opennextjs/cloudflare';

void initOpenNextCloudflareForDev();
`;

          return content;
        },
      },
      {
        type: 'modify',
        path: 'apps/web/package.json',
        async transform(content) {
          const pkg = JSON.parse(content);
          const deps = ['wrangler', '@opennextjs/cloudflare'];

          // Resolve the latest published version from the npm registry.
          // Fail loudly on a bad response: previously an error would have
          // silently written "^undefined" into package.json.
          const getVersion = async (dep: string): Promise<string> => {
            const res = await fetch(
              `https://registry.npmjs.org/-/package/${dep}/dist-tags`,
            );

            if (!res.ok) {
              throw new Error(
                `Failed to fetch dist-tags for "${dep}": HTTP ${res.status}`,
              );
            }

            const json = (await res.json()) as { latest?: string };

            if (!json.latest) {
              throw new Error(`No "latest" dist-tag found for "${dep}"`);
            }

            return json.latest;
          };

          pkg.devDependencies ??= {};

          for (const dep of deps) {
            const version = await getVersion(dep);
            pkg.devDependencies[dep] = `^${version}`;
          }

          pkg.scripts['preview'] =
            'opennextjs-cloudflare build && opennextjs-cloudflare preview';

          pkg.scripts['deploy'] =
            'opennextjs-cloudflare build && opennextjs-cloudflare deploy';

          pkg.scripts['cf-typegen'] =
            'wrangler types --env-interface CloudflareEnv cloudflare-env.d.ts';

          return JSON.stringify(pkg, null, 2);
        },
      },
      async () => {
        /**
         * Install deps and format everything
         */
        execSync('pnpm i', {
          stdio: 'inherit',
        });

        // stdio: 'inherit' so formatting output/errors are visible
        // (consistent with the docker generator).
        execSync(`pnpm run format:fix`, {
          stdio: 'inherit',
        });

        return 'Package scaffolded';
      },
    ],
    prompts: [],
  });
}

View File

@@ -0,0 +1,3 @@
// OpenNext adapter configuration for running this Next.js app on Cloudflare.
// The default configuration is used; pass options to defineCloudflareConfig
// to customize caching behavior if needed.
import { defineCloudflareConfig } from "@opennextjs/cloudflare";

export default defineCloudflareConfig();

View File

@@ -0,0 +1,54 @@
/**
* For more details on how to configure Wrangler, refer to:
* https://developers.cloudflare.com/workers/wrangler/configuration/
*/
{
"$schema": "node_modules/wrangler/config-schema.json",
"name": "{{name}}",
"main": ".open-next/worker.js",
"keep_vars": true,
"compatibility_date": "2025-04-02",
"compatibility_flags": [
"nodejs_compat"
],
// Minification helps to keep the Worker bundle size down and improve start up time.
"minify": true,
"assets": {
"binding": "ASSETS",
"directory": ".open-next/assets"
},
"observability": {
"enabled": true
},
/**
* Smart Placement
* Docs: https://developers.cloudflare.com/workers/configuration/smart-placement/#smart-placement
*/
"placement": { "mode": "smart" },
/**
* Bindings
* Bindings allow your Worker to interact with resources on the Cloudflare Developer Platform, including
* databases, object storage, AI inference, real-time communication and more.
* https://developers.cloudflare.com/workers/runtime-apis/bindings/
*/
/**
* Environment Variables
* https://developers.cloudflare.com/workers/wrangler/configuration/#environment-variables
*/
"vars": {},
/**
* Note: Use secrets to store sensitive data.
* https://developers.cloudflare.com/workers/configuration/secrets/
*/
/**
* Service Bindings (communicate between multiple Workers)
* https://developers.cloudflare.com/workers/wrangler/configuration/#service-bindings
*/
// "services": [{ "binding": "MY_SERVICE", "service": "my-service" }]
"d1_databases": [],
"kv_namespaces": [],
"r2_buckets": []
}

View File

@@ -0,0 +1,69 @@
# syntax=docker.io/docker/dockerfile:1

FROM node:20-alpine AS base

# Install dependencies only when needed
FROM base AS deps
# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
RUN apk add --no-cache libc6-compat

WORKDIR /app

# Install global dependencies
RUN corepack enable pnpm
RUN npm install -g turbo

# Copy the entire project for dependency installation
# This ensures all packages are available for resolution
COPY . .

# Install dependencies using lockfile
RUN pnpm install --frozen-lockfile

# Rebuild the native CSS tooling from source for this musl-based image.
# Fixed typo: the package is "lightningcss" (the misspelled "lightingcss"
# made this rebuild a silent no-op).
RUN npm rebuild lightningcss --build-from-source --verbose

# Rebuild the source code only when needed
FROM base AS builder
WORKDIR /app

# Install global dependencies for the build
RUN corepack enable pnpm
RUN npm install -g turbo

# Copy over everything including dependencies
COPY --from=deps /app ./

# Disable telemetry during the build
ENV NEXT_TELEMETRY_DISABLED=1

# Build the project
RUN turbo run build --filter=web...

# Production image, copy all the files and run next
FROM base AS runner
WORKDIR /app

ENV NODE_ENV=production
ENV NEXT_TELEMETRY_DISABLED=1

# Create a non-root user
RUN addgroup --system --gid 1001 nodejs
RUN adduser --system --uid 1001 nextjs

# Copy public assets
COPY --from=builder /app/apps/web/public ./apps/web/public

# Leverage output traces to reduce image size (standalone output)
COPY --from=builder --chown=nextjs:nodejs /app/apps/web/.next/standalone ./
COPY --from=builder --chown=nextjs:nodejs /app/apps/web/.next/static ./apps/web/.next/static

# Switch to non-root user
USER nextjs

# Set server port and host
EXPOSE 3000
ENV PORT=3000
ENV HOSTNAME="0.0.0.0"

# Use busybox wget: node:alpine does not ship curl, so a curl-based probe
# would always fail and mark the container unhealthy.
HEALTHCHECK --interval=90s --timeout=5s --retries=3 \
  CMD wget -q --spider http://localhost:3000/healthcheck || exit 1

# Start the server
CMD ["node", "apps/web/server.js"]

View File

@@ -0,0 +1,80 @@
import type { PlopTypes } from '@turbo/gen';
import { execSync } from 'node:child_process';
import * as os from 'node:os';
/**
 * Registers the `docker` turbo generator.
 *
 * Prepares the repo for a standalone Next.js Docker build:
 * 1. switches apps/web/next.config.mjs to `output: "standalone"`,
 * 2. adds the platform-specific native dev dependencies needed by the
 *    Linux build image,
 * 3. writes the Dockerfile template, then installs and formats.
 */
export function createDockerGenerator(plop: PlopTypes.NodePlopAPI) {
  plop.setGenerator('docker', {
    description: 'Dockerfile generator',
    actions: [
      {
        type: 'modify',
        path: 'apps/web/next.config.mjs',
        transform(content) {
          // Skip if standalone output is already configured. Match both
          // quote styles — the previous check only recognized
          // `output: "standalone"` and would inject a duplicate key into
          // a single-quoted config.
          if (/output:\s*['"]standalone['"]/.test(content)) {
            return content;
          }

          return content.replace(
            'const config = {',
            'const config = { output: "standalone",',
          );
        },
      },
      {
        type: 'modify',
        path: 'apps/web/package.json',
        transform(content) {
          const pkg = JSON.parse(content);
          const deps = getDeps();

          // Nothing to add on unsupported architectures.
          if (deps.length === 0) {
            return content;
          }

          // Guard against a package.json without a devDependencies block.
          pkg['devDependencies'] ??= {};

          for (const dep of deps) {
            pkg['devDependencies'][dep] = 'latest';
          }

          return JSON.stringify(pkg, null, 2);
        },
      },
      {
        type: 'add',
        templateFile: 'templates/docker/Dockerfile.hbs',
        path: 'Dockerfile',
      },
      async () => {
        // Install the newly added dependencies and normalize formatting.
        execSync('pnpm i', {
          stdio: 'inherit',
        });

        execSync('pnpm format:fix', {
          stdio: 'inherit',
        });

        return 'Dockerfile generated';
      },
    ],
    prompts: [],
  });
}
/**
 * Returns the native lightningcss/tailwind-oxide packages to pin for the
 * current host architecture (empty for unsupported architectures).
 * On x64 the musl/glibc flavor is chosen from the host Node's build config.
 * NOTE(review): this reflects the *host* toolchain, not necessarily the
 * Docker build image — confirm this matches the target base image.
 */
function getDeps() {
  switch (os.arch()) {
    case 'arm64':
      return [
        'lightningcss-linux-arm64-musl',
        '@tailwindcss/oxide-linux-arm64-musl',
      ];
    case 'x64': {
      // Detect a musl-based Node build; fall back to glibc otherwise.
      const isMusl = process.config?.variables?.hasOwnProperty('musl');
      const flavor = isMusl ? 'musl' : 'gnu';

      return [
        `lightningcss-linux-x64-${flavor}`,
        `@tailwindcss/oxide-linux-x64-${flavor}`,
      ];
    }
    default:
      return [];
  }
}

View File

@@ -70,18 +70,18 @@ export function createPackageGenerator(plop: PlopTypes.NodePlopAPI) {
return JSON.stringify(pkg, null, 2);
},
},
async (answers) => {
async () => {
/**
* Install deps and format everything
*/
execSync('pnpm manypkg fix', {
execSync('pnpm i', {
stdio: 'inherit',
});
execSync(
`pnpm prettier --write packages/${
(answers as { name: string }).name
}/** --list-different`,
`pnpm run format:fix`,
);
return 'Package scaffolded';
},
],

View File

@@ -1,10 +1,17 @@
import type { PlopTypes } from '@turbo/gen';
import { execSync } from 'node:child_process';
import { writeFileSync } from 'node:fs';
export function createSetupGenerator(plop: PlopTypes.NodePlopAPI) {
plop.setGenerator('setup', {
description: 'Setup your Makerkit project',
prompts: [
{
type: 'input',
name: 'username',
message:
'What is your GitHub username? Please make sure you enter the same username you used to activate your Makerkit license.',
},
{
type: 'input',
name: 'projectName',
@@ -15,9 +22,21 @@ export function createSetupGenerator(plop: PlopTypes.NodePlopAPI) {
name: 'setupHealthCheck',
message: 'Do you want to setup a pre-commit hook for health checks?',
default: false,
}
},
],
actions: [
() => {
execSync(`pnpm run --filter scripts requirements`);
return 'Requirements checked';
},
async (answers: any) => {
execSync(`git config user.username "${answers.username}"`, {
stdio: 'inherit',
});
return 'Git user name set';
},
{
type: 'modify',
path: 'package.json',
@@ -32,10 +51,15 @@ export function createSetupGenerator(plop: PlopTypes.NodePlopAPI) {
},
async (answers: any) => {
try {
createMakerkitConfig({
projectName: answers.projectName,
username: answers.username,
});
setupRemote();
setupPreCommit({ setupHealthCheck: answers.setupHealthCheck });
return 'Project setup complete';
return 'Project setup complete. Start developing your project!';
} catch (error) {
console.error('Project setup failed. Aborting package generation.');
process.exit(1);
@@ -45,9 +69,23 @@ export function createSetupGenerator(plop: PlopTypes.NodePlopAPI) {
});
}
function setupPreCommit(params: {
setupHealthCheck: boolean;
/**
 * Writes the `.makerkitrc` project configuration file and stages it in git.
 *
 * Serializes with JSON.stringify instead of string interpolation so values
 * containing quotes or backslashes cannot produce an invalid JSON file.
 *
 * @param params.projectName - project name entered during setup
 * @param params.username - GitHub username entered during setup
 */
function createMakerkitConfig(params: {
  projectName: string;
  username: string;
}) {
  const config = JSON.stringify(
    {
      projectName: params.projectName,
      username: params.username,
    },
    null,
    2,
  );

  writeFileSync('.makerkitrc', config, {
    encoding: 'utf-8',
  });

  // Stage the file so it lands in the user's first commit.
  execSync('git add .makerkitrc');
}
function setupPreCommit(params: { setupHealthCheck: boolean }) {
try {
const filePath = '.git/hooks/pre-commit';
@@ -55,7 +93,7 @@ function setupPreCommit(params: {
? `pnpm run lint:fix\npnpm run typecheck\n`.trim()
: ``;
const licenseCommand = `pnpm run --filter license dev`;
const licenseCommand = `pnpm run --filter scripts license`;
const fileContent = `#!/bin/bash\n${healthCheckCommands}${licenseCommand}`;
// write file
@@ -76,37 +114,37 @@ function setupPreCommit(params: {
function setupRemote() {
try {
// Setup remote upstream
const getRemoteUrl = execSync('git remote get-url origin', {
stdio: 'inherit',
});
const currentRemote = getRemoteUrl.toString().trim();
const currentRemote = execSync('git remote get-url origin').toString();
console.log(`Setting upstream remote to ${currentRemote} ...`);
if (currentRemote && currentRemote.includes('github.com')) {
execSync(`git remote remove origin`, {
stdio: 'inherit',
});
execSync(`git remote set-url upstream ${currentRemote}`, {
execSync(`git remote add upstream ${currentRemote}`, {
stdio: 'inherit',
});
} else {
console.error('Your current remote is not GitHub');
}
} catch (error) {
console.error(error);
console.info('No current remote found. Skipping upstream remote setup.');
}
// Run license script
try {
execSync('turbo run --filter license dev', {
execSync('pnpm run --filter scripts license', {
stdio: 'inherit',
});
} catch (error) {
console.error(`License script failed. Aborting package generation. Error: ${error}`);
console.error(
`License check failed. Aborting package generation. Error: ${error}`,
);
process.exit(1);
}
}