docs: fix api-reference with proxy (#7952)
This commit is contained in:
@@ -1,38 +0,0 @@
|
||||
import "../../globals.css"
|
||||
import Navbar from "@/components/Navbar"
|
||||
import Providers from "../../../providers"
|
||||
import { WideLayout } from "docs-ui"
|
||||
import { Inter, Roboto_Mono } from "next/font/google"
|
||||
import clsx from "clsx"
|
||||
|
||||
// Default <head> metadata for the API reference app (Next.js Metadata API).
export const metadata = {
  title: "Medusa API Reference",
  description: "Check out Medusa's API reference",
}
|
||||
|
||||
// Inter font, exposed to CSS through the `--font-inter` variable.
// Only the 400 and 500 weights are loaded.
const inter = Inter({
  subsets: ["latin"],
  variable: "--font-inter",
  weight: ["400", "500"],
})
|
||||
|
||||
// Roboto Mono font (used for code), exposed through `--font-roboto-mono`.
const robotoMono = Roboto_Mono({
  subsets: ["latin"],
  variable: "--font-roboto-mono",
})
|
||||
|
||||
export default function RootLayout({
|
||||
children,
|
||||
}: {
|
||||
children: React.ReactNode
|
||||
}) {
|
||||
return (
|
||||
<WideLayout
|
||||
ProvidersComponent={Providers}
|
||||
NavbarComponent={Navbar}
|
||||
bodyClassName={clsx(inter.variable, robotoMono.variable)}
|
||||
>
|
||||
{children}
|
||||
</WideLayout>
|
||||
)
|
||||
}
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 253 KiB |
@@ -1,65 +0,0 @@
|
||||
import AreaProvider from "@/providers/area"
|
||||
import AdminContentV2 from "../../_mdx/admin.mdx"
|
||||
import StoreContentV2 from "../../_mdx/store.mdx"
|
||||
import ClientLibrariesV2 from "../../_mdx/client-libraries.mdx"
|
||||
import Section from "@/components/Section"
|
||||
import Tags from "@/components/Tags"
|
||||
import type { Area } from "@/types/openapi"
|
||||
import DividedLayout from "@/layouts/Divided"
|
||||
import { capitalize } from "docs-ui"
|
||||
import PageTitleProvider from "@/providers/page-title"
|
||||
|
||||
// Props Next.js passes to the dynamic `[area]` route segment.
type ReferencePageProps = {
  params: {
    // API area being rendered — presumably "admin" or "store", matching
    // `generateStaticParams` below; verify against the `Area` type.
    area: Area
  }
}
|
||||
|
||||
const ReferencePage = async ({ params: { area } }: ReferencePageProps) => {
|
||||
return (
|
||||
<AreaProvider area={area}>
|
||||
<PageTitleProvider>
|
||||
<h1 className="!text-h2 block lg:hidden">
|
||||
Medusa V2 {capitalize(area)} API Reference
|
||||
</h1>
|
||||
<DividedLayout
|
||||
mainContent={
|
||||
<Section>
|
||||
<h1 className="!text-h2 hidden lg:block">
|
||||
Medusa V2 {capitalize(area)} API Reference
|
||||
</h1>
|
||||
{area.includes("admin") && <AdminContentV2 />}
|
||||
{area.includes("store") && <StoreContentV2 />}
|
||||
</Section>
|
||||
}
|
||||
codeContent={<ClientLibrariesV2 />}
|
||||
className="flex-col-reverse"
|
||||
/>
|
||||
<Tags />
|
||||
</PageTitleProvider>
|
||||
</AreaProvider>
|
||||
)
|
||||
}
|
||||
|
||||
export default ReferencePage
|
||||
|
||||
export function generateMetadata({ params: { area } }: ReferencePageProps) {
|
||||
return {
|
||||
title: `Medusa ${capitalize(area)} API Reference`,
|
||||
description: `REST API reference for the Medusa ${area} API. This reference includes code snippets and examples for Medusa JS Client and cURL.`,
|
||||
metadataBase: process.env.NEXT_PUBLIC_BASE_URL,
|
||||
}
|
||||
}
|
||||
|
||||
export const dynamicParams = false
|
||||
|
||||
export async function generateStaticParams() {
|
||||
return [
|
||||
{
|
||||
area: "admin",
|
||||
},
|
||||
{
|
||||
area: "store",
|
||||
},
|
||||
]
|
||||
}
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 253 KiB |
@@ -1,152 +0,0 @@
|
||||
import OpenAPIParser from "@readme/openapi-parser"
|
||||
import algoliasearch from "algoliasearch"
|
||||
import type { ExpandedDocument, Operation } from "../../../types/openapi"
|
||||
import path from "path"
|
||||
import getPathsOfTag from "../../../utils/get-paths-of-tag"
|
||||
import getSectionId from "../../../utils/get-section-id"
|
||||
import { NextResponse } from "next/server"
|
||||
import { JSDOM } from "jsdom"
|
||||
import getUrl from "../../../utils/get-url"
|
||||
import { capitalize } from "docs-ui"
|
||||
|
||||
/**
 * Rebuilds the Algolia search index for the API reference.
 *
 * For each area ("store", "admin") it indexes:
 * 1. the static `<h2>` headers scraped from the rendered reference page,
 * 2. each OpenAPI tag as a top-level (`lvl1`) record,
 * 3. each operation's summary and description as content records.
 *
 * All records are collected first and pushed to Algolia in a single
 * `saveObjects` call at the end.
 *
 * @returns a JSON response `{ message: "done" }` once indexing finishes.
 */
export async function GET() {
  // Write-capable Algolia client; env vars default to "" so construction
  // never throws, but indexing will fail later if they're unset.
  const algoliaClient = algoliasearch(
    process.env.NEXT_PUBLIC_ALGOLIA_APP_ID || "",
    process.env.ALGOLIA_WRITE_API_KEY || ""
  )
  const index = algoliaClient.initIndex(
    process.env.NEXT_PUBLIC_API_ALGOLIA_INDEX_NAME || ""
  )

  // retrieve tags and their operations to index them
  const indices: Record<string, any>[] = []
  for (const area of ["store", "admin"]) {
    // Fields shared by every record of this area.
    const defaultIndexData = {
      version: ["current"],
      lang: "en",
      _tags: ["api", `${area}-v2`],
    }
    // find and parse static headers from pages
    // NOTE(review): scrapes the live rendered page, so the deployment must
    // be reachable at `getUrl(area)` when this route runs.
    const dom = await JSDOM.fromURL(getUrl(area))
    const headers = dom.window.document.querySelectorAll("h2")
    headers.forEach((header) => {
      // Skip headers with no text — there's nothing to index.
      if (!header.textContent) {
        return
      }

      const objectID = getSectionId([header.textContent])
      const url = getUrl(area, objectID)
      indices.push({
        // "-mdx-section" suffix keeps header records distinct from
        // tag/operation records that may share the same section id.
        objectID: getObjectId(area, `${objectID}-mdx-section`),
        hierarchy: getHierarchy(area, [header.textContent]),
        type: `content`,
        content: header.textContent,
        url,
        url_without_variables: url,
        url_without_anchor: url,
        ...defaultIndexData,
      })
    })

    // find and index tag and operations
    const baseSpecs = (await OpenAPIParser.parse(
      path.join(process.cwd(), `specs/${area}/openapi.yaml`)
    )) as ExpandedDocument

    // Process all tags concurrently; `|| []` covers specs without tags.
    await Promise.all(
      baseSpecs.tags?.map(async (tag) => {
        const tagName = getSectionId([tag.name])
        const url = getUrl(area, tagName)
        indices.push({
          objectID: getObjectId(area, tagName),
          hierarchy: getHierarchy(area, [tag.name]),
          type: "lvl1",
          content: null,
          url,
          url_without_variables: url,
          url_without_anchor: url,
          ...defaultIndexData,
        })
        const paths = await getPathsOfTag(tagName, area)

        // Walk every operation under every path of this tag.
        // Note: the inner `url` consts shadow the tag-level `url` above.
        Object.values(paths.paths).forEach((path) => {
          Object.values(path).forEach((op) => {
            const operation = op as Operation
            const operationName = getSectionId([
              tag.name,
              operation.operationId,
            ])
            const url = getUrl(area, operationName)
            indices.push({
              objectID: getObjectId(area, operationName),
              hierarchy: getHierarchy(area, [tag.name, operation.summary]),
              type: "content",
              content: operation.summary,
              content_camel: operation.summary,
              url,
              url_without_variables: url,
              url_without_anchor: url,
              ...defaultIndexData,
            })

            // index its description
            // NOTE(review): assumes `operation.description` is always set —
            // an operation without a description would throw here; confirm
            // the specs guarantee it.
            const operationDescriptionId = getSectionId([
              tag.name,
              operation.operationId,
              // First 20 chars of the description disambiguate the id.
              operation.description.substring(
                0,
                Math.min(20, operation.description.length)
              ),
            ])

            indices.push({
              objectID: getObjectId(area, operationDescriptionId),
              hierarchy: getHierarchy(area, [
                tag.name,
                operation.summary,
                operation.description,
              ]),
              type: "content",
              content: operation.description,
              content_camel: operation.description,
              url,
              url_without_variables: url,
              url_without_anchor: url,
              ...defaultIndexData,
            })
          })
        })
      }) || []
    )
  }

  // Push everything in one batch; skip the API call when nothing was found.
  if (indices.length) {
    await index.saveObjects(indices, {
      autoGenerateObjectIDIfNotExist: true,
    })
  }

  return NextResponse.json({
    message: "done",
  })
}
|
||||
|
||||
function getObjectId(area: string, objectName: string): string {
|
||||
return `${area}_${objectName}`
|
||||
}
|
||||
|
||||
function getHierarchy(area: string, levels: string[]): Record<string, string> {
|
||||
const heirarchy: Record<string, string> = {
|
||||
lvl0: `${capitalize(area)} API Reference`,
|
||||
}
|
||||
|
||||
let counter = 1
|
||||
levels.forEach((level) => {
|
||||
heirarchy[`lvl${counter}`] = level
|
||||
counter++
|
||||
})
|
||||
|
||||
return heirarchy
|
||||
}
|
||||
|
||||
export const dynamic = "force-dynamic"
|
||||
@@ -1,37 +0,0 @@
|
||||
import { NextResponse } from "next/server"
|
||||
import path from "path"
|
||||
import OpenAPIParser from "@readme/openapi-parser"
|
||||
import getPathsOfTag from "@/utils/get-paths-of-tag"
|
||||
import type { ExpandedDocument } from "@/types/openapi"
|
||||
|
||||
export async function GET(request: Request) {
|
||||
const { searchParams } = new URL(request.url)
|
||||
const area = searchParams.get("area")
|
||||
const expand = searchParams.get("expand")
|
||||
if (area !== "admin" && area !== "store") {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
message: `area ${area} is not allowed`,
|
||||
},
|
||||
{
|
||||
status: 400,
|
||||
}
|
||||
)
|
||||
}
|
||||
const baseSpecs = (await OpenAPIParser.parse(
|
||||
path.join(process.cwd(), "specs", area, "openapi.yaml")
|
||||
)) as ExpandedDocument
|
||||
|
||||
if (expand) {
|
||||
const paths = await getPathsOfTag(expand, area)
|
||||
if (paths) {
|
||||
baseSpecs.expandedTags = {}
|
||||
baseSpecs.expandedTags[expand] = paths.paths
|
||||
}
|
||||
}
|
||||
|
||||
return NextResponse.json(baseSpecs, {
|
||||
status: 200,
|
||||
})
|
||||
}
|
||||
@@ -1,29 +0,0 @@
|
||||
import { existsSync, readFileSync } from "fs"
|
||||
import { NextResponse } from "next/server"
|
||||
import path from "path"
|
||||
|
||||
// Route params for the spec download endpoint; `area` comes from the
// dynamic route segment (expected to be "admin" or "store").
type DownloadParams = {
  params: {
    area: string
  }
}
|
||||
|
||||
export function GET(request: Request, { params }: DownloadParams) {
|
||||
const { area } = params
|
||||
const filePath = path.join(process.cwd(), "specs", area, "openapi.full.yaml")
|
||||
|
||||
if (!existsSync(filePath)) {
|
||||
return new NextResponse(null, {
|
||||
status: 404,
|
||||
})
|
||||
}
|
||||
|
||||
const fileContent = readFileSync(filePath)
|
||||
|
||||
return new Response(fileContent, {
|
||||
headers: {
|
||||
"Content-Type": "application/x-yaml",
|
||||
"Content-Disposition": `attachment; filename="openapi.yaml"`,
|
||||
},
|
||||
})
|
||||
}
|
||||
@@ -1,81 +0,0 @@
|
||||
import { NextResponse } from "next/server"
|
||||
import { SchemaObject } from "../../../types/openapi"
|
||||
import path from "path"
|
||||
import { existsSync, promises as fs } from "fs"
|
||||
import { parseDocument } from "yaml"
|
||||
import dereference from "../../../utils/dereference"
|
||||
|
||||
export async function GET(request: Request) {
|
||||
const { searchParams } = new URL(request.url)
|
||||
let name = searchParams.get("name")
|
||||
const area = searchParams.get("area")
|
||||
|
||||
if (!name) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
message: `Name is required.`,
|
||||
},
|
||||
{
|
||||
status: 400,
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
if (area !== "admin" && area !== "store") {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
message: `area ${area} is not allowed`,
|
||||
},
|
||||
{
|
||||
status: 400,
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
name = name
|
||||
.replace("#/components/schemas/", "")
|
||||
.replaceAll("./components/schemas/", "")
|
||||
|
||||
const baseSchemasPath = path.join(
|
||||
process.cwd(),
|
||||
"specs",
|
||||
area,
|
||||
"components",
|
||||
"schemas"
|
||||
)
|
||||
const schemaPath = path.join(baseSchemasPath, name)
|
||||
|
||||
if (!existsSync(schemaPath)) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
message: `Schema ${name} doesn't exist.`,
|
||||
},
|
||||
{
|
||||
status: 404,
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
const schemaContent = await fs.readFile(schemaPath, "utf-8")
|
||||
const schema = parseDocument(schemaContent).toJS() as SchemaObject
|
||||
|
||||
// resolve references in schema
|
||||
const dereferencedDocument = await dereference({
|
||||
basePath: baseSchemasPath,
|
||||
schemas: [schema],
|
||||
})
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
schema: dereferencedDocument.components?.schemas
|
||||
? Object.values(dereferencedDocument.components?.schemas)[0]
|
||||
: schema,
|
||||
},
|
||||
{
|
||||
status: 200,
|
||||
}
|
||||
)
|
||||
}
|
||||
@@ -1,36 +0,0 @@
|
||||
import { NextResponse } from "next/server"
|
||||
import path from "path"
|
||||
import getPathsOfTag from "@/utils/get-paths-of-tag"
|
||||
|
||||
export async function GET(request: Request) {
|
||||
const { searchParams } = new URL(request.url)
|
||||
const tagName = searchParams.get("tagName") || ""
|
||||
const area = searchParams.get("area")
|
||||
|
||||
if (area !== "admin" && area !== "store") {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
message: `area ${area} is not allowed`,
|
||||
},
|
||||
{
|
||||
status: 400,
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
// this is just to ensure that vercel picks up these files on build
|
||||
path.join(process.cwd(), "specs")
|
||||
|
||||
// get path files
|
||||
const paths = await getPathsOfTag(tagName, area)
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
paths: paths.paths,
|
||||
},
|
||||
{
|
||||
status: 200,
|
||||
}
|
||||
)
|
||||
}
|
||||
Reference in New Issue
Block a user