feat: order export and upload stream (#14243)

* feat: order export

* Merge branch 'develop' of https://github.com/medusajs/medusa into feat/order-export

* normalize status

* rm util

* serialize totals

* test

* lock

* comments

* configurable order list
Carlos R. L. Rodrigues
2025-12-14 08:02:53 -03:00
committed by GitHub
parent e199f1eb01
commit 9366c6d468
31 changed files with 1041 additions and 37 deletions

View File

@@ -300,6 +300,12 @@ export function getRouteMap({
{
path: "",
lazy: () => import("../../routes/orders/order-list"),
children: [
{
path: "export",
lazy: () => import("../../routes/orders/order-export"),
},
],
},
{
path: ":id",

View File

@@ -10,8 +10,8 @@ import {
import { sdk } from "../../lib/client"
import { queryClient } from "../../lib/query-client"
import { queryKeysFactory, TQueryKey } from "../../lib/query-key-factory"
-import { reservationItemsQueryKeys } from "./reservations"
import { inventoryItemsQueryKeys } from "./inventory"
import { reservationItemsQueryKeys } from "./reservations"
const ORDERS_QUERY_KEY = "orders" as const
const _orderKeys = queryKeysFactory(ORDERS_QUERY_KEY) as TQueryKey<"orders"> & {
@@ -438,3 +438,20 @@ export const useUpdateOrderChange = (
...options,
})
}
export const useExportOrders = (
query?: HttpTypes.AdminOrderFilters,
options?: UseMutationOptions<
{ transaction_id: string },
FetchError,
HttpTypes.AdminOrderFilters
>
) => {
return useMutation({
mutationFn: () => sdk.admin.order.export(query),
onSuccess: (data, variables, context) => {
options?.onSuccess?.(data, variables, context)
},
...options,
})
}

View File

@@ -4049,6 +4049,45 @@
"required": ["noRecordsMessage"], "required": ["noRecordsMessage"],
"additionalProperties": false "additionalProperties": false
}, },
"export": {
"type": "object",
"properties": {
"header": {
"type": "string"
},
"description": {
"type": "string"
},
"success": {
"type": "object",
"properties": {
"title": {
"type": "string"
},
"description": {
"type": "string"
}
},
"required": ["title", "description"],
"additionalProperties": false
},
"filters": {
"type": "object",
"properties": {
"title": {
"type": "string"
},
"description": {
"type": "string"
}
},
"required": ["title", "description"],
"additionalProperties": false
}
},
"required": ["header", "description", "success", "filters"],
"additionalProperties": false
},
"status": { "status": {
"type": "object", "type": "object",
"properties": { "properties": {
@@ -5740,6 +5779,7 @@
"orderCanceled", "orderCanceled",
"onDateFromSalesChannel", "onDateFromSalesChannel",
"list", "list",
"export",
"status", "status",
"summary", "summary",
"transfer", "transfer",

View File

@@ -1080,6 +1080,18 @@
"list": { "list": {
"noRecordsMessage": "Your orders will show up here." "noRecordsMessage": "Your orders will show up here."
}, },
"export": {
"header": "Export Order List",
"description": "Export the order list to a CSV file.",
"success": {
"title": "Export started",
"description": "You will be notified when the export is ready."
},
"filters": {
"title": "Filters",
"description": "The following filters will be applied to the export."
}
},
"status": { "status": {
"not_paid": "Not paid", "not_paid": "Not paid",
"pending": "Pending", "pending": "Pending",

View File

@@ -0,0 +1,22 @@
import { Heading, Text } from "@medusajs/ui"
import { useTranslation } from "react-i18next"
import { DataTableFilter } from "../../../../components/table/data-table/data-table-filter"
import { useOrderTableFilters } from "../../order-list/components/order-list-table/use-order-table-filters"
export const ExportFilters = () => {
const { t } = useTranslation()
const filters = useOrderTableFilters()
return (
<div>
<Heading level="h2">{t("orders.export.filters.title")}</Heading>
<Text size="small" className="text-ui-fg-subtle">
{t("orders.export.filters.description")}
</Text>
<div className="mt-4">
<DataTableFilter filters={filters} readonly />
</div>
</div>
)
}

View File

@@ -0,0 +1 @@
export { OrderExport as Component } from "./order-export"

View File

@@ -0,0 +1,66 @@
import { Button, Heading, toast } from "@medusajs/ui"
import { useTranslation } from "react-i18next"
import { RouteDrawer, useRouteModal } from "../../../components/modals"
import { useExportOrders } from "../../../hooks/api"
import { useOrderTableQuery } from "../../../hooks/table/query"
import { ExportFilters } from "./components/export-filters"
export const OrderExport = () => {
const { t } = useTranslation()
return (
<RouteDrawer>
<RouteDrawer.Header>
<RouteDrawer.Title asChild>
<Heading>{t("orders.export.header")}</Heading>
</RouteDrawer.Title>
<RouteDrawer.Description className="sr-only">
{t("orders.export.description")}
</RouteDrawer.Description>
</RouteDrawer.Header>
<OrderExportContent />
</RouteDrawer>
)
}
const OrderExportContent = () => {
const { t } = useTranslation()
const { searchParams } = useOrderTableQuery({})
const { mutateAsync } = useExportOrders(searchParams)
const { handleSuccess } = useRouteModal()
const handleExportRequest = async () => {
await mutateAsync(searchParams, {
onSuccess: () => {
toast.info(t("orders.export.success.title"), {
description: t("orders.export.success.description"),
})
handleSuccess()
},
onError: (err) => {
toast.error(err.message)
},
})
}
return (
<>
<RouteDrawer.Body>
<ExportFilters />
</RouteDrawer.Body>
<RouteDrawer.Footer>
<div className="flex items-center gap-x-2">
<RouteDrawer.Close asChild>
<Button size="small" variant="secondary">
{t("actions.cancel")}
</Button>
</RouteDrawer.Close>
<Button onClick={handleExportRequest} size="small">
{t("actions.export")}
</Button>
</div>
</RouteDrawer.Footer>
</>
)
}

View File

@@ -1,16 +1,24 @@
import { useTranslation } from "react-i18next"
import { Outlet, useLocation } from "react-router-dom"
import { ConfigurableDataTable } from "../../../../../components/table/configurable-data-table"
import { useOrderTableAdapter } from "./order-table-adapter"
export const ConfigurableOrderListTable = () => {
const { t } = useTranslation()
-const orderAdapter = useOrderTableAdapter()
const location = useLocation()
const adapter = useOrderTableAdapter()
return (
-<ConfigurableDataTable
-adapter={orderAdapter}
-heading={t("orders.domain")}
-layout="fill"
-/>
<>
<ConfigurableDataTable
adapter={adapter}
heading={t("orders.domain")}
actions={[
{ label: t("actions.export"), to: `export${location.search}` },
]}
/>
<Outlet />
</>
)
}

View File

@@ -1,15 +1,16 @@
-import { Container, Heading } from "@medusajs/ui"
import { Button, Container, Heading } from "@medusajs/ui"
import { keepPreviousData } from "@tanstack/react-query"
import { useTranslation } from "react-i18next"
import { Link, Outlet, useLocation } from "react-router-dom"
import { _DataTable } from "../../../../../components/table/data-table/data-table"
import { useOrders } from "../../../../../hooks/api/orders"
import { useOrderTableColumns } from "../../../../../hooks/table/columns/use-order-table-columns"
-import { useOrderTableFilters } from "./use-order-table-filters"
import { useOrderTableQuery } from "../../../../../hooks/table/query/use-order-table-query"
import { useDataTable } from "../../../../../hooks/use-data-table"
import { useFeatureFlag } from "../../../../../providers/feature-flag-provider"
import { ConfigurableOrderListTable } from "./configurable-order-list-table"
import { useOrderTableFilters } from "./use-order-table-filters"
import { DEFAULT_FIELDS } from "../../const"
@@ -17,6 +18,7 @@ const PAGE_SIZE = 20
export const OrderListTable = () => {
const { t } = useTranslation()
const location = useLocation()
const isViewConfigEnabled = useFeatureFlag("view_configurations")
// If feature flag is enabled, use the new configurable table
@@ -57,6 +59,9 @@ export const OrderListTable = () => {
<Container className="divide-y p-0">
<div className="flex items-center justify-between px-6 py-4">
<Heading>{t("orders.domain")}</Heading>
<Button size="small" variant="secondary" asChild>
<Link to={`export${location.search}`}>{t("actions.export")}</Link>
</Button>
</div>
<_DataTable
columns={columns}
@@ -78,6 +83,7 @@ export const OrderListTable = () => {
message: t("orders.list.noRecordsMessage"),
}}
/>
<Outlet />
</Container>
)
}

View File

@@ -0,0 +1,178 @@
import {
FilterableOrderProps,
IFileModuleService,
OrderDTO,
} from "@medusajs/framework/types"
import {
ContainerRegistrationKeys,
Modules,
deduplicate,
} from "@medusajs/framework/utils"
import { StepResponse, createStep } from "@medusajs/framework/workflows-sdk"
import { json2csv } from "json-2-csv"
import {
getLastFulfillmentStatus,
getLastPaymentStatus,
} from "../utils/aggregate-status"
export type ExportOrdersStepInput = {
batch_size?: number | string
select: string[]
filter?: FilterableOrderProps
}
export type ExportOrdersStepOutput = {
id: string
filename: string
}
export const exportOrdersStepId = "export-orders"
const normalizeOrderForExport = (order: OrderDTO): object => {
const order_ = order as any
const customer = order_.customer || {}
const shippingAddress = order_.shipping_address || {}
return JSON.parse(
JSON.stringify({
Order_ID: order.id,
Display_ID: order.display_id,
"Order status": order.status,
Date: order.created_at,
"Customer First name": customer.first_name || "",
"Customer Last name": customer.last_name || "",
"Customer Email": customer.email || "",
"Customer ID": customer.id || "",
"Shipping Address 1": shippingAddress.address_1 || "",
"Shipping Address 2": shippingAddress.address_2 || "",
"Shipping Country Code": shippingAddress.country_code || "",
"Shipping City": shippingAddress.city || "",
"Shipping Postal Code": shippingAddress.postal_code || "",
"Shipping Region ID": order.region_id,
"Fulfillment Status": order_.fulfillment_status,
"Payment Status": order_.payment_status,
Subtotal: order.subtotal,
"Shipping Total": order.shipping_total,
"Discount Total": order.discount_total,
"Gift Card Total": order.gift_card_total,
"Refunded Total": order_.refunded_total,
"Tax Total": order.tax_total,
Total: order.total,
"Currency Code": order.currency_code,
})
)
}
export const exportOrdersStep = createStep(
exportOrdersStepId,
async (input: ExportOrdersStepInput, { container }) => {
const query = container.resolve(ContainerRegistrationKeys.QUERY)
const fileModule = container.resolve(Modules.FILE)
const filename = `${Date.now()}-order-exports.csv`
const { writeStream, promise, fileKey } = await fileModule.getUploadStream({
filename,
mimeType: "text/csv",
})
const pageSize = !isNaN(parseInt(input?.batch_size as string))
? parseInt(input?.batch_size as string, 10)
: 50
let page = 0
let hasHeader = false
const fields = deduplicate([
...input.select,
"id",
"status",
"items.*",
"customer.*",
"shipping_address.*",
"payment_collections.status",
"payment_collections.amount",
"payment_collections.captured_amount",
"payment_collections.refunded_amount",
"fulfillments.packed_at",
"fulfillments.shipped_at",
"fulfillments.delivered_at",
"fulfillments.canceled_at",
])
while (true) {
const { data: orders } = await query.graph({
entity: "order",
filters: {
...input.filter,
status: {
$ne: "draft",
},
},
pagination: {
skip: page * pageSize,
take: pageSize,
},
fields,
})
if (orders.length === 0) {
break
}
for (let i = 0; i < orders.length; i++) {
const order = orders[i]
const order_ = order as any
order_.payment_status = getLastPaymentStatus(order_)
order_.fulfillment_status = getLastFulfillmentStatus(order_)
delete order_.version
delete order.payment_collections
delete order.fulfillments
orders[i] = normalizeOrderForExport(order)
}
const batchCsv = json2csv(orders, {
prependHeader: !hasHeader,
arrayIndexesAsKeys: true,
expandNestedObjects: true,
expandArrayObjects: true,
unwindArrays: false,
preventCsvInjection: true,
emptyFieldValue: "",
})
const ok = writeStream.write((hasHeader ? "\n" : "") + batchCsv)
if (!ok) {
await new Promise((resolve) => writeStream.once("drain", resolve))
}
hasHeader = true
if (orders.length < pageSize) {
break
}
page += 1
}
writeStream.end()
await promise
return new StepResponse(
{ id: fileKey, filename } as ExportOrdersStepOutput,
fileKey
)
},
async (fileId, { container }) => {
if (!fileId) {
return
}
const fileModule: IFileModuleService = container.resolve(Modules.FILE)
await fileModule.deleteFiles(fileId)
}
)

View File

@@ -17,10 +17,11 @@ export * from "./delete-order-change-actions"
export * from "./delete-order-changes" export * from "./delete-order-changes"
export * from "./delete-order-shipping-methods" export * from "./delete-order-shipping-methods"
export * from "./exchange/cancel-exchange" export * from "./exchange/cancel-exchange"
export * from "./list-order-change-actions-by-type"
export * from "./exchange/create-exchange" export * from "./exchange/create-exchange"
export * from "./exchange/create-exchange-items-from-actions" export * from "./exchange/create-exchange-items-from-actions"
export * from "./exchange/delete-exchanges" export * from "./exchange/delete-exchanges"
export * from "./export-orders"
export * from "./list-order-change-actions-by-type"
export * from "./preview-order-change" export * from "./preview-order-change"
export * from "./register-delivery" export * from "./register-delivery"
export * from "./register-fulfillment" export * from "./register-fulfillment"

View File

@@ -0,0 +1,142 @@
import { FilterableOrderProps } from "@medusajs/framework/types"
import {
WorkflowData,
createWorkflow,
transform,
} from "@medusajs/framework/workflows-sdk"
import { useRemoteQueryStep } from "../../common"
import { notifyOnFailureStep, sendNotificationsStep } from "../../notification"
import { exportOrdersStep } from "../steps"
/**
* The data to export orders.
*/
export type ExportOrdersDTO = {
/**
* The fields to select. These fields will be passed to
* [Query](https://docs.medusajs.com/learn/fundamentals/module-links/query), so you can
* pass order properties or any relation names, including custom links.
*/
select: string[]
/**
* The filters to select which orders to export.
*/
filter?: FilterableOrderProps
}
export const exportOrdersWorkflowId = "export-orders"
/**
* This workflow exports orders matching the specified filters. It's used to
* export orders to a CSV file.
*
* :::note
*
* This workflow doesn't return the exported orders. Instead, it sends a notification to the admin
* users that they can download the exported orders.
*
* :::
*
* @example
* To export all orders:
*
* ```ts
* const { result } = await exportOrdersWorkflow(container)
* .run({
* input: {
* select: ["*"],
* }
* })
* ```
*
* To export orders matching a criteria:
*
* ```ts
* const { result } = await exportOrdersWorkflow(container)
* .run({
* input: {
* select: ["*"],
* filter: {
* created_at: {
* $gte: "2024-01-01",
* $lte: "2024-12-31"
* }
* }
* }
* })
* ```
*
* To export orders within a date range:
*
* ```ts
* const { result } = await exportOrdersWorkflow(container)
* .run({
* input: {
* select: ["*"],
* filter: {
* created_at: {
* $gte: "2024-01-01T00:00:00Z",
* $lte: "2024-01-31T23:59:59Z"
* }
* }
* }
* })
* ```
*
* @summary
*
* Export orders with filtering capabilities.
*/
export const exportOrdersWorkflow = createWorkflow(
exportOrdersWorkflowId,
(input: WorkflowData<ExportOrdersDTO>): WorkflowData<void> => {
const file = exportOrdersStep(input).config({
async: true,
backgroundExecution: true,
})
const failureNotification = transform({ input }, (data) => {
return [
{
// We don't need the recipient here for now, but if we want to push feed notifications to a specific user we could add it.
to: "",
channel: "feed",
template: "admin-ui",
data: {
title: "Order export",
description: `Failed to export orders, please try again later.`,
},
},
]
})
notifyOnFailureStep(failureNotification)
const fileDetails = useRemoteQueryStep({
fields: ["id", "url"],
entry_point: "file",
variables: { id: file.id },
list: false,
})
const notifications = transform({ fileDetails, file }, (data) => {
return [
{
// We don't need the recipient here for now, but if we want to push feed notifications to a specific user we could add it.
to: "",
channel: "feed",
template: "admin-ui",
data: {
title: "Order export",
description: "Order export completed successfully!",
file: {
filename: data.file.filename,
url: data.fileDetails.url,
mimeType: "text/csv",
},
},
},
]
})
sendNotificationsStep(notifications)
}
)

View File

@@ -18,6 +18,7 @@ export * from "./claim/update-claim-add-item"
export * from "./claim/update-claim-item" export * from "./claim/update-claim-item"
export * from "./claim/update-claim-shipping-method" export * from "./claim/update-claim-shipping-method"
export * from "./complete-orders" export * from "./complete-orders"
export * from "./compute-adjustments-for-preview"
export * from "./create-fulfillment" export * from "./create-fulfillment"
export * from "./create-or-update-order-payment-collection" export * from "./create-or-update-order-payment-collection"
export * from "./create-order" export * from "./create-order"
@@ -41,6 +42,7 @@ export * from "./exchange/remove-exchange-item-action"
export * from "./exchange/remove-exchange-shipping-method" export * from "./exchange/remove-exchange-shipping-method"
export * from "./exchange/update-exchange-add-item" export * from "./exchange/update-exchange-add-item"
export * from "./exchange/update-exchange-shipping-method" export * from "./exchange/update-exchange-shipping-method"
export * from "./export-orders"
export * from "./fetch-shipping-option" export * from "./fetch-shipping-option"
export * from "./get-order-detail" export * from "./get-order-detail"
export * from "./get-orders-list" export * from "./get-orders-list"
@@ -48,9 +50,9 @@ export * from "./list-shipping-options-for-order"
export * from "./mark-order-fulfillment-as-delivered" export * from "./mark-order-fulfillment-as-delivered"
export * from "./mark-payment-collection-as-paid" export * from "./mark-payment-collection-as-paid"
export * from "./maybe-refresh-shipping-methods" export * from "./maybe-refresh-shipping-methods"
export * from "./on-carry-promotions-flag-set"
export * from "./order-edit/begin-order-edit" export * from "./order-edit/begin-order-edit"
export * from "./order-edit/cancel-begin-order-edit" export * from "./order-edit/cancel-begin-order-edit"
export * from "./compute-adjustments-for-preview"
export * from "./order-edit/confirm-order-edit-request" export * from "./order-edit/confirm-order-edit-request"
export * from "./order-edit/create-order-edit-shipping-method" export * from "./order-edit/create-order-edit-shipping-method"
export * from "./order-edit/order-edit-add-new-item" export * from "./order-edit/order-edit-add-new-item"
@@ -81,7 +83,6 @@ export * from "./return/update-receive-item-return-request"
export * from "./return/update-request-item-return" export * from "./return/update-request-item-return"
export * from "./return/update-return" export * from "./return/update-return"
export * from "./return/update-return-shipping-method" export * from "./return/update-return-shipping-method"
export * from "./on-carry-promotions-flag-set"
export * from "./transfer/accept-order-transfer" export * from "./transfer/accept-order-transfer"
export * from "./transfer/cancel-order-transfer" export * from "./transfer/cancel-order-transfer"
export * from "./transfer/decline-order-transfer" export * from "./transfer/decline-order-transfer"

View File

@@ -638,7 +638,7 @@ export class Order {
* This method updates an order change. It sends a request to the
* [Update Order Change](https://docs.medusajs.com/api/admin#order-changes_postorder-changesid)
* API route.
*
* @since 2.12.0
*
* @param id - The order change's ID.
@@ -674,4 +674,36 @@ export class Order {
}
)
}
/**
* This method starts an order export process to retrieve a CSV of exported orders.
*
* You'll receive in the response the transaction ID of the workflow generating the CSV file.
* To check the status of the execution, send a `GET` request to
* `/admin/workflows-executions/export-orders/:transaction-id`.
*
* Once the execution finishes successfully, a notification is created for the export.
* You can retrieve the notifications using the `/admin/notification` API route to
* retrieve the file's download URL.
*
* @param query - Filters to specify which orders to export.
* @param headers - Headers to pass in the request.
* @returns The export's details.
*
* @example
* sdk.admin.order.export({})
* .then(({ transaction_id }) => {
* console.log(transaction_id)
* })
*/
async export(query?: HttpTypes.AdminOrderFilters, headers?: ClientHeaders) {
return await this.client.fetch<HttpTypes.AdminExportOrderResponse>(
`/admin/orders/export`,
{
method: "POST",
headers,
query,
}
)
}
}
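
The `export()` doc comment above describes a three-step flow: start the export, poll the workflow execution, then pick up the download URL from a notification. A minimal client-side sketch of that flow, assuming the workflow-execution route named in the comment and the SDK's generic `sdk.client.fetch` escape hatch; the polling interval, attempt count, and response shape are illustrative assumptions:

```ts
import Medusa from "@medusajs/js-sdk"

const sdk = new Medusa({
  baseUrl: "http://localhost:9000",
  auth: { type: "session" },
})

export async function exportOrdersAndWait() {
  // 1. Start the export; the route responds with 202 and the transaction ID.
  const { transaction_id } = await sdk.admin.order.export({})

  // 2. Poll the workflow execution route named in the doc comment above.
  //    (The response shape and state values here are assumptions.)
  for (let attempt = 0; attempt < 30; attempt++) {
    const res = await sdk.client.fetch<any>(
      `/admin/workflows-executions/export-orders/${transaction_id}`
    )
    const state = res?.workflow_execution?.state
    if (state && state !== "invoking") {
      break
    }
    await new Promise((resolve) => setTimeout(resolve, 2_000))
  }

  // 3. The CSV download URL is delivered as an admin feed notification once the
  //    export finishes (see sendNotificationsStep in the export workflow above).
}
```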

View File

@@ -1,4 +1,4 @@
-import { Readable } from "stream"
import { Readable, Writable } from "stream"
import { FileAccessPermission } from "./common"
/**
@@ -109,6 +109,28 @@ export type ProviderGetPresignedUploadUrlDTO = {
expiresIn?: number
}
/**
* @interface
*
* The details of the file to upload via a stream.
*/
export type ProviderUploadStreamDTO = {
/**
* The filename of the uploaded file
*/
filename: string
/**
* The mimetype of the uploaded file
*/
mimeType: string
/**
* The access level of the file. Defaults to private if not passed
*/
access?: FileAccessPermission
}
export interface IFileProvider {
/**
* This method is used to upload a file
@@ -178,4 +200,14 @@ export interface IFileProvider {
* Get the file contents as a Node.js Buffer
*/
getAsBuffer(fileData: ProviderGetFileDTO): Promise<Buffer>
/**
* Get a writeable stream to upload a file.
*/
getUploadStream(fileData: ProviderUploadStreamDTO): Promise<{
writeStream: Writable
promise: Promise<ProviderFileResultDTO>
url: string
fileKey: string
}>
}

View File

@@ -1,10 +1,15 @@
import type { Writable } from "stream"
import { Readable } from "stream"
-import { IModuleService } from "../modules-sdk"
-import { FileDTO, FilterableFileProps, UploadFileUrlDTO } from "./common"
import { FindConfig } from "../common"
import { IModuleService } from "../modules-sdk"
import { Context } from "../shared-context"
-import { IFileProvider } from "./provider"
import { FileDTO, FilterableFileProps, UploadFileUrlDTO } from "./common"
import { CreateFileDTO, GetUploadFileUrlDTO } from "./mutations"
import {
IFileProvider,
ProviderFileResultDTO,
ProviderUploadStreamDTO,
} from "./provider"
export interface IFileModuleService extends IModuleService {
/**
@@ -203,4 +208,14 @@ export interface IFileModuleService extends IModuleService {
* contents.toString('utf-8')
*/
getAsBuffer(id: string, sharedContext?: Context): Promise<Buffer>
/**
* Get a writeable stream to upload a file.
*/
getUploadStream(fileData: ProviderUploadStreamDTO): Promise<{
writeStream: Writable
promise: Promise<ProviderFileResultDTO>
url: string
fileKey: string
}>
}

View File

@@ -47,3 +47,10 @@ export interface AdminOrderPreviewResponse {
*/
order: AdminOrderPreview
}
export interface AdminExportOrderResponse {
/**
* The ID of the export order workflow's transaction.
*/
transaction_id: string
}

View File

@@ -1,5 +1,5 @@
-import type { Readable } from "stream"
import { FileTypes, IFileProvider } from "@medusajs/types"
import type { Readable, Writable } from "stream"
/**
* ### constructor
@@ -234,4 +234,34 @@ export class AbstractFileProviderService implements IFileProvider {
getAsBuffer(fileData: FileTypes.ProviderGetFileDTO): Promise<Buffer> {
throw Error("getAsBuffer must be overridden by the child class")
}
/**
* This method returns a writeable stream to upload a file.
*
* @param {FileTypes.ProviderUploadStreamDTO} fileData - The details of the file to upload.
* @returns {Promise<{ writeStream: Writable, promise: Promise<FileTypes.ProviderFileResultDTO>, url: string, fileKey: string }>} The writeable stream and upload promise.
*
* @since 2.8.0
*
* @example
* class MyFileProviderService extends AbstractFileProviderService {
* // ...
* async getUploadStream(fileData: FileTypes.ProviderUploadStreamDTO): Promise<{
* writeStream: Writable
* promise: Promise<FileTypes.ProviderFileResultDTO>
* url: string
* fileKey: string
* }> {
* // TODO logic to get the writeable stream
* }
* }
*/
getUploadStream(fileData: FileTypes.ProviderUploadStreamDTO): Promise<{
writeStream: Writable
promise: Promise<FileTypes.ProviderFileResultDTO>
url: string
fileKey: string
}> {
throw Error("getUploadStream must be overridden by the child class")
}
}

View File

@@ -0,0 +1,20 @@
import { exportOrdersWorkflow } from "@medusajs/core-flows"
import {
AuthenticatedMedusaRequest,
MedusaResponse,
} from "@medusajs/framework/http"
import { HttpTypes } from "@medusajs/framework/types"
export const POST = async (
req: AuthenticatedMedusaRequest<{}, HttpTypes.AdminOrderFilters>,
res: MedusaResponse<HttpTypes.AdminExportOrderResponse>
) => {
const selectFields = req.queryConfig.fields ?? []
const input = { select: selectFields, filter: req.filterableFields }
const { transaction } = await exportOrdersWorkflow(req.scope).run({
input,
})
res.status(202).json({ transaction_id: transaction.transactionId })
}

View File

@@ -32,6 +32,16 @@ export const adminOrderRoutesMiddlewares: MiddlewareRoute[] = [
),
],
},
{
method: ["POST"],
matcher: "/admin/orders/export",
middlewares: [
validateAndTransformQuery(
AdminGetOrdersParams,
QueryConfig.exportTransformQueryConfig
),
],
},
{
method: ["GET"],
matcher: "/admin/orders/:id",

View File

@@ -118,3 +118,33 @@ export const listShippingOptionsQueryConfig = {
defaultLimit: 100,
isList: true,
}
export const defaultAdminExportOrderFields = [
"id",
"display_id",
"status",
"created_at",
"updated_at",
"email",
"currency_code",
"region_id",
"subtotal",
"tax_total",
"shipping_total",
"discount_total",
"gift_card_total",
"total",
"*customer",
"*shipping_address",
"*billing_address",
"*sales_channel",
"*items",
"*shipping_methods",
"*payment_collections",
"*fulfillments",
]
export const exportTransformQueryConfig = {
defaults: defaultAdminExportOrderFields,
isList: true,
}

View File

@@ -1,19 +1,19 @@
-import type { Readable } from "stream"
import {
Context,
CreateFileDTO,
-GetUploadFileUrlDTO,
FileDTO,
-UploadFileUrlDTO,
FileTypes,
FilterableFileProps,
FindConfig,
GetUploadFileUrlDTO,
ModuleJoinerConfig,
UploadFileUrlDTO,
} from "@medusajs/framework/types" } from "@medusajs/framework/types"
import type { Readable, Writable } from "stream"
import { MedusaError } from "@medusajs/framework/utils"
import { joinerConfig } from "../joiner-config"
import FileProviderService from "./file-provider-service"
-import { MedusaError } from "@medusajs/framework/utils"
type InjectedDependencies = {
fileProviderService: FileProviderService
@@ -172,4 +172,25 @@ export default class FileModuleService implements FileTypes.IFileModuleService {
getAsBuffer(id: string): Promise<Buffer> {
return this.fileProviderService_.getAsBuffer({ fileKey: id })
}
/**
* Get a writeable stream to upload a file.
*
* @example
* const { writeStream, promise } = await fileModuleService.getUploadStream({
* filename: "test.csv",
* mimeType: "text/csv",
* })
*
* stream.pipe(writeStream)
* const result = await promise
*/
getUploadStream(data: FileTypes.ProviderUploadStreamDTO): Promise<{
writeStream: Writable
promise: Promise<FileTypes.ProviderFileResultDTO>
url: string
fileKey: string
}> {
return this.fileProviderService_.getUploadStream(data)
}
}

View File

@@ -1,7 +1,7 @@
-import type { Readable } from "stream"
import { Constructor, FileTypes } from "@medusajs/framework/types"
import { MedusaError } from "@medusajs/framework/utils"
import { FileProviderRegistrationPrefix } from "@types"
import type { Readable, Writable } from "stream"
type InjectedDependencies = {
[
@@ -81,4 +81,13 @@ export default class FileProviderService {
getAsBuffer(fileData: FileTypes.ProviderGetFileDTO): Promise<Buffer> {
return this.fileProvider_.getAsBuffer(fileData)
}
getUploadStream(fileData: FileTypes.ProviderUploadStreamDTO): Promise<{
writeStream: Writable
promise: Promise<FileTypes.ProviderFileResultDTO>
url: string
fileKey: string
}> {
return this.fileProvider_.getUploadStream(fileData)
}
}

Binary file not shown (image, 23 KiB).

View File

@@ -0,0 +1,119 @@
import { FileSystem } from "@medusajs/utils"
import fs from "fs/promises"
import path from "path"
import { LocalFileService } from "../../src/services/local-file"
jest.setTimeout(10000)
describe("Local File Plugin", () => {
let localService: LocalFileService
const fixtureImagePath =
process.cwd() + "/integration-tests/__fixtures__/catphoto.jpg"
const uploadDir = path.join(
process.cwd(),
"integration-tests/__tests__/uploads"
)
const fileSystem = new FileSystem(uploadDir)
beforeAll(async () => {
localService = new LocalFileService(
{
logger: console as any,
},
{
upload_dir: uploadDir,
backend_url: "http://localhost:9000/static",
}
)
})
afterAll(async () => {
await fileSystem.cleanup()
})
it(`should upload, read, and then delete a public file successfully`, async () => {
const fileContent = await fs.readFile(fixtureImagePath)
const fixtureAsBase64 = fileContent.toString("base64")
const resp = await localService.upload({
filename: "catphoto.jpg",
mimeType: "image/jpeg",
content: fileContent as any,
access: "public",
})
expect(resp).toEqual({
key: expect.stringMatching(/catphoto.*\.jpg/),
url: expect.stringMatching(
/http:\/\/localhost:9000\/static\/.*catphoto.*\.jpg/
),
})
// For local file provider, we can verify the file exists on disk
const fileKey = resp.key
const baseDir = uploadDir
const filePath = path.join(baseDir, fileKey)
const fileOnDisk = await fs.readFile(filePath)
const fileOnDiskAsBase64 = fileOnDisk.toString("base64")
expect(fileOnDiskAsBase64).toEqual(fixtureAsBase64)
const signedUrl = await localService.getPresignedDownloadUrl({
fileKey: resp.key,
})
expect(signedUrl).toEqual(resp.url)
const buffer = await localService.getAsBuffer({ fileKey: resp.key })
expect(buffer).toEqual(fileContent)
await localService.delete({ fileKey: resp.key })
await expect(fs.access(filePath)).rejects.toThrow()
})
it("uploads using stream", async () => {
const fileContent = await fs.readFile(fixtureImagePath)
const { writeStream, promise } = await localService.getUploadStream({
filename: "catphoto-stream.jpg",
mimeType: "image/jpeg",
access: "public",
})
writeStream.write(fileContent)
writeStream.end()
const resp = await promise
expect(resp).toEqual({
key: expect.stringMatching(/catphoto-stream.*\.jpg/),
url: expect.stringMatching(
/http:\/\/localhost:9000\/static\/.*catphoto-stream.*\.jpg/
),
})
const fileKey = resp.key
const filePath = path.join(uploadDir, fileKey)
const fileOnDisk = await fs.readFile(filePath)
expect(fileOnDisk).toEqual(fileContent)
const signedUrl = await localService.getPresignedDownloadUrl({
fileKey: resp.key,
})
expect(signedUrl).toEqual(resp.url)
const buffer = await localService.getAsBuffer({ fileKey: resp.key })
expect(buffer).toEqual(fileContent)
await localService.delete({ fileKey: resp.key })
await expect(fs.access(filePath)).rejects.toThrow()
})
})

View File

@@ -21,6 +21,7 @@
"license": "MIT", "license": "MIT",
"scripts": { "scripts": {
"test": "../../../../node_modules/.bin/jest --passWithNoTests src", "test": "../../../../node_modules/.bin/jest --passWithNoTests src",
"test:integration": "../../../../node_modules/.bin/jest --passWithNoTests --forceExit --testPathPattern=\"integration-tests/__tests__/[^/]*\\.spec\\.ts\"",
"build": "yarn run -T rimraf dist && yarn run -T tsc --build ./tsconfig.json", "build": "yarn run -T rimraf dist && yarn run -T tsc --build ./tsconfig.json",
"watch": "yarn run -T tsc --watch" "watch": "yarn run -T tsc --watch"
}, },

View File

@@ -3,10 +3,10 @@ import {
AbstractFileProviderService,
MedusaError,
} from "@medusajs/framework/utils"
-import { createReadStream } from "fs"
import { createReadStream, createWriteStream } from "fs"
import fs from "fs/promises"
import path from "path"
-import type { Readable } from "stream"
import type { Readable, Writable } from "stream"
export class LocalFileService extends AbstractFileProviderService {
static identifier = "localfs"
@@ -78,6 +78,59 @@ export class LocalFileService extends AbstractFileProviderService {
}
}
async getUploadStream(fileData: FileTypes.ProviderUploadStreamDTO): Promise<{
writeStream: Writable
promise: Promise<FileTypes.ProviderFileResultDTO>
url: string
fileKey: string
}> {
if (!fileData.filename) {
throw new MedusaError(
MedusaError.Types.INVALID_DATA,
`No filename provided`
)
}
const parsedFilename = path.parse(fileData.filename)
const baseDir =
fileData.access === "public" ? this.uploadDir_ : this.privateUploadDir_
await this.ensureDirExists(baseDir, parsedFilename.dir)
const fileKey = path.join(
parsedFilename.dir,
// We prepend "private" to the file key so deletions and presigned URLs can know which folder to look into
`${fileData.access === "public" ? "" : "private-"}${Date.now()}-${
parsedFilename.base
}`
)
const filePath = this.getUploadFilePath(baseDir, fileKey)
const fileUrl = this.getUploadFileUrl(fileKey)
const writeStream = createWriteStream(filePath)
const promise = new Promise<FileTypes.ProviderFileResultDTO>(
(resolve, reject) => {
writeStream.on("finish", () => {
resolve({
url: fileUrl,
key: fileKey,
})
})
writeStream.on("error", (err) => {
reject(err)
})
}
)
return {
writeStream,
promise,
url: fileUrl,
fileKey,
}
}
async delete(
files: FileTypes.ProviderDeleteFileDTO | FileTypes.ProviderDeleteFileDTO[]
): Promise<void> {
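
The write-stream/promise pair returned by this provider is consumed the same way as in the file module service example earlier in this diff. A minimal consumer sketch, assuming a provider instance and an on-disk CSV; the path and filename are illustrative:

```ts
import { createReadStream } from "fs"
import { pipeline } from "stream/promises"
import { IFileProvider } from "@medusajs/types"

export async function uploadCsvFromDisk(provider: IFileProvider) {
  const { writeStream, promise, fileKey } = await provider.getUploadStream({
    filename: "orders.csv",
    mimeType: "text/csv",
    access: "private",
  })

  // pipeline() handles backpressure and ends the write stream once the source
  // is exhausted, which resolves the provider's "finish"-based promise.
  await pipeline(createReadStream("/tmp/orders.csv"), writeStream)

  const { url, key } = await promise
  return { url, key, fileKey }
}
```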

View File

@@ -49,7 +49,7 @@ describe.skip("S3 File Plugin", () => {
expect(resp).toEqual({
key: expect.stringMatching(/tests\/catphoto.*\.jpg/),
-url: expect.stringMatching(/https:\/\/.*\.jpg/),
url: expect.stringMatching(/https?:\/\/.*\.jpg/),
})
const urlResp = await axios.get(resp.url).catch((e) => e.response)
@@ -95,7 +95,7 @@ describe.skip("S3 File Plugin", () => {
expect(resp).toEqual({
key: expect.stringMatching(/tests\/catphoto-か.*\.jpg/),
-url: expect.stringMatching(/https:\/\/.*\/catphoto-%E3%81%8B.*\.jpg/),
url: expect.stringMatching(/https?:\/\/.*\/catphoto-%E3%81%8B.*\.jpg/),
})
})
@@ -112,7 +112,7 @@ describe.skip("S3 File Plugin", () => {
expect(resp).toEqual({
key: expect.stringMatching(/tests\/catphoto.*\.jpg/),
-url: expect.stringMatching(/https:\/\/.*\/cat%3Fphoto.*\.jpg/),
url: expect.stringMatching(/https?:\/\/.*\/cat%3Fphoto.*\.jpg/),
})
})
@@ -128,7 +128,7 @@ describe.skip("S3 File Plugin", () => {
expect(resp).toEqual({
key: expect.stringMatching(/tests\/catphoto.*\.jpg/),
-url: expect.stringMatching(/https:\/\/.*catphoto\.jpg/),
url: expect.stringMatching(/https?:\/\/.*catphoto\.jpg/),
})
const uploadResp = await axios.put(resp.url, fileContent, {
@@ -169,7 +169,7 @@ describe.skip("S3 File Plugin", () => {
expect(resp).toEqual({
key: expect.stringMatching(/tests\/testfolder\/catphoto.*\.jpg/),
-url: expect.stringMatching(/https:\/\/.*testfolder\/catphoto\.jpg/),
url: expect.stringMatching(/https?:\/\/.*testfolder\/catphoto\.jpg/),
})
const uploadResp = await axios.put(resp.url, fileContent, {
@@ -221,4 +221,42 @@ describe.skip("S3 File Plugin", () => {
{ fileKey: cat2.key },
])
})
it("uploads using stream", async () => {
const fileContent = await fs.readFile(fixtureImagePath)
const fixtureAsBinary = fileContent.toString("binary")
const { writeStream, promise } = await s3Service.getUploadStream({
filename: "catphoto-stream.jpg",
mimeType: "image/jpeg",
access: "public",
})
writeStream.write(fileContent)
writeStream.end()
const resp = await promise
expect(resp).toEqual({
key: expect.stringMatching(/tests\/catphoto-stream.*\.jpg/),
url: expect.stringMatching(/https?:\/\/.*\.jpg/),
})
const urlResp = await axios.get(resp.url).catch((e) => e.response)
expect(urlResp.status).toEqual(200)
const signedUrl = await s3Service.getPresignedDownloadUrl({
fileKey: resp.key,
})
const signedUrlFile = Buffer.from(
await axios
.get(signedUrl, { responseType: "arraybuffer" })
.then((r) => r.data)
)
expect(signedUrlFile.toString("binary")).toEqual(fixtureAsBinary)
await s3Service.delete({ fileKey: resp.key })
})
})

View File

@@ -30,6 +30,7 @@
},
"dependencies": {
"@aws-sdk/client-s3": "^3.556.0",
"@aws-sdk/lib-storage": "^3.556.0",
"@aws-sdk/s3-request-presigner": "^3.556.0", "@aws-sdk/s3-request-presigner": "^3.556.0",
"ulid": "^2.3.0" "ulid": "^2.3.0"
}, },

View File

@@ -7,6 +7,7 @@ import {
S3Client,
S3ClientConfigType,
} from "@aws-sdk/client-s3"
import { Upload } from "@aws-sdk/lib-storage"
import { getSignedUrl } from "@aws-sdk/s3-request-presigner"
import {
FileTypes,
@@ -18,7 +19,7 @@ import {
MedusaError,
} from "@medusajs/framework/utils"
import path from "path"
-import { Readable } from "stream"
import { PassThrough, Readable, Writable } from "stream"
import { ulid } from "ulid"
type InjectedDependencies = {
@@ -165,6 +166,53 @@ export class S3FileService extends AbstractFileProviderService {
}
}
async getUploadStream(fileData: FileTypes.ProviderUploadStreamDTO): Promise<{
writeStream: Writable
promise: Promise<FileTypes.ProviderFileResultDTO>
url: string
fileKey: string
}> {
if (!fileData.filename) {
throw new MedusaError(
MedusaError.Types.INVALID_DATA,
`No filename provided`
)
}
const parsedFilename = path.parse(fileData.filename)
const fileKey = `${this.config_.prefix}${parsedFilename.name}-${ulid()}${
parsedFilename.ext
}`
const pass = new PassThrough()
const upload = new Upload({
client: this.client_,
params: {
ACL: fileData.access === "public" ? "public-read" : "private",
Bucket: this.config_.bucket,
Key: fileKey,
Body: pass,
ContentType: fileData.mimeType,
CacheControl: this.config_.cacheControl,
Metadata: {
"original-filename": encodeURIComponent(fileData.filename),
},
},
})
const promise = upload.done().then(() => ({
url: `${this.config_.fileUrl}/${fileKey}`,
key: fileKey,
}))
return {
writeStream: pass,
promise,
url: `${this.config_.fileUrl}/${fileKey}`,
fileKey,
}
}
async delete(
files: FileTypes.ProviderDeleteFileDTO | FileTypes.ProviderDeleteFileDTO[]
): Promise<void> {
@@ -207,7 +255,7 @@ export class S3FileService extends AbstractFileProviderService {
Key: `${fileData.fileKey}`,
})
-return await getSignedUrl(this.client_, command, {
return await getSignedUrl(this.client_ as any, command as any, {
expiresIn: this.config_.downloadFileDuration,
})
}
@@ -238,7 +286,7 @@ export class S3FileService extends AbstractFileProviderService {
Key: fileKey,
})
-const signedUrl = await getSignedUrl(this.client_, command, {
const signedUrl = await getSignedUrl(this.client_ as any, command as any, {
expiresIn:
fileData.expiresIn ?? DEFAULT_UPLOAD_EXPIRATION_DURATION_SECONDS,
})
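
The PassThrough + Upload pairing in getUploadStream above is the standard @aws-sdk/lib-storage pattern: Upload accepts a stream Body and performs a managed (multipart) upload, so callers can treat the PassThrough as a plain Writable without handling parts or drain events themselves. A standalone sketch of the same pattern, with the bucket, key, and region as illustrative placeholders:

```ts
import { S3Client } from "@aws-sdk/client-s3"
import { Upload } from "@aws-sdk/lib-storage"
import { PassThrough } from "stream"

async function streamCsvToS3() {
  const client = new S3Client({ region: "us-east-1" })
  const body = new PassThrough()

  const upload = new Upload({
    client,
    params: { Bucket: "my-bucket", Key: "exports/orders.csv", Body: body },
  })

  // Writers push chunks into the PassThrough; Upload drains it into S3 parts
  // in the background and resolves done() once the last part is flushed.
  body.write("id,total\n")
  body.end("order_1,100\n")

  await upload.done()
}
```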

View File

@@ -580,6 +580,23 @@ __metadata:
languageName: node
linkType: hard
"@aws-sdk/lib-storage@npm:^3.556.0":
version: 3.948.0
resolution: "@aws-sdk/lib-storage@npm:3.948.0"
dependencies:
"@smithy/abort-controller": ^4.2.5
"@smithy/middleware-endpoint": ^4.3.14
"@smithy/smithy-client": ^4.9.10
buffer: 5.6.0
events: 3.3.0
stream-browserify: 3.0.0
tslib: ^2.6.2
peerDependencies:
"@aws-sdk/client-s3": ^3.948.0
checksum: 11edd46ee1f2ef74efbf9b5b422f77d2c792693bd90051ef3bda6ab36e76f2b6533f531df5d390da31c757efcc98b0513d70a14a28561eefa13e641847d1831d
languageName: node
linkType: hard
"@aws-sdk/middleware-bucket-endpoint@npm:3.936.0": "@aws-sdk/middleware-bucket-endpoint@npm:3.936.0":
version: 3.936.0 version: 3.936.0
resolution: "@aws-sdk/middleware-bucket-endpoint@npm:3.936.0" resolution: "@aws-sdk/middleware-bucket-endpoint@npm:3.936.0"
@@ -3580,6 +3597,7 @@ __metadata:
resolution: "@medusajs/file-s3@workspace:packages/modules/providers/file-s3" resolution: "@medusajs/file-s3@workspace:packages/modules/providers/file-s3"
dependencies: dependencies:
"@aws-sdk/client-s3": ^3.556.0 "@aws-sdk/client-s3": ^3.556.0
"@aws-sdk/lib-storage": ^3.556.0
"@aws-sdk/s3-request-presigner": ^3.556.0 "@aws-sdk/s3-request-presigner": ^3.556.0
"@medusajs/framework": 2.12.2 "@medusajs/framework": 2.12.2
ulid: ^2.3.0 ulid: ^2.3.0
@@ -13465,7 +13483,7 @@ __metadata:
languageName: node
linkType: hard
-"base64-js@npm:^1.2.0, base64-js@npm:^1.3.1":
"base64-js@npm:^1.0.2, base64-js@npm:^1.2.0, base64-js@npm:^1.3.1":
version: 1.5.1
resolution: "base64-js@npm:1.5.1"
checksum: f23823513b63173a001030fae4f2dabe283b99a9d324ade3ad3d148e218134676f1ee8568c877cd79ec1c53158dcf2d2ba527a97c606618928ba99dd930102bf
@@ -13769,6 +13787,16 @@ __metadata:
languageName: node
linkType: hard
"buffer@npm:5.6.0":
version: 5.6.0
resolution: "buffer@npm:5.6.0"
dependencies:
base64-js: ^1.0.2
ieee754: ^1.1.4
checksum: 07037a0278b07fbc779920f1ba1b473933ffb4a2e2f7b387c55daf6ac64a05b58c27da9e85730a4046e8f97a49f8acd9f7bf89605c0a4dfda88ebfb7e08bfe4a
languageName: node
linkType: hard
"buffer@npm:^5.2.1, buffer@npm:^5.5.0": "buffer@npm:^5.2.1, buffer@npm:^5.5.0":
version: 5.7.1 version: 5.7.1
resolution: "buffer@npm:5.7.1" resolution: "buffer@npm:5.7.1"
@@ -16661,7 +16689,7 @@ __metadata:
languageName: node
linkType: hard
-"events@npm:^3.3.0":
"events@npm:3.3.0, events@npm:^3.3.0":
version: 3.3.0
resolution: "events@npm:3.3.0"
checksum: d6b6f2adbccbcda74ddbab52ed07db727ef52e31a61ed26db9feb7dc62af7fc8e060defa65e5f8af9449b86b52cc1a1f6a79f2eafcf4e62add2b7a1fa4a432f6
@@ -18278,7 +18306,7 @@ __metadata:
languageName: node
linkType: hard
-"ieee754@npm:^1.1.13, ieee754@npm:^1.2.1":
"ieee754@npm:^1.1.13, ieee754@npm:^1.1.4, ieee754@npm:^1.2.1":
version: 1.2.1
resolution: "ieee754@npm:1.2.1"
checksum: b0782ef5e0935b9f12883a2e2aa37baa75da6e66ce6515c168697b42160807d9330de9a32ec1ed73149aea02e0d822e572bca6f1e22bdcbd2149e13b050b17bb
@@ -24170,7 +24198,7 @@ __metadata:
languageName: node
linkType: hard
-"readable-stream@npm:^3.0.2, readable-stream@npm:^3.1.1, readable-stream@npm:^3.4.0, readable-stream@npm:^3.6.0, readable-stream@npm:^3.6.2":
"readable-stream@npm:^3.0.2, readable-stream@npm:^3.1.1, readable-stream@npm:^3.4.0, readable-stream@npm:^3.5.0, readable-stream@npm:^3.6.0, readable-stream@npm:^3.6.2":
version: 3.6.2
resolution: "readable-stream@npm:3.6.2"
dependencies:
@@ -25791,6 +25819,16 @@ __metadata:
languageName: node
linkType: hard
"stream-browserify@npm:3.0.0":
version: 3.0.0
resolution: "stream-browserify@npm:3.0.0"
dependencies:
inherits: ~2.0.4
readable-stream: ^3.5.0
checksum: ec3b975a4e0aa4b3dc5e70ffae3fc8fd29ac725353a14e72f213dff477b00330140ad014b163a8cbb9922dfe90803f81a5ea2b269e1bbfd8bd71511b88f889ad
languageName: node
linkType: hard
"stream-shift@npm:^1.0.0, stream-shift@npm:^1.0.2": "stream-shift@npm:^1.0.0, stream-shift@npm:^1.0.2":
version: 1.0.3 version: 1.0.3
resolution: "stream-shift@npm:1.0.3" resolution: "stream-shift@npm:1.0.3"