diff --git a/.cspell b/.cspell
index 7e9cf8acc..0dc59e4cf 100644
--- a/.cspell
+++ b/.cspell
@@ -2,6 +2,7 @@ aoss
 APIV
 cbor
 evals
+formdata
 lucene
 millis
 mxyz
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 073dcec25..16c6cf367 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -44,6 +44,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
 - Added `GET /_plugins/_ml/connectors/{connector_id}`, `_search`, `POST /_plugins/_ml/connectors/_search`, and `PUT /_plugins/_ml/connectors/{connector_id}` ([#764](https://github.com/opensearch-project/opensearch-api-specification/pull/764))
 - Added the ability to skip an individual chapter test ([#765](https://github.com/opensearch-project/opensearch-api-specification/pull/765))
 - Added uploading of test spec logs ([#767](https://github.com/opensearch-project/opensearch-api-specification/pull/767))
+- Added generation of a Postman collection file ([#770](https://github.com/opensearch-project/opensearch-api-specification/pull/770))
 - Added `aggs` property as an alias to `aggregations` in requestBody of `search` [#774](https://github.com/opensearch-project/opensearch-api-specification/issues/774)
 - Added `POST /_plugins/_ml/memory`, `POST /_plugins/_ml/memory/_search`, `{memory_id}/_search`, `{memory_id}/messages`, `PUT /_plugins/_ml/memory/{memory_id}`, `message/{message_id}`,  `GET /_plugins/_ml/memory`, `GET /_plugins/_ml/memory/{memory_id}`, `_search`, `message/{message_id}`, `{memory_id}/messages`, `{memory_id}/_search`, `message/{message_id}/traces`,  and `DELETE /_plugins/_ml/memory/{memory_id}` ([#771](https://github.com/opensearch-project/opensearch-api-specification/pull/771))
 - Added support for evaluating response payloads in prologues and epilogues ([#772](https://github.com/opensearch-project/opensearch-api-specification/pull/772))
diff --git a/package.json b/package.json
index a0715e2a2..33594c7aa 100644
--- a/package.json
+++ b/package.json
@@ -8,6 +8,7 @@
     "coverage:spec": "ts-node tools/src/coverage/coverage.ts",
     "dump-cluster-spec": "ts-node tools/src/dump-cluster-spec/dump-cluster-spec.ts",
     "generate-types": "ts-node tools/src/tester/_generate_story_types.ts",
+    "export:postman": "ts-node tools/src/exporter/export.ts",
     "lint:spec": "ts-node tools/src/linter/lint.ts",
     "lint": "eslint . --report-unused-disable-directives",
     "lint--fix": "eslint . --fix --report-unused-disable-directives",
diff --git a/tools/src/OpenSearchHttpClient.ts b/tools/src/OpenSearchHttpClient.ts
index 247f1f8f9..095dafef0 100644
--- a/tools/src/OpenSearchHttpClient.ts
+++ b/tools/src/OpenSearchHttpClient.ts
@@ -221,6 +221,13 @@ export class OpenSearchHttpClient {
     }
   }
 
+  // Returns the configured base URL of the client, or DEFAULT_URL when none was provided.
+  get_url(): string | undefined {
+    if (this._opts?.url != null && this._opts.url !== '') {
+      return this._opts.url
+    }
+    return DEFAULT_URL
+  }
+
   async request<T = any, R = AxiosResponse<T>, D = any>(config: AxiosRequestConfig<D>): Promise<R> {
     return await this._axios.request(config)
   }
diff --git a/tools/src/exporter/ExportChapters.ts b/tools/src/exporter/ExportChapters.ts
new file mode 100644
index 000000000..6e9f1dd87
--- /dev/null
+++ b/tools/src/exporter/ExportChapters.ts
@@ -0,0 +1,128 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*/
+
+import fs from 'fs'
+import { read_yaml, to_ndjson } from '../helpers'
+import { basename, resolve } from 'path'
+import _ from 'lodash'
+import { StoryFile } from 'tester/types/eval.types'
+import { Logger } from 'Logger'
+import StoryParser from './StoryParser'
+import { PostmanManager } from './PostmanManager'
+import { APPLICATION_JSON } from './MimeTypes'
+import { Parameter } from 'tester/types/story.types'
+
+export default class ExportChapters {
+  private readonly _story_files: Record<string, StoryFile[]> = {}
+  private readonly _logger: Logger
+  private readonly _postman_manager: PostmanManager
+
+  constructor (logger: Logger, postman_manager: PostmanManager) {
+    this._logger = logger
+    this._postman_manager = postman_manager
+  }
+
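+  // Walks every story file under story_path, adds each chapter to the Postman
+  // collection as a request, and writes the resulting collection to disk.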
+  run (story_path: string): void {
+    const story_files = this.story_files(story_path)
+
+    for (const story_file of story_files) {
+      for (const chapter of story_file.story.chapters) {
+        const [headers, content_type] = this.#serialize_headers(chapter.request?.headers, chapter.request?.content_type)
+        let params: Record<string, Parameter> = {}
+        if (chapter.parameters !== undefined) {
+          params = this.#parse_url(chapter.path, chapter.parameters)
+        }
+        const request_data = chapter.request?.payload !== undefined ? this.#serialize_payload(
+          chapter.request.payload,
+          content_type
+        ) : {}
+        this._postman_manager.add_to_collection('url', chapter.method, chapter.path, headers, params, request_data, content_type, story_file.full_path)
+      }
+      this._logger.info(`Exporting ${story_file.display_path} ...`)
+    }
+    this._postman_manager.save_collection()
+  }
+
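+  // Loads and caches the story files found under story_path.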
+  story_files(story_path: string): StoryFile[] {
+    if (this._story_files[story_path] !== undefined) return this._story_files[story_path]
+    this._story_files[story_path] = this.#sort_story_files(this.#collect_story_files(resolve(story_path), '', ''))
+    return this._story_files[story_path]
+  }
+
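+  // Recursively collects story files, skipping dotfiles, docker-compose.yml, Dockerfile, and Python helpers.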
+  #collect_story_files (folder: string, file: string, prefix: string): StoryFile[] {
+    const path = file === '' ? folder : `${folder}/${file}`
+    const next_prefix = prefix === '' ? file : `${prefix}/${file}`
+    if (file.startsWith('.') || file == 'docker-compose.yml' || file == 'Dockerfile' || file.endsWith('.py')) {
+      return []
+    } else if (fs.statSync(path).isFile()) {
+      const story = StoryParser.parse(read_yaml(path))
+      return [{
+        display_path: next_prefix === '' ? basename(path) : next_prefix,
+        full_path: path,
+        story
+      }]
+    } else {
+      return _.compact(fs.readdirSync(path).flatMap(next_file => {
+        return this.#collect_story_files(path, next_file, next_prefix)
+      }))
+    }
+  }
+
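+  // Sorts story files by directory depth first, then alphabetically by display path.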
+  #sort_story_files (story_files: StoryFile[]): StoryFile[] {
+    return story_files.sort(({ display_path: a }, { display_path: b }) => {
+      const a_depth = a.split('/').length
+      const b_depth = b.split('/').length
+      if (a_depth !== b_depth) return a_depth - b_depth
+      return a.localeCompare(b)
+    })
+  }
+
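+  // Extracts the content type from the headers (defaulting to application/json)
+  // and returns the remaining headers alongside it.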
+  #serialize_headers(headers?: Record<string, any>, content_type?: string): [Record<string, any> | undefined, string] {
+    headers = _.cloneDeep(headers)
+    content_type = content_type ?? APPLICATION_JSON
+    if (!headers) return [headers, content_type]
+    _.forEach(headers, (v, k) => {
+      if (k.toLowerCase() == 'content-type') {
+        content_type = v.toString()
+        if (headers) delete headers[k]
+      }
+    })
+    return [headers, content_type]
+  }
+
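+  // Serializes the payload according to the content type; NDJSON payloads are
+  // flattened into newline-delimited JSON, everything else is passed through.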
+  #serialize_payload(payload: any, content_type: string): any {
+    if (payload === undefined) return undefined
+    switch (content_type) {
+      case 'application/x-ndjson': return to_ndjson(payload as any[])
+      default: return payload
+    }
+  }
+
+  resolve_params (parameters: Record<string, Parameter>): Record<string, Parameter> {
+    const resolved_params: Record<string, Parameter> = {}
+    for (const [param_name, param_value] of Object.entries(parameters ?? {})) {
+      resolved_params[param_name] = param_value
+    }
+    return resolved_params
+  }
+
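+  // Splits the chapter parameters into path and query parameters and returns
+  // only the query parameters (path parameters are already part of the path).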
+  #parse_url (path: string, parameters: Record<string, Parameter>): Record<string, Parameter> {
+    const path_params = new Set<string>()
+    const path_param_pattern = /{(\w+)}/g
+    let match
+    while ((match = path_param_pattern.exec(path)) !== null) {
+      path_params.add(match[1])
+    }
+    return Object.fromEntries(Object.entries(parameters).filter(([key]) => !path_params.has(key)))
+  }
+}
diff --git a/tools/src/exporter/MimeTypes.ts b/tools/src/exporter/MimeTypes.ts
new file mode 100644
index 000000000..156d1cfce
--- /dev/null
+++ b/tools/src/exporter/MimeTypes.ts
@@ -0,0 +1,14 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*/
+
+export const APPLICATION_CBOR = 'application/cbor'
+export const APPLICATION_JSON = 'application/json'
+export const APPLICATION_SMILE = 'application/smile'
+export const APPLICATION_YAML = 'application/yaml'
+export const TEXT_PLAIN = 'text/plain'
\ No newline at end of file
diff --git a/tools/src/exporter/PostmanManager.ts b/tools/src/exporter/PostmanManager.ts
new file mode 100644
index 000000000..f442d0831
--- /dev/null
+++ b/tools/src/exporter/PostmanManager.ts
@@ -0,0 +1,84 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*/
+
+import fs from 'fs';
+
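+// Accumulates requests into a Postman v2.1 collection and writes it to disk as JSON.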
+export class PostmanManager {
+  private readonly collection: any;
+  private readonly collection_path: string;
+
+  constructor(collection_path: string = './postman_collection.json') {
+    this.collection_path = collection_path;
+    this.collection = {
+      info: {
+        name: "OpenSearch tests",
+        schema: "https://schema.getpostman.com/json/collection/v2.1.0/collection.json",
+      },
+      item: [],
+    };
+  }
+
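+  // Adds a request to the collection, nesting it in folders that mirror the
+  // story file's path below the tests/ directory.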
+  add_to_collection(
+    url: string | undefined,
+    method: string,
+    path: string,
+    headers: Record<string, any> | undefined,
+    params: Record<string, any>,
+    body: any,
+    content_type: string,
+    full_path?: string
+  ): void {
+    const folders: string[] = [];
+    if (full_path != null && full_path !== '') {
+      const path_parts = full_path.split('/').filter(Boolean);
+      const start_index = path_parts.indexOf('tests');
+      if (start_index !== -1) {
+        folders.push(...path_parts.slice(start_index + 1));
+      }
+    }
+
+    let current_folder = this.collection.item;
+    folders.forEach(folder => {
+      let existing_folder = current_folder.find((item: any) => item.name === folder);
+
+      if (existing_folder == null) {
+        existing_folder = { name: folder, item: [] };
+        current_folder.push(existing_folder);
+      }
+      current_folder = existing_folder.item;
+    });
+
+    const item = {
+      name: path,
+      request: {
+        method,
+        header: Object.entries(headers ?? {}).map(([key, value]) => ({ key, value })),
+        url: {
+          raw: `${url}${path}`,
+          host: url,
+          path: path.split('/').filter(Boolean),
+          query: Object.entries(params).map(([key, value]) => ({ key, value: String(value) })),
+        },
+        body: body != null ? { mode: content_type === 'application/json' ? 'raw' : 'formdata', raw: JSON.stringify(body) } : undefined,
+      },
+    };
+
+    const exists = current_folder.some((existing_item: any) => existing_item.name === item.name);
+    if (!exists) {
+      current_folder.push(item);
+    }
+  }
+
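+  // Writes the collection to the configured path as pretty-printed JSON.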
+  save_collection(): void {
+    fs.writeFileSync(this.collection_path, JSON.stringify(this.collection, null, 2));
+  }
+}
\ No newline at end of file
diff --git a/tools/src/exporter/StoryParser.ts b/tools/src/exporter/StoryParser.ts
new file mode 100644
index 000000000..a718a9173
--- /dev/null
+++ b/tools/src/exporter/StoryParser.ts
@@ -0,0 +1,41 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*/
+
+import _ from "lodash";
+import { ParsedChapter, ParsedStory } from "tester/types/parsed_story.types";
+import { Chapter, Story } from "tester/types/story.types";
+
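+// Parses a story, expanding each chapter into one parsed chapter per HTTP method.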
+export default class StoryParser {
+  static parse(story: Story): ParsedStory {
+    return {
+      ...story,
+      chapters: this.#expand_chapters(story.chapters),
+    }
+  }
+
+  static #chapter_methods(methods: string[] | string): string[] {
+    return [...(Array.isArray(methods) ? methods : [methods])]
+  }
+
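+  // Expands chapters that declare multiple methods into one chapter per method,
+  // appending the method to the synopsis to keep chapter names distinct.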
+  static #expand_chapters(chapters?: Chapter[]): ParsedChapter[] {
+    if (chapters === undefined) return []
+    return _.flatMap(_.map(chapters, (chapter) => {
+      return _.map(this.#chapter_methods(chapter.method), (method) => {
+        const synopsis = chapter.synopsis && Array.isArray(chapter.method)
+          ? `${chapter.synopsis} [${method}]`
+          : chapter.synopsis
+        return {
+          ...chapter,
+          synopsis,
+          method
+        }
+      })
+    })) as ParsedChapter[]
+  }
+}
\ No newline at end of file
diff --git a/tools/src/exporter/export.ts b/tools/src/exporter/export.ts
new file mode 100644
index 000000000..204d67d28
--- /dev/null
+++ b/tools/src/exporter/export.ts
@@ -0,0 +1,27 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*/
+
+import { Logger, LogLevel } from '../Logger'
+import { Command, Option } from '@commander-js/extra-typings'
+import ExportChapters from './ExportChapters'
+import { PostmanManager } from './PostmanManager'
+
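+// Exports the test stories into a Postman collection, e.g.:
+//   npm run export:postman -- --tests-path ./tests/default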
+const command = new Command()
+  .description('Export the test stories as a Postman collection.')
+  .addOption(new Option('--tests, --tests-path <path>', 'path to the root folder of the tests').default('./tests/default'))
+  .allowExcessArguments(false)
+  .parse()
+
+const opts = command.opts()
+const logger = new Logger(LogLevel.warn)
+const postman_manager = new PostmanManager()
+const runner = new ExportChapters(logger, postman_manager)
+
+runner.run(opts.testsPath)
\ No newline at end of file
diff --git a/tools/tests/exporter/GeneratePostman.test.ts b/tools/tests/exporter/GeneratePostman.test.ts
new file mode 100644
index 000000000..9873f668a
--- /dev/null
+++ b/tools/tests/exporter/GeneratePostman.test.ts
@@ -0,0 +1,47 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*/
+
+import fs from 'fs'
+import tmp from 'tmp'
+import { PostmanManager } from 'exporter/PostmanManager'
+import ExportChapters from 'exporter/ExportChapters'
+import { Logger, LogLevel } from 'Logger'
+
+describe('ExportChapters', () => {
+  let logger: Logger;
+  let postman_manager: PostmanManager;
+  let runner: ExportChapters;
+
+  describe('defaults', () => {
+
+    describe('run()', () => {
+      let temp: tmp.DirResult
+      let filename: string
+
+      beforeEach(() => {
+        temp = tmp.dirSync()
+        filename = `${temp.name}/postman_collection.json`
+        logger = new Logger(LogLevel.warn)
+        postman_manager = new PostmanManager(filename);
+        runner = new ExportChapters(logger, postman_manager)
+      })
+
+      afterEach(() => {
+        fs.unlinkSync(filename)
+        temp.removeCallback()
+      })
+
+      test('writes a postman collection', () => {
+        runner.run('./tools/tests/exporter/fixtures')
+        expect(fs.readFileSync('./tools/tests/exporter/fixtures/postman_collection.json', 'utf8'))
+          .toEqual(fs.readFileSync(filename, 'utf8'))
+      })
+    })
+  })
+})
diff --git a/tools/tests/exporter/export.test.ts b/tools/tests/exporter/export.test.ts
new file mode 100644
index 000000000..cef4f5eb9
--- /dev/null
+++ b/tools/tests/exporter/export.test.ts
@@ -0,0 +1,22 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*/
+
+import { spawnSync } from "child_process"
+
+const spec = (args: string[]): any => {
+  const start = spawnSync('ts-node', ['tools/src/exporter/export.ts'].concat(args))
+  return {
+    stdout: start.stdout?.toString(),
+    stderr: start.stderr?.toString()
+  }
+}
+
+test('--help', () => {
+  expect(spec(['--help']).stdout).toContain('Usage: export [options]')
+})
diff --git a/tools/tests/exporter/fixtures/postman_collection.json b/tools/tests/exporter/fixtures/postman_collection.json
new file mode 100644
index 000000000..31786a4e1
--- /dev/null
+++ b/tools/tests/exporter/fixtures/postman_collection.json
@@ -0,0 +1,42 @@
+{
+  "info": {
+    "name": "OpenSearch tests",
+    "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
+  },
+  "item": [
+    {
+      "name": "exporter",
+      "item": [
+        {
+          "name": "fixtures",
+          "item": [
+            {
+              "name": "valid_story.yaml",
+              "item": [
+                {
+                  "name": "/{index}",
+                  "request": {
+                    "method": "PUT",
+                    "header": [],
+                    "url": {
+                      "raw": "url/{index}",
+                      "host": "url",
+                      "path": [
+                        "{index}"
+                      ],
+                      "query": []
+                    },
+                    "body": {
+                      "mode": "raw",
+                      "raw": "{}"
+                    }
+                  }
+                }
+              ]
+            }
+          ]
+        }
+      ]
+    }
+  ]
+}
\ No newline at end of file
diff --git a/tools/tests/exporter/fixtures/valid_story.yaml b/tools/tests/exporter/fixtures/valid_story.yaml
new file mode 100644
index 000000000..5dabe4b65
--- /dev/null
+++ b/tools/tests/exporter/fixtures/valid_story.yaml
@@ -0,0 +1,13 @@
+$schema: ../../../../json_schemas/test_story.schema.yaml
+
+description: This story file is a valid story file.
+chapters:
+  - synopsis: A PUT method.
+    path: /{index}
+    method: PUT
+    parameters:
+      index: one
+epilogues:
+  - path: /one
+    method: DELETE
+    status: [200, 404]
\ No newline at end of file