diff --git a/.github/workflows/check-branch.yml b/.github/workflows/check-branch.yml index e79864e0..b4907721 100644 --- a/.github/workflows/check-branch.yml +++ b/.github/workflows/check-branch.yml @@ -8,13 +8,13 @@ jobs: runs-on: ubuntu-latest steps: - name: Comment PR - if: github.base_ref == 'master' && github.head_ref != 'staging' + if: github.base_ref == 'master' && github.head_ref != 'development' uses: thollander/actions-comment-pull-request@v2 with: message: | - We regret to inform you that you are currently not able to merge your changes into the master branch due to restrictions applied by our SRE team. To proceed with merging your changes, we kindly request that you create a pull request from the next branch. Our team will then review the changes and work with you to ensure a successful merge into the master branch. + We regret to inform you that you are currently not able to merge your changes into the master branch due to restrictions applied by our SRE team. To proceed with merging your changes, we kindly request that you create a pull request from the development branch. Our team will then review the changes and work with you to ensure a successful merge into the master branch. - name: Check branch - if: github.base_ref == 'master' && github.head_ref != 'staging' + if: github.base_ref == 'master' && github.head_ref != 'development' run: | - echo "ERROR: We regret to inform you that you are currently not able to merge your changes into the master branch due to restrictions applied by our SRE team. To proceed with merging your changes, we kindly request that you create a pull request from the next branch. Our team will then review the changes and work with you to ensure a successful merge into the master branch." + echo "ERROR: We regret to inform you that you are currently not able to merge your changes into the master branch due to restrictions applied by our SRE team. 
To proceed with merging your changes, we kindly request that you create a pull request from the development branch. Our team will then review the changes and work with you to ensure a successful merge into the master branch." exit 1 \ No newline at end of file diff --git a/.github/workflows/check-version-bump.yml b/.github/workflows/check-version-bump.yml new file mode 100644 index 00000000..acce65c3 --- /dev/null +++ b/.github/workflows/check-version-bump.yml @@ -0,0 +1,58 @@ +# Ensures package.json and CHANGELOG.md are bumped compared to the latest tag when relevant files change. +name: Check Version Bump + +on: + pull_request: + paths: + - 'package.json' + - 'CHANGELOG.md' + +jobs: + version-bump: + name: Version & Changelog bump + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Setup Node + uses: actions/setup-node@v4 + with: + node-version: '22.x' + + - name: Check version bump + run: | + set -e + PKG_VERSION=$(node -p "require('./package.json').version.replace(/^v/, '')") + if [ -z "$PKG_VERSION" ]; then + echo "::error::Could not read version from package.json" + exit 1 + fi + git fetch --tags --force 2>/dev/null || true + LATEST_TAG=$(git describe --tags --abbrev=0 2>/dev/null || true) + if [ -z "$LATEST_TAG" ]; then + echo "No existing tags found. Skipping version-bump check (first release)." + exit 0 + fi + LATEST_VERSION="${LATEST_TAG#v}" + LATEST_VERSION="${LATEST_VERSION%%-*}" + if [ "$(printf '%s\n' "$LATEST_VERSION" "$PKG_VERSION" | sort -V | tail -1)" != "$PKG_VERSION" ]; then + echo "::error::Version bump required: package.json version ($PKG_VERSION) is not greater than latest tag ($LATEST_TAG). Please bump the version in package.json." + exit 1 + fi + if [ "$PKG_VERSION" = "$LATEST_VERSION" ]; then + echo "::error::Version bump required: package.json version ($PKG_VERSION) equals latest tag ($LATEST_TAG). Please bump the version in package.json." 
+ exit 1 + fi + CHANGELOG_VERSION=$(sed -nE 's/^## \[v?([0-9]+\.[0-9]+\.[0-9]+).*/\1/p' CHANGELOG.md | head -1) + if [ -z "$CHANGELOG_VERSION" ]; then + echo "::error::Could not find a version entry in CHANGELOG.md (expected line like '## [v1.0.0](...)')." + exit 1 + fi + if [ "$CHANGELOG_VERSION" != "$PKG_VERSION" ]; then + echo "::error::CHANGELOG version mismatch: CHANGELOG.md top version ($CHANGELOG_VERSION) does not match package.json version ($PKG_VERSION). Please add or update the CHANGELOG entry for $PKG_VERSION." + exit 1 + fi + echo "Version bump check passed: package.json and CHANGELOG.md are at $PKG_VERSION (latest tag: $LATEST_TAG)." diff --git a/.github/workflows/unit-test.yml b/.github/workflows/unit-test.yml index 5cb5242a..47a0742f 100644 --- a/.github/workflows/unit-test.yml +++ b/.github/workflows/unit-test.yml @@ -4,7 +4,6 @@ on: branches: - master - main - - staging - development jobs: build-test: diff --git a/.gitignore b/.gitignore index 805b88dc..38feb759 100644 --- a/.gitignore +++ b/.gitignore @@ -71,3 +71,4 @@ jsdocs .early.coverage # Snyk Security Extension - AI Rules (auto-generated) .cursor/rules/snyk_rules.mdc +.vscode/settings.json \ No newline at end of file diff --git a/.husky/post-checkout b/.husky/post-checkout new file mode 100755 index 00000000..cb6b9f27 --- /dev/null +++ b/.husky/post-checkout @@ -0,0 +1,40 @@ +#!/usr/bin/env sh +# When switching to a branch that doesn't exist on remote (e.g. newly created), +# pull and merge origin/main or origin/master into current branch. Does not push. + +# Only run on branch checkout (not file checkout) +if [ "$3" != "1" ]; then + exit 0 +fi + +# Skip if we don't have a remote +if ! 
git remote get-url origin >/dev/null 2>&1; then + exit 0 +fi + +CURRENT_BRANCH=$(git rev-parse --abbrev-ref HEAD) + +# Skip main/master/development - no need to merge base into these +case "$CURRENT_BRANCH" in + main|master|development) exit 0 ;; +esac + +# Only run when current branch does not exist on origin (treat as new local branch) +if git ls-remote --heads origin "$CURRENT_BRANCH" 2>/dev/null | grep -q .; then + echo "post-checkout: $CURRENT_BRANCH exists on origin, skipping merge." + exit 0 +fi + +# Prefer main, fallback to master +if git rev-parse --verify origin/main >/dev/null 2>&1; then + BASE=origin/main +elif git rev-parse --verify origin/master >/dev/null 2>&1; then + BASE=origin/master +else + exit 0 +fi + +echo "New branch detected: merging latest $BASE into $CURRENT_BRANCH (local only, not pushing)..." +git fetch origin +git merge "$BASE" --no-edit --no-ff +echo "Done. Merge is local only; push when ready." diff --git a/CHANGELOG.md b/CHANGELOG.md index d1b5184d..4659fb7b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,9 @@ # Changelog +## [v1.28.0](https://github.com/contentstack/contentstack-management-javascript/tree/v1.28.0) (2026-03-02) +- Enh + - Added DAM 2.0 query support + ## [v1.27.6](https://github.com/contentstack/contentstack-management-javascript/tree/v1.27.5) (2026-02-23) - Fix - Skip token refresh on 401 when API returns error_code 161 (environment/permission) so the actual API error is returned instead of triggering refresh and a generic "Unable to refresh token" message diff --git a/README.md b/README.md index f844b6f3..32f0dcaf 100644 --- a/README.md +++ b/README.md @@ -71,6 +71,39 @@ contentstackClient.stack({ api_key: 'API_KEY', management_token: 'MANAGEMENT_TOK console.log(contenttype) }) ``` + +### Host and Region Configuration +You can configure the SDK to use a specific region or custom host for API requests. + +#### Region +The SDK supports multiple regions. 
Valid region values are: `NA`, `EU`, `AU`, `AZURE_NA`, `AZURE_EU`, `GCP_NA`, `GCP_EU`. The default region is `NA`. + +```javascript +// Use EU region +contentstackClient = contentstack.client({ + authtoken: 'AUTHTOKEN', + region: 'EU' +}) +``` + +#### Custom Host +You can specify a custom host for API requests. If both `host` and `region` are provided, the `host` parameter takes priority. + +```javascript +// Use custom host +contentstackClient = contentstack.client({ + authtoken: 'AUTHTOKEN', + host: 'api.contentstack.io' +}) + +// Custom host takes priority over region +contentstackClient = contentstack.client({ + authtoken: 'AUTHTOKEN', + region: 'EU', + host: 'custom-api.example.com' +}) +``` + ### Contentstack Management JavaScript SDK: 5-minute Quickstart #### Initializing Your SDK: To use the JavaScript CMA SDK, you need to first initialize it. To do this, use the following code: @@ -124,7 +157,7 @@ contentstackClient.stack({ api_key: 'API_KEY' }).asset().create({ asset }) - [Content Management API Docs](https://www.contentstack.com/docs/developers/apis/content-management-api) ### The MIT License (MIT) -Copyright © 2012-2025 [Contentstack](https://www.contentstack.com/). All Rights Reserved +Copyright © 2012-2026 [Contentstack](https://www.contentstack.com/). 
All Rights Reserved Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: diff --git a/lib/core/oauthHandler.js b/lib/core/oauthHandler.js index 6d64b586..2ae5e678 100644 --- a/lib/core/oauthHandler.js +++ b/lib/core/oauthHandler.js @@ -1,5 +1,6 @@ import errorFormatter from './contentstackError' import { ERROR_MESSAGES } from './errorMessages' +import { getStoredCodeVerifier, storeCodeVerifier, clearStoredCodeVerifier } from './pkceStorage' /** * @description OAuthHandler class to handle OAuth authorization and token management @@ -35,7 +36,13 @@ export default class OAuthHandler { // Only generate PKCE codeVerifier and codeChallenge if clientSecret is not provided if (!this.clientSecret) { - this.codeVerifier = this.generateCodeVerifier() + const stored = getStoredCodeVerifier(this.appId, this.clientId, this.redirectUri) + if (stored) { + this.codeVerifier = stored + } else { + this.codeVerifier = this.generateCodeVerifier() + storeCodeVerifier(this.appId, this.clientId, this.redirectUri, this.codeVerifier) + } this.codeChallenge = null } } @@ -139,8 +146,10 @@ export default class OAuthHandler { const response = await this.axiosInstance.post(`${this.developerHubBaseUrl}/token`, body) this._saveTokens(response.data) + clearStoredCodeVerifier(this.appId, this.clientId, this.redirectUri) // Clear immediately after successful exchange to prevent replay return response.data } catch (error) { + clearStoredCodeVerifier(this.appId, this.clientId, this.redirectUri) // Clear on error to prevent replay attacks errorFormatter(error) } } diff --git a/lib/core/pkceStorage.js b/lib/core/pkceStorage.js new 
file mode 100644 index 00000000..3f453406 --- /dev/null +++ b/lib/core/pkceStorage.js @@ -0,0 +1,68 @@ +/** + * PKCE code_verifier persistence in sessionStorage for browser SPAs. + * Survives OAuth redirects; not used in Node. RFC 7636 / OAuth 2.0 for Browser-Based Apps. + */ + +const PKCE_STORAGE_KEY_PREFIX = 'contentstack_oauth_pkce' +const PKCE_STORAGE_EXPIRY_MS = 10 * 60 * 1000 // 10 minutes + +function isBrowser () { + return typeof window !== 'undefined' && typeof window.sessionStorage !== 'undefined' +} + +function getStorageKey (appId, clientId, redirectUri) { + return `${PKCE_STORAGE_KEY_PREFIX}_${appId}_${clientId}_${redirectUri}` +} + +/** + * @param {string} appId + * @param {string} clientId + * @param {string} redirectUri + * @returns {string|null} code_verifier if valid and not expired, otherwise null + */ +export function getStoredCodeVerifier (appId, clientId, redirectUri) { + if (!isBrowser()) return null + try { + const raw = window.sessionStorage.getItem(getStorageKey(appId, clientId, redirectUri)) + if (!raw) return null + const { codeVerifier, expiresAt } = JSON.parse(raw) + if (!codeVerifier || !expiresAt || Date.now() > expiresAt) return null + return codeVerifier + } catch { + return null + } +} + +/** + * @param {string} appId + * @param {string} clientId + * @param {string} redirectUri + * @param {string} codeVerifier + */ +export function storeCodeVerifier (appId, clientId, redirectUri, codeVerifier) { + if (!isBrowser()) return + try { + const key = getStorageKey(appId, clientId, redirectUri) + const value = JSON.stringify({ + codeVerifier, + expiresAt: Date.now() + PKCE_STORAGE_EXPIRY_MS + }) + window.sessionStorage.setItem(key, value) + } catch { + // Ignore storage errors (e.g. 
private mode); fall back to memory-only + } +} + +/** + * @param {string} appId + * @param {string} clientId + * @param {string} redirectUri + */ +export function clearStoredCodeVerifier (appId, clientId, redirectUri) { + if (!isBrowser()) return + try { + window.sessionStorage.removeItem(getStorageKey(appId, clientId, redirectUri)) + } catch { + // Ignore + } +} diff --git a/lib/stack/bulkOperation/index.js b/lib/stack/bulkOperation/index.js index 27a6fbf6..6c0f1438 100644 --- a/lib/stack/bulkOperation/index.js +++ b/lib/stack/bulkOperation/index.js @@ -126,6 +126,49 @@ export function BulkOperation (http, data = {}) { } } + /** + * The getJobItems request allows you to get the items of a bulk job. + * Response structure varies based on query params: items (always), skip/limit/total_count (when include_count=true), and other fields per params. + * @memberof BulkOperation + * @func getJobItems + * @returns {Promise} Response Object. Structure varies with params - always includes items array; may include skip, limit, total_count when include_count=true. + * @param {String} job_id - The ID of the job. + * @param {Object} [params={}] - Query parameters. Supports: include_count, skip, limit, include_reference, status, type, ct (content type UID or array), api_version, and any other dynamic query params. 
+ * @example + * client.stack({ api_key: 'api_key'}).bulkOperation().getJobItems('job_id') + * .then((response) => { console.log(response) }) + * @example + * client.stack({ api_key: 'api_key'}).bulkOperation().getJobItems('job_id', { skip: 0, limit: 50, include_count: true }) + * .then((response) => { console.log(response) }) + */ + // eslint-disable-next-line camelcase + this.getJobItems = async (job_id, params = {}) => { + // eslint-disable-next-line camelcase + const { api_version = '3.2', ...queryParams } = cloneDeep(params) + // eslint-disable-next-line camelcase + this.urlPath = `/bulk/jobs/${job_id}/items` + const headers = { + headers: { + ...cloneDeep(this.stackHeaders) + } + } + // eslint-disable-next-line camelcase + if (api_version) headers.headers.api_version = api_version + if (Object.keys(queryParams).length > 0) headers.params = queryParams + try { + const response = await http.get(this.urlPath, headers) + if (response.data) { + // eslint-disable-next-line camelcase + if (api_version) delete headers.headers.api_version + return response.data + } + } catch (error) { + // eslint-disable-next-line camelcase + if (api_version) delete headers.headers.api_version + console.error(error) + } + } + /** * The Publish entries and assets in bulk request allows you to publish multiple entries and assets at the same time. 
* @memberof BulkOperation diff --git a/package-lock.json b/package-lock.json index 3de2cf12..55497667 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@contentstack/management", - "version": "1.27.6", + "version": "1.28.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@contentstack/management", - "version": "1.27.6", + "version": "1.28.0", "license": "MIT", "dependencies": { "@contentstack/utils": "^1.7.0", @@ -1930,9 +1930,9 @@ "license": "MIT" }, "node_modules/@contentstack/utils": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@contentstack/utils/-/utils-1.7.0.tgz", - "integrity": "sha512-wNWNt+wkoGJzCr5ZhAMKWJ5ND5xbD7N3t++Y6s1O+FB+AFzJszqCT740j6VqwjhQzw5sGfHoGjHIvlQA9dCcBw==", + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/@contentstack/utils/-/utils-1.7.1.tgz", + "integrity": "sha512-b/0t1malpJeFCNd9+1uN3BuO8mRn2b5+aNtrYEZ6YlSNjYNRu9IjqSxZ5Clhs5267950UV1ayhgFE8z3qre2eQ==", "license": "MIT" }, "node_modules/@discoveryjs/json-ext": { @@ -2192,13 +2192,13 @@ } }, "node_modules/@isaacs/cliui/node_modules/strip-ansi": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz", - "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.2.0.tgz", + "integrity": "sha512-yDPMNjp4WyfYBkHnjIRLfca1i6KMyGCtsVgoKe/z1+6vukgaENdgGBZt+ZmKPc4gavvEZ5OgHfHdrazhgNyG7w==", "dev": true, "license": "MIT", "dependencies": { - "ansi-regex": "^6.0.1" + "ansi-regex": "^6.2.2" }, "engines": { "node": ">=12" @@ -2690,27 +2690,14 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/@jest/reporters/node_modules/balanced-match": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-4.0.4.tgz", - "integrity": 
"sha512-BLrgEcRTwX2o6gGxGOCNyMvGSp35YofuYzw9h1IMTRmKqttAZZVU67bdb9Pr2vUHA8+j3i2tJfjO6C6+4myGTA==", - "dev": true, - "license": "MIT", - "engines": { - "node": "18 || 20 || >=22" - } - }, "node_modules/@jest/reporters/node_modules/brace-expansion": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.3.tgz", - "integrity": "sha512-fy6KJm2RawA5RcHkLa1z/ScpBeA762UF9KmZQxwIbDtRJrgLzM10depAiEQ+CXYcoiqW1/m96OAAoke2nE9EeA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", "dev": true, "license": "MIT", "dependencies": { - "balanced-match": "^4.0.2" - }, - "engines": { - "node": "18 || 20 || >=22" + "balanced-match": "^1.0.0" } }, "node_modules/@jest/reporters/node_modules/chalk": { @@ -2753,13 +2740,13 @@ } }, "node_modules/@jest/reporters/node_modules/minimatch": { - "version": "9.0.6", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.6.tgz", - "integrity": "sha512-kQAVowdR33euIqeA0+VZTDqU+qo1IeVY+hrKYtZMio3Pg0P0vuh/kwRylLUddJhB6pf3q/botcOvRtx4IN1wqQ==", + "version": "9.0.9", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.9.tgz", + "integrity": "sha512-OBwBN9AL4dqmETlpS2zasx+vTeWclWzkblfZk7KTA5j3jeOONz/tRCnZomUyvNg83wL5Zv9Ss6HMJXAgL8R2Yg==", "dev": true, "license": "ISC", "dependencies": { - "brace-expansion": "^5.0.2" + "brace-expansion": "^2.0.2" }, "engines": { "node": ">=16 || 14 >=14.17" @@ -3788,9 +3775,9 @@ "license": "MIT" }, "node_modules/@types/node": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@types/node/-/node-25.3.0.tgz", - "integrity": "sha512-4K3bqJpXpqfg2XKGK9bpDTc6xO/xoUP/RBWS7AtRMug6zZFaRekiLzjVtAoZMquxoAbzBvy5nxQ7veS5eYzf8A==", + "version": "25.3.2", + "resolved": "https://registry.npmjs.org/@types/node/-/node-25.3.2.tgz", + "integrity": 
"sha512-RpV6r/ij22zRRdyBPcxDeKAzH43phWVKEjL2iksqo1Vz3CuBUrgmPpPhALKiRfU7OMCmeeO9vECBMsV0hMTG8Q==", "dev": true, "license": "MIT", "dependencies": { @@ -9433,27 +9420,14 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/jest-config/node_modules/balanced-match": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-4.0.4.tgz", - "integrity": "sha512-BLrgEcRTwX2o6gGxGOCNyMvGSp35YofuYzw9h1IMTRmKqttAZZVU67bdb9Pr2vUHA8+j3i2tJfjO6C6+4myGTA==", - "dev": true, - "license": "MIT", - "engines": { - "node": "18 || 20 || >=22" - } - }, "node_modules/jest-config/node_modules/brace-expansion": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.3.tgz", - "integrity": "sha512-fy6KJm2RawA5RcHkLa1z/ScpBeA762UF9KmZQxwIbDtRJrgLzM10depAiEQ+CXYcoiqW1/m96OAAoke2nE9EeA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", "dev": true, "license": "MIT", "dependencies": { - "balanced-match": "^4.0.2" - }, - "engines": { - "node": "18 || 20 || >=22" + "balanced-match": "^1.0.0" } }, "node_modules/jest-config/node_modules/chalk": { @@ -9496,13 +9470,13 @@ } }, "node_modules/jest-config/node_modules/minimatch": { - "version": "9.0.6", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.6.tgz", - "integrity": "sha512-kQAVowdR33euIqeA0+VZTDqU+qo1IeVY+hrKYtZMio3Pg0P0vuh/kwRylLUddJhB6pf3q/botcOvRtx4IN1wqQ==", + "version": "9.0.9", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.9.tgz", + "integrity": "sha512-OBwBN9AL4dqmETlpS2zasx+vTeWclWzkblfZk7KTA5j3jeOONz/tRCnZomUyvNg83wL5Zv9Ss6HMJXAgL8R2Yg==", "dev": true, "license": "ISC", "dependencies": { - "brace-expansion": "^5.0.2" + "brace-expansion": "^2.0.2" }, "engines": { "node": ">=16 || 14 >=14.17" @@ -10143,27 
+10117,14 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/jest-runtime/node_modules/balanced-match": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-4.0.4.tgz", - "integrity": "sha512-BLrgEcRTwX2o6gGxGOCNyMvGSp35YofuYzw9h1IMTRmKqttAZZVU67bdb9Pr2vUHA8+j3i2tJfjO6C6+4myGTA==", - "dev": true, - "license": "MIT", - "engines": { - "node": "18 || 20 || >=22" - } - }, "node_modules/jest-runtime/node_modules/brace-expansion": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.3.tgz", - "integrity": "sha512-fy6KJm2RawA5RcHkLa1z/ScpBeA762UF9KmZQxwIbDtRJrgLzM10depAiEQ+CXYcoiqW1/m96OAAoke2nE9EeA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", "dev": true, "license": "MIT", "dependencies": { - "balanced-match": "^4.0.2" - }, - "engines": { - "node": "18 || 20 || >=22" + "balanced-match": "^1.0.0" } }, "node_modules/jest-runtime/node_modules/chalk": { @@ -10206,13 +10167,13 @@ } }, "node_modules/jest-runtime/node_modules/minimatch": { - "version": "9.0.6", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.6.tgz", - "integrity": "sha512-kQAVowdR33euIqeA0+VZTDqU+qo1IeVY+hrKYtZMio3Pg0P0vuh/kwRylLUddJhB6pf3q/botcOvRtx4IN1wqQ==", + "version": "9.0.9", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.9.tgz", + "integrity": "sha512-OBwBN9AL4dqmETlpS2zasx+vTeWclWzkblfZk7KTA5j3jeOONz/tRCnZomUyvNg83wL5Zv9Ss6HMJXAgL8R2Yg==", "dev": true, "license": "ISC", "dependencies": { - "brace-expansion": "^5.0.2" + "brace-expansion": "^2.0.2" }, "engines": { "node": ">=16 || 14 >=14.17" @@ -11264,9 +11225,9 @@ } }, "node_modules/minimatch": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.3.tgz", - "integrity": 
"sha512-M2GCs7Vk83NxkUyQV1bkABc4yxgz9kILhHImZiBPAZ9ybuvCb0/H7lEl5XvIg3g+9d4eNotkZA5IWwYl0tibaA==", + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz", + "integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==", "dev": true, "license": "ISC", "dependencies": { @@ -11355,27 +11316,14 @@ "node": ">= 0.6.0" } }, - "node_modules/mocha/node_modules/balanced-match": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-4.0.4.tgz", - "integrity": "sha512-BLrgEcRTwX2o6gGxGOCNyMvGSp35YofuYzw9h1IMTRmKqttAZZVU67bdb9Pr2vUHA8+j3i2tJfjO6C6+4myGTA==", - "dev": true, - "license": "MIT", - "engines": { - "node": "18 || 20 || >=22" - } - }, "node_modules/mocha/node_modules/brace-expansion": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.3.tgz", - "integrity": "sha512-fy6KJm2RawA5RcHkLa1z/ScpBeA762UF9KmZQxwIbDtRJrgLzM10depAiEQ+CXYcoiqW1/m96OAAoke2nE9EeA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", "dev": true, "license": "MIT", "dependencies": { - "balanced-match": "^4.0.2" - }, - "engines": { - "node": "18 || 20 || >=22" + "balanced-match": "^1.0.0" } }, "node_modules/mocha/node_modules/chokidar": { @@ -11430,13 +11378,13 @@ } }, "node_modules/mocha/node_modules/minimatch": { - "version": "9.0.6", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.6.tgz", - "integrity": "sha512-kQAVowdR33euIqeA0+VZTDqU+qo1IeVY+hrKYtZMio3Pg0P0vuh/kwRylLUddJhB6pf3q/botcOvRtx4IN1wqQ==", + "version": "9.0.9", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.9.tgz", + "integrity": "sha512-OBwBN9AL4dqmETlpS2zasx+vTeWclWzkblfZk7KTA5j3jeOONz/tRCnZomUyvNg83wL5Zv9Ss6HMJXAgL8R2Yg==", "dev": true, 
"license": "ISC", "dependencies": { - "brace-expansion": "^5.0.2" + "brace-expansion": "^2.0.2" }, "engines": { "node": ">=16 || 14 >=14.17" @@ -13350,9 +13298,9 @@ } }, "node_modules/rimraf/node_modules/minimatch": { - "version": "10.2.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.2.tgz", - "integrity": "sha512-+G4CpNBxa5MprY+04MbgOw1v7So6n5JY166pFi9KfYwT78fxScCeSNQSNzp6dpPSW2rONOps6Ocam1wFhCgoVw==", + "version": "10.2.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.4.tgz", + "integrity": "sha512-oRjTw/97aTBN0RHbYCdtF1MQfvusSIBQM0IZEgzl6426+8jSC0nF1a/GmnVLpfB9yyr6g6FTqWqiZVbxrtaCIg==", "dev": true, "license": "BlueOak-1.0.0", "dependencies": { @@ -15095,9 +15043,9 @@ } }, "node_modules/webpack": { - "version": "5.105.2", - "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.105.2.tgz", - "integrity": "sha512-dRXm0a2qcHPUBEzVk8uph0xWSjV/xZxenQQbLwnwP7caQCYpqG1qddwlyEkIDkYn0K8tvmcrZ+bOrzoQ3HxCDw==", + "version": "5.105.3", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.105.3.tgz", + "integrity": "sha512-LLBBA4oLmT7sZdHiYE/PeVuifOxYyE2uL/V+9VQP7YSYdJU7bSf7H8bZRRxW8kEPMkmVjnrXmoR3oejIdX0xbg==", "dev": true, "license": "MIT", "dependencies": { @@ -15107,7 +15055,7 @@ "@webassemblyjs/ast": "^1.14.1", "@webassemblyjs/wasm-edit": "^1.14.1", "@webassemblyjs/wasm-parser": "^1.14.1", - "acorn": "^8.15.0", + "acorn": "^8.16.0", "acorn-import-phases": "^1.0.3", "browserslist": "^4.28.1", "chrome-trace-event": "^1.0.2", @@ -15125,7 +15073,7 @@ "tapable": "^2.3.0", "terser-webpack-plugin": "^5.3.16", "watchpack": "^2.5.1", - "webpack-sources": "^3.3.3" + "webpack-sources": "^3.3.4" }, "bin": { "webpack": "bin/webpack.js" diff --git a/package.json b/package.json index b6da0b8f..caa318e5 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@contentstack/management", - "version": "1.27.6", + "version": "1.28.0", "description": "The Content Management API is used to manage the content of 
your Contentstack account", "main": "./dist/node/contentstack-management.js", "browser": "./dist/web/contentstack-management.js", diff --git a/test/sanity-check/api/bulkOperation-test.js b/test/sanity-check/api/bulkOperation-test.js index 4e1ccc02..637d9ba0 100644 --- a/test/sanity-check/api/bulkOperation-test.js +++ b/test/sanity-check/api/bulkOperation-test.js @@ -536,6 +536,26 @@ describe('BulkOperation api test', () => { expect(response.body).to.not.equal(undefined) }) + it('should get job items for a completed job', async () => { + await waitForJobReady(jobId1) + + const response = await doBulkOperationWithManagementToken(tokenUidDev) + .getJobItems(jobId1) + + expect(response).to.not.equal(undefined) + expect(response.items).to.be.an('array') + }) + + it('should get job items with explicit api_version', async () => { + await waitForJobReady(jobId2) + + const response = await doBulkOperationWithManagementToken(tokenUidDev) + .getJobItems(jobId2, { api_version: '3.2' }) + + expect(response).to.not.equal(undefined) + expect(response.items).to.be.an('array') + }) + it('should delete a Management Token', done => { makeManagementToken(tokenUid) .delete() diff --git a/test/sanity-check/api/entry-test.js b/test/sanity-check/api/entry-test.js index ca3428eb..974cf84a 100644 --- a/test/sanity-check/api/entry-test.js +++ b/test/sanity-check/api/entry-test.js @@ -42,6 +42,26 @@ describe('Entry api Test', () => { .catch(done) }) + it('should entry fetch with asset_fields parameter - single value', done => { + makeEntry(singlepageCT.content_type.uid, entryUTD) + .fetch({ asset_fields: ['user_defined_fields'] }) + .then((entryResponse) => { + expect(entryResponse.uid).to.be.not.equal(null) + done() + }) + .catch(done) + }) + + it('should entry fetch with asset_fields parameter - multiple values', done => { + makeEntry(singlepageCT.content_type.uid, entryUTD) + .fetch({ asset_fields: ['user_defined_fields', 'embedded', 'ai_suggested', 'visual_markups'] }) + 
.then((entryResponse) => { + expect(entryResponse.uid).to.be.not.equal(null) + done() + }) + .catch(done) + }) + it('should localize entry with title update', done => { makeEntry(singlepageCT.content_type.uid, entryUTD) .fetch() @@ -127,6 +147,52 @@ describe('Entry api Test', () => { .catch(done) }) + it('should get all Entry with asset_fields parameter - single value', done => { + makeEntry(multiPageCT.content_type.uid) + .query({ include_count: true, asset_fields: ['user_defined_fields'] }).find() + .then((collection) => { + expect(collection.count).to.be.equal(3) + collection.items.forEach((entry) => { + expect(entry.uid).to.be.not.equal(null) + expect(entry.content_type_uid).to.be.equal(multiPageCT.content_type.uid) + }) + done() + }) + .catch(done) + }) + + it('should get all Entry with asset_fields parameter - multiple values', done => { + makeEntry(multiPageCT.content_type.uid) + .query({ include_count: true, asset_fields: ['user_defined_fields', 'embedded', 'ai_suggested', 'visual_markups'] }).find() + .then((collection) => { + expect(collection.count).to.be.equal(3) + collection.items.forEach((entry) => { + expect(entry.uid).to.be.not.equal(null) + expect(entry.content_type_uid).to.be.equal(multiPageCT.content_type.uid) + }) + done() + }) + .catch(done) + }) + + it('should get all Entry with asset_fields parameter combined with other query params', done => { + makeEntry(multiPageCT.content_type.uid) + .query({ + include_count: true, + include_content_type: true, + asset_fields: ['user_defined_fields', 'embedded'] + }).find() + .then((collection) => { + expect(collection.count).to.be.equal(3) + collection.items.forEach((entry) => { + expect(entry.uid).to.be.not.equal(null) + expect(entry.content_type_uid).to.be.equal(multiPageCT.content_type.uid) + }) + done() + }) + .catch(done) + }) + it('should publish Entry', done => { makeEntry(singlepageCT.content_type.uid, entryUTD) .publish({ diff --git a/test/unit/bulkOperation-test.js 
b/test/unit/bulkOperation-test.js index 07e1d7fb..6d067c59 100644 --- a/test/unit/bulkOperation-test.js +++ b/test/unit/bulkOperation-test.js @@ -11,6 +11,7 @@ describe('Contentstack BulkOperation test', () => { expect(bulkOperation.urlPath).to.be.equal('/bulk') expect(bulkOperation.stackHeaders).to.be.equal(undefined) expect(bulkOperation.addItems).to.not.equal(undefined) + expect(bulkOperation.getJobItems).to.not.equal(undefined) expect(bulkOperation.publish).to.not.equal(undefined) expect(bulkOperation.unpublish).to.not.equal(undefined) expect(bulkOperation.delete).to.not.equal(undefined) @@ -23,6 +24,7 @@ describe('Contentstack BulkOperation test', () => { expect(bulkOperation.stackHeaders).to.not.equal(undefined) expect(bulkOperation.stackHeaders.api_key).to.be.equal(stackHeadersMock.api_key) expect(bulkOperation.addItems).to.not.equal(undefined) + expect(bulkOperation.getJobItems).to.not.equal(undefined) expect(bulkOperation.publish).to.not.equal(undefined) expect(bulkOperation.unpublish).to.not.equal(undefined) expect(bulkOperation.delete).to.not.equal(undefined) @@ -218,6 +220,142 @@ describe('Contentstack BulkOperation test', () => { expect(response.notice).to.equal('Your job status request is successful.') expect(response.status).to.equal('completed') }) + + it('should fetch job items with default api_version', async () => { + const jobId = 'job_id' + + const mock = new MockAdapter(Axios) + mock.onGet(`/bulk/jobs/${jobId}/items`).reply((config) => { + expect(config.headers.api_version).to.equal('3.2') + return [200, { + items: [ + { uid: 'entry_uid', content_type_uid: 'content_type_uid', status: 'completed' } + ] + }] + }) + + const response = await makeBulkOperation().getJobItems(jobId) + expect(response.items).to.not.equal(undefined) + expect(response.items).to.be.an('array') + expect(response.items[0].uid).to.equal('entry_uid') + expect(response.items[0].content_type_uid).to.equal('content_type_uid') + }) + + it('should fetch job items with custom 
api_version', async () => { + const jobId = 'job_id' + const params = { api_version: '3.0' } + + const mock = new MockAdapter(Axios) + mock.onGet(`/bulk/jobs/${jobId}/items`).reply((config) => { + expect(config.headers.api_version).to.equal('3.0') + return [200, { items: [] }] + }) + + const response = await makeBulkOperation().getJobItems(jobId, params) + expect(response.items).to.not.equal(undefined) + expect(response.items).to.be.an('array') + }) + + it('should fetch job items with query params: include_count, skip, limit', async () => { + const jobId = 'job_id' + const params = { + include_count: true, + skip: 10, + limit: 50 + } + + const mock = new MockAdapter(Axios) + mock.onGet(`/bulk/jobs/${jobId}/items`).reply((config) => { + expect(config.params.include_count).to.equal(true) + expect(config.params.skip).to.equal(10) + expect(config.params.limit).to.equal(50) + return [200, { items: [], count: 0 }] + }) + + const response = await makeBulkOperation().getJobItems(jobId, params) + expect(response.items).to.be.an('array') + expect(response.count).to.equal(0) + }) + + it('should fetch job items with query params: include_reference, status, type', async () => { + const jobId = 'job_id' + const params = { + include_reference: false, + status: 'failed', + type: 'entry' + } + + const mock = new MockAdapter(Axios) + mock.onGet(`/bulk/jobs/${jobId}/items`).reply((config) => { + expect(config.params.include_reference).to.equal(false) + expect(config.params.status).to.equal('failed') + expect(config.params.type).to.equal('entry') + return [200, { items: [] }] + }) + + const response = await makeBulkOperation().getJobItems(jobId, params) + expect(response.items).to.be.an('array') + }) + + it('should fetch job items with ct (content type) filter as array', async () => { + const jobId = 'job_id' + const params = { ct: ['content_type_uid_1', 'content_type_uid_2'] } + + const mock = new MockAdapter(Axios) + mock.onGet(`/bulk/jobs/${jobId}/items`).reply((config) => { + 
expect(config.params.ct).to.be.an('array') + expect(config.params.ct).to.deep.equal(['content_type_uid_1', 'content_type_uid_2']) + return [200, { items: [] }] + }) + + const response = await makeBulkOperation().getJobItems(jobId, params) + expect(response.items).to.be.an('array') + }) + + it('should fetch job items with dynamic query params', async () => { + const jobId = 'job_id' + const params = { + include_count: true, + skip: 0, + limit: 100, + include_reference: true, + status: 'success', + type: 'asset', + ct: ['blog_post', 'author'], + custom_param: 'custom_value' + } + + const mock = new MockAdapter(Axios) + mock.onGet(`/bulk/jobs/${jobId}/items`).reply((config) => { + expect(config.params.include_count).to.equal(true) + expect(config.params.skip).to.equal(0) + expect(config.params.limit).to.equal(100) + expect(config.params.include_reference).to.equal(true) + expect(config.params.status).to.equal('success') + expect(config.params.type).to.equal('asset') + expect(config.params.ct).to.deep.equal(['blog_post', 'author']) + expect(config.params.custom_param).to.equal('custom_value') + return [200, { items: [], count: 0 }] + }) + + const response = await makeBulkOperation().getJobItems(jobId, params) + expect(response.items).to.be.an('array') + expect(response.count).to.equal(0) + }) + + it('should fetch job items with empty params object', async () => { + const jobId = 'job_id' + + const mock = new MockAdapter(Axios) + mock.onGet(`/bulk/jobs/${jobId}/items`).reply((config) => { + expect(config.headers.api_version).to.equal('3.2') + expect(config.params).to.equal(undefined) + return [200, { items: [] }] + }) + + const response = await makeBulkOperation().getJobItems(jobId, {}) + expect(response.items).to.be.an('array') + }) }) function makeBulkOperation (data) { diff --git a/test/unit/entry-test.js b/test/unit/entry-test.js index afcebe21..bbe35536 100644 --- a/test/unit/entry-test.js +++ b/test/unit/entry-test.js @@ -110,6 +110,97 @@ describe('Contentstack 
Entry test', () => { .catch(done) }) + it('Entry Query test with asset_fields parameter - single value', done => { + var mock = new MockAdapter(Axios) + mock.onGet('/content_types/content_type_uid/entries', (config) => { + // Check if asset_fields parameter is present in the request + const assetFields = config.params && (config.params['asset_fields[]'] || config.params.asset_fields) + if (Array.isArray(assetFields)) { + return assetFields.includes('user_defined_fields') + } + // Also check URL if params are serialized + if (config.url && config.url.includes('asset_fields')) { + return config.url.includes('user_defined_fields') + } + return false + }).reply(200, { + entries: [ + entryMock + ] + }) + makeEntry() + .query({ asset_fields: ['user_defined_fields'] }) + .find() + .then((entry) => { + checkEntry(entry.items[0]) + done() + }) + .catch(done) + }) + + it('Entry Query test with asset_fields parameter - multiple values', done => { + var mock = new MockAdapter(Axios) + mock.onGet('/content_types/content_type_uid/entries', (config) => { + // Check if asset_fields parameter is present in the request + const assetFields = config.params && (config.params['asset_fields[]'] || config.params.asset_fields) + if (Array.isArray(assetFields)) { + return assetFields.includes('user_defined_fields') && + assetFields.includes('embedded') && + assetFields.includes('ai_suggested') && + assetFields.includes('visual_markups') + } + // Also check URL if params are serialized + if (config.url && config.url.includes('asset_fields')) { + return config.url.includes('user_defined_fields') && + config.url.includes('embedded') && + config.url.includes('ai_suggested') && + config.url.includes('visual_markups') + } + return false + }).reply(200, { + entries: [ + entryMock + ] + }) + makeEntry() + .query({ asset_fields: ['user_defined_fields', 'embedded', 'ai_suggested', 'visual_markups'] }) + .find() + .then((entry) => { + checkEntry(entry.items[0]) + done() + }) + .catch(done) + }) + + 
it('Entry Query test with asset_fields parameter combined with other query params', done => { + var mock = new MockAdapter(Axios) + mock.onGet('/content_types/content_type_uid/entries', (config) => { + // Check if asset_fields parameter is present in the request + const assetFields = config.params && (config.params['asset_fields[]'] || config.params.asset_fields) + const hasAssetFields = Array.isArray(assetFields) + ? (assetFields.includes('user_defined_fields') && assetFields.includes('embedded')) + : (config.url && config.url.includes('asset_fields') && config.url.includes('user_defined_fields') && config.url.includes('embedded')) + return hasAssetFields && config.params && config.params.include_count === true + }).reply(200, { + entries: [ + entryMock + ], + count: 1 + }) + makeEntry() + .query({ + asset_fields: ['user_defined_fields', 'embedded'], + include_count: true + }) + .find() + .then((entry) => { + checkEntry(entry.items[0]) + expect(entry.count).to.be.equal(1) + done() + }) + .catch(done) + }) + it('Entry update test', done => { var mock = new MockAdapter(Axios) mock.onPut('/content_types/content_type_uid/entries/UID').reply(200, { @@ -152,6 +243,76 @@ describe('Contentstack Entry test', () => { .catch(done) }) + it('Entry fetch test with asset_fields parameter - single value', done => { + var mock = new MockAdapter(Axios) + mock.onGet('/content_types/content_type_uid/entries/UID', (config) => { + // Check if asset_fields parameter is present in the request + const assetFields = config.params && (config.params['asset_fields[]'] || config.params.asset_fields) + if (Array.isArray(assetFields)) { + return assetFields.includes('user_defined_fields') + } + // Also check URL if params are serialized + if (config.url && config.url.includes('asset_fields')) { + return config.url.includes('user_defined_fields') + } + return false + }).reply(200, { + entry: { + ...entryMock + } + }) + makeEntry({ + entry: { + ...systemUidMock + }, + stackHeaders: 
stackHeadersMock + }) + .fetch({ asset_fields: ['user_defined_fields'] }) + .then((entry) => { + checkEntry(entry) + done() + }) + .catch(done) + }) + + it('Entry fetch test with asset_fields parameter - multiple values', done => { + var mock = new MockAdapter(Axios) + mock.onGet('/content_types/content_type_uid/entries/UID', (config) => { + // Check if asset_fields parameter is present in the request + const assetFields = config.params && (config.params['asset_fields[]'] || config.params.asset_fields) + if (Array.isArray(assetFields)) { + return assetFields.includes('user_defined_fields') && + assetFields.includes('embedded') && + assetFields.includes('ai_suggested') && + assetFields.includes('visual_markups') + } + // Also check URL if params are serialized + if (config.url && config.url.includes('asset_fields')) { + return config.url.includes('user_defined_fields') && + config.url.includes('embedded') && + config.url.includes('ai_suggested') && + config.url.includes('visual_markups') + } + return false + }).reply(200, { + entry: { + ...entryMock + } + }) + makeEntry({ + entry: { + ...systemUidMock + }, + stackHeaders: stackHeadersMock + }) + .fetch({ asset_fields: ['user_defined_fields', 'embedded', 'ai_suggested', 'visual_markups'] }) + .then((entry) => { + checkEntry(entry) + done() + }) + .catch(done) + }) + it('Entry delete test', done => { var mock = new MockAdapter(Axios) mock.onDelete('/content_types/content_type_uid/entries/UID').reply(200, { diff --git a/test/unit/index.js b/test/unit/index.js index 0e40df62..90c965fc 100644 --- a/test/unit/index.js +++ b/test/unit/index.js @@ -39,3 +39,4 @@ require('./ungroupedVariants-test') require('./variantsWithVariantsGroup-test') require('./variants-entry-test') require('./oauthHandler-test') +require('./pkceStorage-test') diff --git a/test/unit/oauthHandler-test.js b/test/unit/oauthHandler-test.js index e1e774ef..275cffdb 100644 --- a/test/unit/oauthHandler-test.js +++ b/test/unit/oauthHandler-test.js @@ -329,4 
+329,144 @@ describe('OAuthHandler', () => { expect(deleteStub.called).to.be.false }) }) + + describe('PKCE sessionStorage (browser)', () => { + let sessionStorageStub + + beforeEach(() => { + sessionStorageStub = { + getItem: sandbox.stub(), + setItem: sandbox.stub(), + removeItem: sandbox.stub() + } + global.window = { sessionStorage: sessionStorageStub } + }) + + afterEach(() => { + delete global.window + }) + + it('should store code_verifier in sessionStorage when generated (browser)', () => { + sessionStorageStub.getItem.returns(null) + + const handler = new OAuthHandler( + axiosInstance, + 'appId', + 'clientId', + 'http://localhost:8184', + null + ) + + expect(handler.codeVerifier).to.be.a('string') + expect(handler.codeVerifier).to.have.lengthOf(128) + expect(sessionStorageStub.setItem.calledOnce).to.equal(true) + const [key, valueStr] = sessionStorageStub.setItem.firstCall.args + expect(key).to.include('contentstack_oauth_pkce') + expect(key).to.include('appId') + expect(key).to.include('clientId') + const value = JSON.parse(valueStr) + expect(value).to.have.property('codeVerifier', handler.codeVerifier) + expect(value).to.have.property('expiresAt') + expect(value.expiresAt).to.be.greaterThan(Date.now()) + }) + + it('should retrieve code_verifier from sessionStorage in constructor when valid', () => { + const storedVerifier = 'stored_code_verifier_xyz' + const storedValue = JSON.stringify({ + codeVerifier: storedVerifier, + expiresAt: Date.now() + 600000 + }) + sessionStorageStub.getItem.returns(storedValue) + + const handler = new OAuthHandler( + axiosInstance, + 'appId', + 'clientId', + 'http://localhost:8184', + null + ) + + expect(handler.codeVerifier).to.equal(storedVerifier) + expect(sessionStorageStub.setItem.called).to.equal(false) + }) + + it('should not use expired sessionStorage entry and should generate new code_verifier', () => { + const expiredValue = JSON.stringify({ + codeVerifier: 'expired_verifier', + expiresAt: Date.now() - 1000 + }) + 
sessionStorageStub.getItem.returns(expiredValue) + + const handler = new OAuthHandler( + axiosInstance, + 'appId', + 'clientId', + 'http://localhost:8184', + null + ) + + expect(handler.codeVerifier).to.not.equal('expired_verifier') + expect(handler.codeVerifier).to.have.lengthOf(128) + expect(sessionStorageStub.setItem.calledOnce).to.equal(true) + }) + + it('should clear sessionStorage after successful token exchange', async () => { + sessionStorageStub.getItem.returns(null) + const handler = new OAuthHandler( + axiosInstance, + 'appId', + 'clientId', + 'http://localhost:8184', + null + ) + const tokenData = { access_token: 'accessToken', refresh_token: 'refreshToken', expires_in: 3600 } + sandbox.stub(axiosInstance, 'post').resolves({ data: tokenData }) + + await handler.exchangeCodeForToken('authorization_code') + + expect(sessionStorageStub.removeItem.calledOnce).to.equal(true) + expect(sessionStorageStub.removeItem.firstCall.args[0]).to.include('contentstack_oauth_pkce') + }) + + it('should clear sessionStorage on token exchange error to prevent replay attacks', async () => { + sessionStorageStub.getItem.returns(null) + const handler = new OAuthHandler( + axiosInstance, + 'appId', + 'clientId', + 'http://localhost:8184', + null + ) + sandbox.stub(axiosInstance, 'post').rejects(new Error('invalid_code_verifier')) + + try { + await handler.exchangeCodeForToken('authorization_code') + } catch { + // errorFormatter rethrows; we only care that removeItem was called + } + expect(sessionStorageStub.removeItem.calledOnce).to.equal(true) + }) + }) + + describe('PKCE memory-only (Node / no sessionStorage)', () => { + it('should use memory-only code_verifier when window is not defined', () => { + const originalWindow = global.window + delete global.window + + const handler = new OAuthHandler( + axiosInstance, + 'appId', + 'clientId', + 'http://localhost:8184', + null + ) + + expect(handler.codeVerifier).to.be.a('string') + 
expect(handler.codeVerifier).to.have.lengthOf(128) + + if (originalWindow !== undefined) { + global.window = originalWindow + } + }) + }) }) diff --git a/test/unit/pkceStorage-test.js b/test/unit/pkceStorage-test.js new file mode 100644 index 00000000..5e12501d --- /dev/null +++ b/test/unit/pkceStorage-test.js @@ -0,0 +1,119 @@ +import { expect } from 'chai' +import sinon from 'sinon' +import { + getStoredCodeVerifier, + storeCodeVerifier, + clearStoredCodeVerifier +} from '../../lib/core/pkceStorage' +import { describe, it, beforeEach, afterEach } from 'mocha' + +describe('pkceStorage', () => { + let sessionStorageStub + + beforeEach(() => { + sessionStorageStub = { + getItem: sinon.stub(), + setItem: sinon.stub(), + removeItem: sinon.stub() + } + global.window = { sessionStorage: sessionStorageStub } + }) + + afterEach(() => { + delete global.window + }) + + describe('getStoredCodeVerifier', () => { + it('returns null when not in browser', () => { + delete global.window + expect(getStoredCodeVerifier('appId', 'clientId', 'http://localhost:8184')).to.equal(null) + }) + + it('returns null when nothing stored', () => { + sessionStorageStub.getItem.returns(null) + expect(getStoredCodeVerifier('appId', 'clientId', 'http://localhost:8184')).to.equal(null) + }) + + it('returns code_verifier when valid and not expired', () => { + const stored = JSON.stringify({ + codeVerifier: 'stored_verifier_xyz', + expiresAt: Date.now() + 600000 + }) + sessionStorageStub.getItem.returns(stored) + expect(getStoredCodeVerifier('appId', 'clientId', 'http://localhost:8184')).to.equal('stored_verifier_xyz') + }) + + it('returns null when stored entry is expired', () => { + const stored = JSON.stringify({ + codeVerifier: 'expired_verifier', + expiresAt: Date.now() - 1000 + }) + sessionStorageStub.getItem.returns(stored) + expect(getStoredCodeVerifier('appId', 'clientId', 'http://localhost:8184')).to.equal(null) + }) + + it('returns null when storage throws', () => { + 
sessionStorageStub.getItem.throws(new Error('QuotaExceeded')) + expect(getStoredCodeVerifier('appId', 'clientId', 'http://localhost:8184')).to.equal(null) + }) + + it('uses key containing appId, clientId, redirectUri', () => { + sessionStorageStub.getItem.returns(null) + getStoredCodeVerifier('myApp', 'myClient', 'https://app.example/cb') + expect(sessionStorageStub.getItem.calledOnce).to.equal(true) + const key = sessionStorageStub.getItem.firstCall.args[0] + expect(key).to.include('contentstack_oauth_pkce') + expect(key).to.include('myApp') + expect(key).to.include('myClient') + expect(key).to.include('https://app.example/cb') + }) + }) + + describe('storeCodeVerifier', () => { + it('does nothing when not in browser', () => { + delete global.window + storeCodeVerifier('appId', 'clientId', 'http://localhost:8184', 'verifier123') + expect(sessionStorageStub.setItem.called).to.equal(false) + }) + + it('stores codeVerifier and expiresAt in sessionStorage', () => { + const before = Date.now() + storeCodeVerifier('appId', 'clientId', 'http://localhost:8184', 'verifier123') + const after = Date.now() + expect(sessionStorageStub.setItem.calledOnce).to.equal(true) + const [key, valueStr] = sessionStorageStub.setItem.firstCall.args + expect(key).to.include('contentstack_oauth_pkce') + const value = JSON.parse(valueStr) + expect(value.codeVerifier).to.equal('verifier123') + expect(value.expiresAt).to.be.at.least(before + 9 * 60 * 1000) + expect(value.expiresAt).to.be.at.most(after + 10 * 60 * 1000 + 100) + }) + + it('does not throw when sessionStorage.setItem throws', () => { + sessionStorageStub.setItem.throws(new Error('QuotaExceeded')) + expect(() => storeCodeVerifier('appId', 'clientId', 'http://localhost:8184', 'v')).to.not.throw() + }) + }) + + describe('clearStoredCodeVerifier', () => { + it('does nothing when not in browser', () => { + delete global.window + clearStoredCodeVerifier('appId', 'clientId', 'http://localhost:8184') + 
expect(sessionStorageStub.removeItem.called).to.equal(false) + }) + + it('calls sessionStorage.removeItem with correct key', () => { + clearStoredCodeVerifier('appId', 'clientId', 'http://localhost:8184') + expect(sessionStorageStub.removeItem.calledOnce).to.equal(true) + const key = sessionStorageStub.removeItem.firstCall.args[0] + expect(key).to.include('contentstack_oauth_pkce') + expect(key).to.include('appId') + expect(key).to.include('clientId') + }) + + it('does not throw when sessionStorage.removeItem throws', () => { + sessionStorageStub.removeItem.throws(new Error('SecurityError')) + expect(() => clearStoredCodeVerifier('appId', 'clientId', 'http://localhost:8184')).to.not.throw() + }) + }) +}) diff --git a/types/stack/bulkOperation/index.d.ts b/types/stack/bulkOperation/index.d.ts index ba5c3731..c4c31c87 100644 --- a/types/stack/bulkOperation/index.d.ts +++ b/types/stack/bulkOperation/index.d.ts @@ -9,6 +9,7 @@ export interface BulkOperation extends SystemFields { addItems(config: BulkAddItemsConfig): Promise updateItems(config: BulkAddItemsConfig): Promise jobStatus(config: BulkJobStatus): Promise + getJobItems(job_id: string, params?: BulkJobItemsParams): Promise } export interface BulkOperationConfig { details: PublishItems @@ -61,4 +62,52 @@ export interface BulkJobStatus { job_id: AnyProperty; bulk_version?: string; api_version?: string; +} + +export interface BulkJobItemsParams { + api_version?: string; + include_count?: boolean; + skip?: number; + limit?: number; + include_reference?: boolean; + status?: string; + type?: string; + ct?: string | string[]; + [key: string]: unknown; +} + +export interface BulkJobItem { + uid: string; + locale: string; + version: number; + title: string; + type: "asset" | "entry"; + publish_details: { + status: string; + failure_reason?: string; + }; + publish_locale: string; + environment: string; + action: string; + published_at: string | null; + scheduled_at: string; + user: string; + depth: number; + 
content_type?: { uid: string }; +} + +/** + * Response structure varies based on query params passed to getJobItems: + * - items: Always present - array of job items + * - skip, limit, total_count: Present when include_count=true + * - Additional fields may be included based on other params (e.g. include_reference) + */ +export interface BulkJobItemsResponse { + items?: Array; + skip?: number; + limit?: number; + total_count?: number; + count?: number; + notice?: string; + [key: string]: unknown; } \ No newline at end of file