diff --git a/docs/options.md b/docs/options.md index ee4290a0..d1960925 100644 --- a/docs/options.md +++ b/docs/options.md @@ -27,6 +27,7 @@ $RefParser.dereference("my-schema.yaml", { withCredentials: true, // Include auth credentials when resolving HTTP references } }, + failFast: true, // Abort upon first exception dereference: { circular: false // Don't allow circular $refs } diff --git a/lib/bundle.js b/lib/bundle.js index 1fa423e2..f41563b2 100644 --- a/lib/bundle.js +++ b/lib/bundle.js @@ -96,7 +96,11 @@ function crawl (parent, key, path, pathFromRoot, indirections, inventory, $refs, function inventory$Ref ($refParent, $refKey, path, pathFromRoot, indirections, inventory, $refs, options) { let $ref = $refKey === null ? $refParent : $refParent[$refKey]; let $refPath = url.resolve(path, $ref.$ref); - let pointer = $refs._resolve($refPath, options); + let pointer = $refs._resolve($refPath, pathFromRoot, options); + if (pointer === null) { + return; + } + let depth = Pointer.parse(pathFromRoot).length; let file = url.stripHash(pointer.path); let hash = url.getHash(pointer.path); diff --git a/lib/dereference.js b/lib/dereference.js index 09576342..65811e3f 100644 --- a/lib/dereference.js +++ b/lib/dereference.js @@ -96,7 +96,14 @@ function dereference$Ref ($ref, path, pathFromRoot, parents, $refs, options) { // console.log('Dereferencing $ref pointer "%s" at %s', $ref.$ref, path); let $refPath = url.resolve(path, $ref.$ref); - let pointer = $refs._resolve($refPath, options); + let pointer = $refs._resolve($refPath, pathFromRoot, options); + + if (pointer === null) { + return { + circular: false, + value: null, + }; + } // Check for circular references let directCircular = pointer.circular; diff --git a/lib/index.d.ts b/lib/index.d.ts index 83cc37d6..e3d5187f 100644 --- a/lib/index.d.ts +++ b/lib/index.d.ts @@ -210,6 +210,12 @@ declare namespace $RefParser { [key: string]: Partial } + /** + * Determines how lenient the processing should be. + * If this option is enabled, processing runs in bail mode and aborts upon the first exception. + */ + failFast?: boolean; + /** * The `dereference` options control how JSON Schema `$Ref` Parser will dereference `$ref` pointers within the JSON schema.
*/ @@ -398,4 +404,60 @@ declare namespace $RefParser { set($ref: string, value: JSONSchema4Type | JSONSchema6Type): void } + export type JSONParserErrorType = "EUNKNOWN" | "EPARSER" | "EUNMATCHEDPARSER" | "ERESOLVER" | "EUNMATCHEDRESOLVER" | "EMISSINGPOINTER" | "EINVALIDPOINTER"; + + export class JSONParserError extends Error { + readonly name: string; + readonly message: string; + readonly path: Array; + readonly errors: string; + readonly code: JSONParserErrorType; + } + + export class JSONParserErrorGroup extends Error { + /** + * List of all errors + * + * See https://github.com/APIDevTools/json-schema-ref-parser/blob/master/docs/ref-parser.md#errors + */ + readonly errors: Array<$RefParser.JSONParserError | $RefParser.InvalidPointerError | $RefParser.ResolverError | $RefParser.ParserError | $RefParser.MissingPointerError | $RefParser.UnmatchedParserError | $RefParser.UnmatchedResolverError>; + + /** + * The files property is a `$RefParser` instance + * + * See https://apitools.dev/json-schema-ref-parser/docs/ref-parser.html + */ + readonly files: $RefParser; + + /** + * User-friendly message containing the total number of errors, as well as the absolute path to the source document + */ + readonly message: string; + } + + export class ParserError extends JSONParserError { + readonly name = "ParserError"; + readonly code = "EPARSER"; + } + export class UnmatchedParserError extends JSONParserError { + readonly name = "UnmatchedParserError"; + readonly code = "EUNMATCHEDPARSER"; + } + export class ResolverError extends JSONParserError { + readonly name = "ResolverError"; + readonly code = "ERESOLVER"; + readonly ioErrorCode?: string; + } + export class UnmatchedResolverError extends JSONParserError { + readonly name = "UnmatchedResolverError"; + readonly code = "EUNMATCHEDRESOLVER"; + } + export class MissingPointerError extends JSONParserError { + readonly name = "MissingPointerError"; + readonly code = "EMISSINGPOINTER"; + } + export class InvalidPointerError extends JSONParserError { + readonly name = "InvalidPointerError"; + readonly code = "EINVALIDPOINTER"; + } } diff --git a/lib/index.js b/lib/index.js index 85993563..af2a81ac 100644 --- a/lib/index.js +++ b/lib/index.js @@ -8,11 +8,19 @@ const resolveExternal = require("./resolve-external"); const bundle = require("./bundle"); const dereference = require("./dereference"); const url = require("./util/url"); +const { JSONParserError, InvalidPointerError, MissingPointerError, ResolverError, ParserError, UnmatchedParserError, UnmatchedResolverError, isHandledError, JSONParserErrorGroup } = require("./util/errors"); const maybe = require("call-me-maybe"); const { ono } = require("@jsdevtools/ono"); module.exports = $RefParser; module.exports.YAML = require("./util/yaml"); +module.exports.JSONParserError = JSONParserError; +module.exports.InvalidPointerError = InvalidPointerError; +module.exports.MissingPointerError = MissingPointerError; +module.exports.ResolverError = ResolverError; +module.exports.ParserError = ParserError; +module.exports.UnmatchedParserError = UnmatchedParserError; +module.exports.UnmatchedResolverError = UnmatchedResolverError; /** * This class parses a JSON schema, builds a map of its JSON references and their resolved values, @@ -111,16 +119,28 @@ $RefParser.prototype.parse = async function (path, schema, options, callback) { try { let result = await promise; - if (!result || typeof result !== "object" || Buffer.isBuffer(result)) { - throw ono.syntax(`"${me.$refs._root$Ref.path || result}" is not a valid
JSON Schema`); - } - else { + if (result !== null && typeof result === "object" && !Buffer.isBuffer(result)) { me.schema = result; return maybe(args.callback, Promise.resolve(me.schema)); } + else if (!args.options.failFast) { + me.schema = null; // it's already set to null at line 79, but let's set it again for the sake of readability + return maybe(args.callback, Promise.resolve(me.schema)); + } + else { + throw ono.syntax(`"${me.$refs._root$Ref.path || result}" is not a valid JSON Schema`); + } } - catch (e) { - return maybe(args.callback, Promise.reject(e)); + catch (err) { + if (args.options.failFast || !isHandledError(err)) { + return maybe(args.callback, Promise.reject(err)); + } + + if (this.$refs._$refs[url.stripHash(args.path)]) { + this.$refs._$refs[url.stripHash(args.path)].addError(err); + } + + return maybe(args.callback, Promise.resolve(null)); + } }; @@ -163,6 +183,7 @@ $RefParser.prototype.resolve = async function (path, schema, options, callback) try { await this.parse(args.path, args.schema, args.options); await resolveExternal(me, args.options); + finalize(me); return maybe(args.callback, Promise.resolve(me.$refs)); } catch (err) { @@ -205,6 +226,7 @@ $RefParser.prototype.bundle = async function (path, schema, options, callback) { try { await this.resolve(args.path, args.schema, args.options); bundle(me, args.options); + finalize(me); return maybe(args.callback, Promise.resolve(me.schema)); } catch (err) { @@ -245,9 +267,17 @@ $RefParser.prototype.dereference = async function (path, schema, options, callba try { await this.resolve(args.path, args.schema, args.options); dereference(me, args.options); + finalize(me); return maybe(args.callback, Promise.resolve(me.schema)); } catch (err) { return maybe(args.callback, Promise.reject(err)); } }; + +function finalize (parser) { + const errors = JSONParserErrorGroup.getParserErrors(parser); + if (errors.length > 0) { + throw new JSONParserErrorGroup(parser); + } +} diff --git a/lib/options.js b/lib/options.js index bfe3ad9f..001c622d 100644 --- a/lib/options.js +++ b/lib/options.js @@ -26,7 +26,7 @@ $RefParserOptions.defaults = { * Determines how different types of files will be parsed. * * You can add additional parsers of your own, replace an existing one with - * your own implemenation, or disable any parser by setting it to false. + * your own implementation, or disable any parser by setting it to false. */ parse: { json: jsonParser, @@ -39,7 +39,7 @@ $RefParserOptions.defaults = { * Determines how JSON References will be resolved. * * You can add additional resolvers of your own, replace an existing one with - * your own implemenation, or disable any resolver by setting it to false. + * your own implementation, or disable any resolver by setting it to false. */ resolve: { file: fileResolver, @@ -55,6 +55,12 @@ $RefParserOptions.defaults = { external: true, }, + /** + * Determines how lenient the processing should be. + * If this option is enabled, processing runs in bail mode and aborts upon the first exception. + */ + failFast: true, + /** * Determines the types of JSON references that are allowed.
*/ diff --git a/lib/parse.js b/lib/parse.js index e74f0409..ba13f2ae 100644 --- a/lib/parse.js +++ b/lib/parse.js @@ -3,6 +3,7 @@ const { ono } = require("@jsdevtools/ono"); const url = require("./util/url"); const plugins = require("./util/plugins"); +const { StoplightParserError, ResolverError, ParserError, UnmatchedParserError, UnmatchedResolverError, isHandledError } = require("./util/errors"); module.exports = parse; @@ -17,21 +18,21 @@ module.exports = parse; * The promise resolves with the parsed file contents, NOT the raw (Buffer) contents. */ async function parse (path, $refs, options) { - try { - // Remove the URL fragment, if any - path = url.stripHash(path); + // Remove the URL fragment, if any + path = url.stripHash(path); - // Add a new $Ref for this file, even though we don't have the value yet. - // This ensures that we don't simultaneously read & parse the same file multiple times - let $ref = $refs._add(path); + // Add a new $Ref for this file, even though we don't have the value yet. + // This ensures that we don't simultaneously read & parse the same file multiple times + let $ref = $refs._add(path); - // This "file object" will be passed to all resolvers and parsers. - let file = { - url: path, - extension: url.getExtension(path), - }; + // This "file object" will be passed to all resolvers and parsers. + let file = { + url: path, + extension: url.getExtension(path), + }; - // Read the file and then parse the data + // Read the file and then parse the data + try { const resolver = await readFile(file, options, $refs); $ref.pathType = resolver.plugin.name; file.data = resolver.result; @@ -41,8 +42,12 @@ async function parse (path, $refs, options) { return parser.result; } - catch (e) { - return Promise.reject(e); + catch (err) { + if (isHandledError(err)) { + $ref.value = err; + } + + throw err; } } @@ -71,13 +76,20 @@ function readFile (file, options, $refs) { .then(resolve, onError); function onError (err) { + if (!err && !options.failFast) { + // No resolver could be matched + reject(new UnmatchedResolverError(file.url)); + } + else if (!err || !("error" in err)) { + // Throw a generic, friendly error. + reject(ono.syntax(`Unable to resolve $ref pointer "${file.url}"`)); + } // Throw the original error, if it's one of our own (user-friendly) errors. - // Otherwise, throw a generic, friendly error. - if (err && !(err instanceof SyntaxError)) { - reject(err); + else if (err.error instanceof ResolverError) { + reject(err.error); } else { - reject(ono.syntax(`Unable to resolve $ref pointer "${file.url}"`)); + reject(new ResolverError(err, file.url)); } } })); @@ -112,7 +124,7 @@ function parseFile (file, options, $refs) { .then(onParsed, onError); function onParsed (parser) { - if (!parser.plugin.allowEmpty && isEmpty(parser.result)) { + if ((!options.failFast || !parser.plugin.allowEmpty) && isEmpty(parser.result)) { reject(ono.syntax(`Error parsing "${file.url}" as ${parser.plugin.name}. \nParsed value is empty`)); } else { @@ -121,13 +133,19 @@ function parseFile (file, options, $refs) { } function onError (err) { - if (err) { - err = err instanceof Error ? 
err : new Error(err); - reject(ono.syntax(err, `Error parsing ${file.url}`)); + if (!err && !options.failFast) { + // No parser could be matched + reject(new UnmatchedParserError(file.url)); } - else { + else if (!err || !("error" in err)) { reject(ono.syntax(`Unable to parse ${file.url}`)); } + else if (err.error instanceof ParserError || err.error instanceof StoplightParserError) { + reject(err.error); + } + else { + reject(new ParserError(err.error.message, file.url)); + } } })); } diff --git a/lib/parsers/binary.js b/lib/parsers/binary.js index 0a3b280b..be15073c 100644 --- a/lib/parsers/binary.js +++ b/lib/parsers/binary.js @@ -41,7 +41,7 @@ module.exports = { * @param {string} file.url - The full URL of the referenced file * @param {string} file.extension - The lowercased file extension (e.g. ".txt", ".html", etc.) * @param {*} file.data - The file contents. This will be whatever data type was returned by the resolver - * @returns {Promise} + * @returns {Buffer} */ parse (file) { if (Buffer.isBuffer(file.data)) { diff --git a/lib/parsers/json.js b/lib/parsers/json.js index 45ac68dc..dac373d6 100644 --- a/lib/parsers/json.js +++ b/lib/parsers/json.js @@ -1,5 +1,9 @@ "use strict"; +const { parseWithPointers } = require("@stoplight/json"); +const { StoplightParserError } = require("../util/errors"); + + module.exports = { /** * The order that this parser will run, in relation to other parsers. @@ -21,7 +25,7 @@ module.exports = { * Parsers that don't match will be skipped, UNLESS none of the parsers match, in which case * every parser will be tried. * - * @type {RegExp|string[]|function} + * @type {RegExp|string|string[]|function} */ canParse: ".json", @@ -34,25 +38,31 @@ module.exports = { * @param {*} file.data - The file contents. This will be whatever data type was returned by the resolver * @returns {Promise} */ - parse (file) { - return new Promise(((resolve, reject) => { - let data = file.data; - if (Buffer.isBuffer(data)) { - data = data.toString(); - } + async parse (file) { + let data = file.data; + if (Buffer.isBuffer(data)) { + data = data.toString(); + } - if (typeof data === "string") { - if (data.trim().length === 0) { - resolve(undefined); // This mirrors the YAML behavior - } - else { - resolve(JSON.parse(data)); - } + if (typeof data === "string") { + if (data.trim().length === 0) { + return; } else { - // data is already a JavaScript value (object, array, number, null, NaN, etc.) - resolve(data); + let result = parseWithPointers(data, { + ignoreDuplicateKeys: false, + }); + + if (StoplightParserError.hasErrors(result.diagnostics)) { + throw new StoplightParserError(result.diagnostics, file.url); + } + + return result.data; } - })); + } + else { + // data is already a JavaScript value (object, array, number, null, NaN, etc.) + return data; + } } }; diff --git a/lib/parsers/text.js b/lib/parsers/text.js index f2452e15..03bcd96b 100644 --- a/lib/parsers/text.js +++ b/lib/parsers/text.js @@ -1,5 +1,7 @@ "use strict"; +const { ParserError } = require("../util/errors"); + let TEXT_REGEXP = /\.(txt|htm|html|md|xml|js|min|map|css|scss|less|svg)$/i; module.exports = { /** @@ -48,7 +50,7 @@ module.exports = { * @param {string} file.url - The full URL of the referenced file * @param {string} file.extension - The lowercased file extension (e.g. ".txt", ".html", etc.) * @param {*} file.data - The file contents.
This will be whatever data type was returned by the resolver - * @returns {Promise} + * @returns {string} */ parse (file) { if (typeof file.data === "string") { @@ -58,7 +60,7 @@ module.exports = { return file.data.toString(this.encoding); } else { - throw new Error("data is not text"); + throw new ParserError("data is not text", file.url); } } }; diff --git a/lib/parsers/yaml.js b/lib/parsers/yaml.js index 13cd98c5..5b537495 100644 --- a/lib/parsers/yaml.js +++ b/lib/parsers/yaml.js @@ -1,6 +1,7 @@ "use strict"; const YAML = require("../util/yaml"); +const { StoplightParserError } = require("../util/errors"); module.exports = { /** @@ -36,20 +37,23 @@ module.exports = { * @param {*} file.data - The file contents. This will be whatever data type was returned by the resolver * @returns {Promise} */ - parse (file) { - return new Promise(((resolve, reject) => { - let data = file.data; - if (Buffer.isBuffer(data)) { - data = data.toString(); - } + async parse (file) { + let data = file.data; + if (Buffer.isBuffer(data)) { + data = data.toString(); + } - if (typeof data === "string") { - resolve(YAML.parse(data)); - } - else { - // data is already a JavaScript value (object, array, number, null, NaN, etc.) - resolve(data); + if (typeof data === "string") { + let result = YAML.parse(data); + if (StoplightParserError.hasErrors(result.diagnostics)) { + throw new StoplightParserError(result.diagnostics, file.url); } - })); + + return result.data; + } + else { + // data is already a JavaScript value (object, array, number, null, NaN, etc.) + return data; + } } }; diff --git a/lib/pointer.js b/lib/pointer.js index 20e91a49..b312e95d 100644 --- a/lib/pointer.js +++ b/lib/pointer.js @@ -4,7 +4,7 @@ module.exports = Pointer; const $Ref = require("./ref"); const url = require("./util/url"); -const { ono } = require("@jsdevtools/ono"); +const { JSONParserError, InvalidPointerError, MissingPointerError, isHandledError } = require("./util/errors"); const slashes = /\//g; const tildes = /~/g; const escapedSlash = /~1/g; @@ -72,10 +72,11 @@ function Pointer ($ref, path, friendlyPath) { * of the resolved value. */ Pointer.prototype.resolve = function (obj, options) { - let tokens = Pointer.parse(this.path); + let tokens = Pointer.parse(this.path, this.originalPath); // Crawl the object, one token at a time - this.value = obj; + this.value = unwrapOrThrow(obj); + for (let i = 0; i < tokens.length; i++) { if (resolveIf$Ref(this, options)) { // The $ref path has changed, so append the remaining tokens to the path @@ -83,8 +84,9 @@ Pointer.prototype.resolve = function (obj, options) { } let token = tokens[i]; - if (this.value[token] === undefined) { - throw ono.syntax(`Error resolving $ref pointer "${this.originalPath}". 
\nToken "${token}" does not exist.`); + if (this.value[token] === undefined || this.value[token] === null) { + this.value = null; + throw new MissingPointerError(token, this.originalPath); } else { this.value = this.value[token]; @@ -117,7 +119,8 @@ Pointer.prototype.set = function (obj, value, options) { } // Crawl the object, one token at a time - this.value = obj; + this.value = unwrapOrThrow(obj); + for (let i = 0; i < tokens.length - 1; i++) { resolveIf$Ref(this, options); @@ -150,9 +153,10 @@ Pointer.prototype.set = function (obj, value, options) { * {@link https://tools.ietf.org/html/rfc6901#section-3} * * @param {string} path + * @param {string} [originalPath] * @returns {string[]} */ -Pointer.parse = function (path) { +Pointer.parse = function (path, originalPath) { // Get the JSON pointer from the path's hash let pointer = url.getHash(path).substr(1); @@ -171,7 +175,7 @@ Pointer.parse = function (path) { } if (pointer[0] !== "") { - throw ono.syntax(`Invalid $ref pointer "${pointer}". Pointers must begin with "#/"`); + throw new InvalidPointerError(pointer, originalPath === undefined ? path : originalPath); } return pointer.slice(1); @@ -222,7 +226,7 @@ function resolveIf$Ref (pointer, options) { pointer.circular = true; } else { - let resolved = pointer.$ref.$refs._resolve($refPath, options); + let resolved = pointer.$ref.$refs._resolve($refPath, url.getHash(pointer.path), options); pointer.indirections += resolved.indirections + 1; if ($Ref.isExtended$Ref(pointer.value)) { @@ -264,7 +268,16 @@ function setValue (pointer, token, value) { } } else { - throw ono.syntax(`Error assigning $ref pointer "${pointer.path}". \nCannot set "${token}" of a non-object.`); + throw new JSONParserError(`Error assigning $ref pointer "${pointer.path}". \nCannot set "${token}" of a non-object.`); } return value; } + + +function unwrapOrThrow (value) { + if (isHandledError(value)) { + throw value; + } + + return value; +} diff --git a/lib/ref.js b/lib/ref.js index 30c69f45..ee66be80 100644 --- a/lib/ref.js +++ b/lib/ref.js @@ -3,6 +3,8 @@ module.exports = $Ref; const Pointer = require("./pointer"); +const { JSONParserError, JSONParserErrorGroup, ParserError, MissingPointerError, ResolverError, isHandledError } = require("./util/errors"); +const { safePointerToPath } = require("./util/url"); /** * This class represents a single JSON reference and its resolved value. @@ -40,8 +42,34 @@ function $Ref () { * @type {?string} */ this.pathType = undefined; + + /** + * List of all errors. Undefined if no errors. + * @type {Array} + */ + this.errors = undefined; } +/** + * Pushes an error to errors array. + * + * @param {Array} error - The error to be pushed + * @returns {void} + */ +$Ref.prototype.addError = function (err) { + if (this.errors === undefined) { + this.errors = []; + } + + if (Array.isArray(err.errors)) { + this.errors.push(...err.errors); + } + else { + this.errors.push(err); + } +}; + + /** * Determines whether the given JSON reference exists within this {@link $Ref#value}. 
* @@ -75,12 +103,24 @@ $Ref.prototype.get = function (path, options) { * * @param {string} path - The full path being resolved, optionally with a JSON pointer in the hash * @param {$RefParserOptions} options - * @param {string} [friendlyPath] - The original user-specified path (used for error messages) + * @param {string} friendlyPath - The original user-specified path (used for error messages) +* @param {string} pathFromRoot - The path of `obj` from the schema root * @returns {Pointer} */ -$Ref.prototype.resolve = function (path, options, friendlyPath) { +$Ref.prototype.resolve = function (path, options, friendlyPath, pathFromRoot) { let pointer = new Pointer(this, path, friendlyPath); - return pointer.resolve(this.value, options); + try { + return pointer.resolve(this.value, options); + } + catch (err) { + if (!options || options.failFast || !isHandledError(err)) { + throw err; + } + + err.path = safePointerToPath(pathFromRoot); + this.addError(err); + return null; + } }; /** diff --git a/lib/refs.js b/lib/refs.js index 4e197b53..41fb32d9 100644 --- a/lib/refs.js +++ b/lib/refs.js @@ -79,7 +79,7 @@ $Refs.prototype.toJSON = $Refs.prototype.values; */ $Refs.prototype.exists = function (path, options) { try { - this._resolve(path, options); + this._resolve(path, "", options); return true; } catch (e) { @@ -95,7 +95,7 @@ $Refs.prototype.exists = function (path, options) { * @returns {*} - Returns the resolved value */ $Refs.prototype.get = function (path, options) { - return this._resolve(path, options).value; + return this._resolve(path, "", options).value; }; /** @@ -139,11 +139,12 @@ $Refs.prototype._add = function (path) { * Resolves the given JSON reference. * * @param {string} path - The path being resolved, optionally with a JSON pointer in the hash + * @param {string} pathFromRoot - The path of `obj` from the schema root * @param {$RefParserOptions} [options] * @returns {Pointer} * @protected */ -$Refs.prototype._resolve = function (path, options) { +$Refs.prototype._resolve = function (path, pathFromRoot, options) { let absPath = url.resolve(this._root$Ref.path, path); let withoutHash = url.stripHash(absPath); let $ref = this._$refs[withoutHash]; @@ -152,7 +153,7 @@ $Refs.prototype._resolve = function (path, options) { throw ono(`Error resolving $ref pointer "${path}". 
\n"${withoutHash}" not found.`); } - return $ref.resolve(absPath, options, path); + return $ref.resolve(absPath, options, path, pathFromRoot); }; /** diff --git a/lib/resolve-external.js b/lib/resolve-external.js index fb2e0ae7..1d3b5cda 100644 --- a/lib/resolve-external.js +++ b/lib/resolve-external.js @@ -4,6 +4,7 @@ const $Ref = require("./ref"); const Pointer = require("./pointer"); const parse = require("./parse"); const url = require("./util/url"); +const { isHandledError } = require("./util/errors"); module.exports = resolveExternal; @@ -101,11 +102,25 @@ async function resolve$Ref ($ref, path, $refs, options) { } // Parse the $referenced file/url - const result = await parse(resolvedPath, $refs, options); + try { + const result = await parse(resolvedPath, $refs, options); + + // Crawl the parsed value + // console.log('Resolving $ref pointers in %s', withoutHash); + let promises = crawl(result, withoutHash + "#", $refs, options); + + return Promise.all(promises); + } + catch (err) { + if (options.failFast || !isHandledError(err)) { + throw err; + } - // Crawl the parsed value - // console.log('Resolving $ref pointers in %s', withoutHash); - let promises = crawl(result, withoutHash + "#", $refs, options); + if ($refs._$refs[withoutHash]) { + err.source = url.stripHash(path); + err.path = url.safePointerToPath(url.getHash(path)); + } - return Promise.all(promises); + return []; + } } diff --git a/lib/resolvers/file.js b/lib/resolvers/file.js index e1276fd1..45577ad7 100644 --- a/lib/resolvers/file.js +++ b/lib/resolvers/file.js @@ -2,6 +2,7 @@ const fs = require("fs"); const { ono } = require("@jsdevtools/ono"); const url = require("../util/url"); +const { ResolverError } = require("../util/errors"); module.exports = { /** @@ -40,7 +41,7 @@ module.exports = { path = url.toFileSystemPath(file.url); } catch (err) { - reject(ono.uri(err, `Malformed URI: ${file.url}`)); + reject(new ResolverError(ono.uri(err, `Malformed URI: ${file.url}`), file.url)); } // console.log('Opening file: %s', path); @@ -48,7 +49,7 @@ module.exports = { try { fs.readFile(path, (err, data) => { if (err) { - reject(ono(err, `Error opening file "${path}"`)); + reject(new ResolverError(ono(err, `Error opening file "${path}"`), path)); } else { resolve(data); @@ -56,7 +57,7 @@ module.exports = { }); } catch (err) { - reject(ono(err, `Error opening file "${path}"`)); + reject(new ResolverError(ono(err, `Error opening file "${path}"`), path)); } })); } diff --git a/lib/resolvers/http.js b/lib/resolvers/http.js index f2d02dae..eaadd2de 100644 --- a/lib/resolvers/http.js +++ b/lib/resolvers/http.js @@ -4,6 +4,7 @@ const http = require("http"); const https = require("https"); const { ono } = require("@jsdevtools/ono"); const url = require("../util/url"); +const { ResolverError } = require("../util/errors"); module.exports = { /** @@ -106,8 +107,8 @@ function download (u, httpOptions, redirects) { } else if (res.statusCode >= 300) { if (redirects.length > httpOptions.redirects) { - reject(ono({ status: res.statusCode }, - `Error downloading ${redirects[0]}. \nToo many redirects: \n ${redirects.join(" \n ")}`)); + reject(new ResolverError(ono({ status: res.statusCode }, + `Error downloading ${redirects[0]}. 
\nToo many redirects: \n ${redirects.join(" \n ")}`))); } else if (!res.headers.location) { throw ono({ status: res.statusCode }, `HTTP ${res.statusCode} redirect with no location header`); @@ -123,7 +124,7 @@ function download (u, httpOptions, redirects) { } }) .catch((err) => { - reject(ono(err, `Error downloading ${u.href}`)); + reject(new ResolverError(ono(err, `Error downloading ${u.href}`), u.href)); }); })); } diff --git a/lib/util/errors.js b/lib/util/errors.js new file mode 100644 index 00000000..2354522b --- /dev/null +++ b/lib/util/errors.js @@ -0,0 +1,176 @@ +"use strict"; + +const { Ono } = require("@jsdevtools/ono"); + +const { stripHash, toFileSystemPath } = require("./url"); + +const JSONParserError = exports.JSONParserError = class JSONParserError extends Error { + constructor (message, source) { + super(); + + this.code = "EUNKNOWN"; + this.message = message; + this.source = source; + this.path = []; + + Ono.extend(this); + } +}; + +setErrorName(JSONParserError); + +const JSONParserErrorGroup = exports.JSONParserErrorGroup = class JSONParserErrorGroup extends Error { + constructor (parser) { + super(); + + this.files = parser; + this.message = `${this.errors.length} error${this.errors.length > 1 ? "s" : ""} occurred while reading '${toFileSystemPath(parser.$refs._root$Ref.path)}'`; + + Ono.extend(this); + } + + static getParserErrors (parser) { + const errors = []; + + for (const $ref of Object.values(parser.$refs._$refs)) { + if ($ref.errors) { + errors.push(...$ref.errors); + } + } + + return errors; + } + + get errors () { + return JSONParserErrorGroup.getParserErrors(this.files); + } +}; + +setErrorName(JSONParserErrorGroup); + +exports.StoplightParserError = class StoplightParserError extends JSONParserError { + constructor (diagnostics, source) { + super(`Error parsing ${source}`, source); + + this.code = "ESTOPLIGHTPARSER"; + + this._source = source; + this._path = []; + this.errors = diagnostics.filter(StoplightParserError.pickError).map(error => { + let parserError = new ParserError(error.message, source); + parserError.message = error.message; + return parserError; + }); + } + + static pickError (diagnostic) { + return diagnostic.severity === 0; + } + + static hasErrors (diagnostics) { + return diagnostics.some(StoplightParserError.pickError); + } + + get source () { + return this._source; + } + + set source (source) { + this._source = source; + + if (this.errors) { + for (let error of this.errors) { + error.source = source; + } + } + } + + get path () { + return this._path; + } + + set path (path) { + this._path = path; + + if (this.errors) { + for (let error of this.errors) { + error.path = path; + } + } + } +}; + +const ParserError = exports.ParserError = class ParserError extends JSONParserError { + constructor (message, source) { + super(`Error parsing ${source}: ${message}`, source); + + this.code = "EPARSER"; + } +}; + +setErrorName(ParserError); + +const UnmatchedParserError = exports.UnmatchedParserError = class UnmatchedParserError extends JSONParserError { + constructor (source) { + super(`Could not find parser for "${source}"`, source); + + this.code = "EUNMATCHEDPARSER"; + } +}; + +setErrorName(UnmatchedParserError); + +const ResolverError = exports.ResolverError = class ResolverError extends JSONParserError { + constructor (ex, source) { + super(ex.message || `Error reading file "${source}"`, source); + + this.code = "ERESOLVER"; + + if ("code" in ex) { + this.ioErrorCode = String(ex.code); + } + } +}; + +setErrorName(ResolverError); + +const 
UnmatchedResolverError = exports.UnmatchedResolverError = class UnmatchedResolverError extends JSONParserError { + constructor (source) { + super(`Could not find resolver for "${source}"`, source); + + this.code = "EUNMATCHEDRESOLVER"; + } +}; + +setErrorName(UnmatchedResolverError); + +const MissingPointerError = exports.MissingPointerError = class MissingPointerError extends JSONParserError { + constructor (token, path) { + super(`Token "${token}" does not exist.`, stripHash(path)); + + this.code = "EMISSINGPOINTER"; + } +}; + +setErrorName(MissingPointerError); + +const InvalidPointerError = exports.InvalidPointerError = class InvalidPointerError extends JSONParserError { + constructor (pointer, path) { + super(`Invalid $ref pointer "${pointer}". Pointers must begin with "#/"`, stripHash(path)); + + this.code = "EINVALIDPOINTER"; + } +}; + +setErrorName(InvalidPointerError); + +function setErrorName (err) { + Object.defineProperty(err.prototype, "name", { + value: err.name, + enumerable: true, + }); +} + +exports.isHandledError = function (err) { + return err instanceof JSONParserError || err instanceof JSONParserErrorGroup; +}; diff --git a/lib/util/plugins.js b/lib/util/plugins.js index 932281cf..3d327367 100644 --- a/lib/util/plugins.js +++ b/lib/util/plugins.js @@ -108,9 +108,12 @@ exports.run = function (plugins, method, file, $refs) { }); } - function onError (err) { + function onError (error) { // console.log(' %s', err.message || err); - lastError = err; + lastError = { + plugin, + error, + }; runNextPlugin(); } })); diff --git a/lib/util/url.js b/lib/util/url.js index 5c925a00..4d1b380d 100644 --- a/lib/util/url.js +++ b/lib/util/url.js @@ -1,5 +1,7 @@ "use strict"; +const { pointerToPath } = require("@stoplight/json"); + let isWindows = /^win/.test(process.platform), forwardSlashPattern = /\//g, protocolPattern = /^(\w{2,}):\/\//i, @@ -232,3 +234,19 @@ exports.toFileSystemPath = function toFileSystemPath (path, keepFileProtocol) { return path; }; + +/** + * Converts a $ref pointer to a valid JSON Path. + * It _does not_ throw. + * + * @param {string} pointer + * @returns {Array} + */ +exports.safePointerToPath = function safePointerToPath (pointer) { + try { + return pointerToPath(pointer); + } + catch (ex) { + return []; + } +}; diff --git a/lib/util/yaml.js b/lib/util/yaml.js index 80503566..ab802064 100644 --- a/lib/util/yaml.js +++ b/lib/util/yaml.js @@ -1,8 +1,7 @@ /* eslint lines-around-comment: [2, {beforeBlockComment: false}] */ "use strict"; -const yaml = require("js-yaml"); -const { ono } = require("@jsdevtools/ono"); +const { parseWithPointers, safeStringify } = require("@stoplight/yaml"); /** * Simple YAML parsing functions, similar to {@link JSON.parse} and {@link JSON.stringify} @@ -16,18 +15,11 @@ module.exports = { * @returns {*} */ parse (text, reviver) { - try { - return yaml.safeLoad(text); - } - catch (e) { - if (e instanceof Error) { - throw e; - } - else { - // https://github.com/nodeca/js-yaml/issues/153 - throw ono(e, e.message); - } - } + return parseWithPointers(text, { + json: true, + mergeKeys: true, + ignoreDuplicateKeys: false, + }); }, /** @@ -39,18 +31,7 @@ module.exports = { * @returns {string} */ stringify (value, replacer, space) { - try { - let indent = (typeof space === "string" ? 
space.length : space) || 2; - return yaml.safeDump(value, { indent }); - } - catch (e) { - if (e instanceof Error) { - throw e; - } - else { - // https://github.com/nodeca/js-yaml/issues/153 - throw ono(e, e.message); - } - } + let indent = (typeof space === "string" ? space.length : space) || 2; + return safeStringify(value, { indent }); } }; diff --git a/package-lock.json b/package-lock.json index 2fac0c8a..1e0d519e 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1071,6 +1071,40 @@ "integrity": "sha512-D5H5RjqqE+YxI2oeTgSRuIjdy/hli90H5mMd81bBrYlOfB/f4TBsKMoaWfzI5E4bmFzLfQJuvvepTaWrxVfBug==", "dev": true }, + "@stoplight/json": { + "version": "3.5.1", + "resolved": "https://registry.npmjs.org/@stoplight/json/-/json-3.5.1.tgz", + "integrity": "sha512-O5WUW2yfAvtrqeq60YrbxpTvk87Ti2IeJ5oVa2XNJ2s+IIxx0CM+j316QoOjSGs+twrRpwb3jT9CFPrq7Ghkzg==", + "requires": { + "@stoplight/types": "^11.4.0", + "jsonc-parser": "~2.2.0", + "lodash": "^4.17.15", + "safe-stable-stringify": "^1.1" + } + }, + "@stoplight/types": { + "version": "11.4.1", + "resolved": "https://registry.npmjs.org/@stoplight/types/-/types-11.4.1.tgz", + "integrity": "sha512-NvzokIoSbHk8UUv0PT7Y8RB4eC74Vj92NwYLoUwOqAeA64IXqsMxuS+Yf5AP3PY1MQoIbPSGx13Yi5KiqkioFQ==", + "requires": { + "@types/json-schema": "^7.0.4" + } + }, + "@stoplight/yaml": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/@stoplight/yaml/-/yaml-3.6.0.tgz", + "integrity": "sha512-38EhZjNsVwuwrxPawr5R3Q1QhO3l/MTLD5F/d0v7vIrvWgde6GJzdh6rmWbrKJttEqqwuqX8mpLjWihWB7Qm1A==", + "requires": { + "@stoplight/types": "^11.1.1", + "@stoplight/yaml-ast-parser": "0.0.44", + "lodash": "^4.17.15" + } + }, + "@stoplight/yaml-ast-parser": { + "version": "0.0.44", + "resolved": "https://registry.npmjs.org/@stoplight/yaml-ast-parser/-/yaml-ast-parser-0.0.44.tgz", + "integrity": "sha512-PdY8p2Ufgtorf4d2DbKMfknILMa8KwuyyMMR/2lgK1mLaU8F5PKWYc+h9hIzC+ar0bh7m9h2rINo32m7ADfVyA==" + }, "@types/color-name": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/@types/color-name/-/color-name-1.1.1.tgz", @@ -1080,8 +1114,7 @@ "@types/json-schema": { "version": "7.0.4", "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.4.tgz", - "integrity": "sha512-8+KAKzEvSUdeo+kmqnKrqgeE+LcA0tjYWFY7RPProVYwnqDjukzO+3b6dLD56rYX5TdWejnEOLJYOIeh4CXKuA==", - "dev": true + "integrity": "sha512-8+KAKzEvSUdeo+kmqnKrqgeE+LcA0tjYWFY7RPProVYwnqDjukzO+3b6dLD56rYX5TdWejnEOLJYOIeh4CXKuA==" }, "@types/node": { "version": "13.9.1", @@ -1480,6 +1513,7 @@ "version": "1.0.10", "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, "requires": { "sprintf-js": "~1.0.2" } @@ -2289,6 +2323,12 @@ "type-detect": "^4.0.5" } }, + "chai-subset": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/chai-subset/-/chai-subset-1.6.0.tgz", + "integrity": "sha1-pdDKFOMpp5WW7XAFi2ZGvWmIz+k=", + "dev": true + }, "chalk": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", @@ -3618,7 +3658,8 @@ "esprima": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", - "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==" + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true }, "esquery": { "version": "1.1.0", @@ -5898,6 +5939,7 
@@ "version": "3.13.1", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz", "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==", + "dev": true, "requires": { "argparse": "^1.0.7", "esprima": "^4.0.0" @@ -5936,6 +5978,11 @@ "minimist": "^1.2.0" } }, + "jsonc-parser": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-2.2.1.tgz", + "integrity": "sha512-o6/yDBYccGvTz1+QFevz6l6OBZ2+fMVu2JZ9CIhzsYRX4mjaK5IyX9eldUdCmga16zlgQxyrj5pt9kzuj2C02w==" + }, "jsonfile": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz", @@ -6378,8 +6425,7 @@ "lodash": { "version": "4.17.15", "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", - "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", - "dev": true + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==" }, "lodash.camelcase": { "version": "4.3.0", @@ -8762,6 +8808,11 @@ "ret": "~0.1.10" } }, + "safe-stable-stringify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/safe-stable-stringify/-/safe-stable-stringify-1.1.0.tgz", + "integrity": "sha512-8h+96qSufNQrydRPzbHms38VftQQSRGbqUkaIMWUBWN4/N8sLNALIALa8KmFcQ8P/a9uzMkA+KY04Rj5WQiXPA==" + }, "safer-buffer": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", @@ -9390,7 +9441,8 @@ "sprintf-js": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", - "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=" + "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", + "dev": true }, "ssri": { "version": "6.0.1", diff --git a/package.json b/package.json index 418eb941..d78e953d 100644 --- a/package.json +++ b/package.json @@ -59,6 +59,7 @@ "@types/json-schema": "^7.0.4", "@types/node": "^13.1.2", "chai": "^4.2.0", + "chai-subset": "^1.6.0", "eslint": "^6.8.0", "karma": "^4.4.1", "karma-cli": "^2.0.0", @@ -69,8 +70,9 @@ "typescript": "^3.7.4" }, "dependencies": { + "@stoplight/json": "^3.5.1", + "@stoplight/yaml": "^3.6.0", "call-me-maybe": "^1.0.1", - "js-yaml": "^3.13.1", "@jsdevtools/ono": "^7.1.0" } } diff --git a/test/specs/callbacks.spec.js b/test/specs/callbacks.spec.js index 46f1f292..59920efa 100644 --- a/test/specs/callbacks.spec.js +++ b/test/specs/callbacks.spec.js @@ -4,6 +4,7 @@ const { expect } = require("chai"); const $RefParser = require("../../lib"); const helper = require("../utils/helper"); const path = require("../utils/path"); +const { StoplightParserError } = require("../../lib/util/errors"); describe("Callback & Promise syntax", () => { for (let method of ["parse", "resolve", "dereference", "bundle"]) { @@ -42,7 +43,7 @@ describe("Callback & Promise syntax", () => { return function (done) { $RefParser[method](path.rel("specs/invalid/invalid.yaml"), (err, result) => { try { - expect(err).to.be.an.instanceOf(SyntaxError); + expect(err).to.be.an.instanceOf(StoplightParserError); expect(result).to.be.undefined; done(); } @@ -75,7 +76,7 @@ describe("Callback & Promise syntax", () => { return $RefParser[method](path.rel("specs/invalid/invalid.yaml")) .then(helper.shouldNotGetCalled) .catch((err) => { - expect(err).to.be.an.instanceOf(SyntaxError); + expect(err).to.be.an.instanceOf(StoplightParserError); }); }; } diff --git a/test/specs/invalid-pointers/invalid-pointers.js 
b/test/specs/invalid-pointers/invalid-pointers.js new file mode 100644 index 00000000..033b681b --- /dev/null +++ b/test/specs/invalid-pointers/invalid-pointers.js @@ -0,0 +1,44 @@ +"use strict"; + +const chai = require("chai"); +const chaiSubset = require("chai-subset"); +chai.use(chaiSubset); +const { expect } = chai; +const $RefParser = require("../../../lib"); +const helper = require("../../utils/helper"); +const path = require("../../utils/path"); +const { JSONParserErrorGroup, InvalidPointerError } = require("../../../lib/util/errors"); + +describe("Schema with invalid pointers", () => { + it("should throw an error for an invalid pointer", async () => { + try { + await $RefParser.dereference(path.rel("specs/invalid-pointers/invalid.json")); + helper.shouldNotGetCalled(); + } + catch (err) { + expect(err).to.be.an.instanceOf(InvalidPointerError); + expect(err.message).to.contain("Invalid $ref pointer \"f\". Pointers must begin with \"#/\""); + } + }); + + it("should throw a grouped error for an invalid pointer if failFast is false", async () => { + const parser = new $RefParser(); + try { + await parser.dereference(path.rel("specs/invalid-pointers/invalid.json"), { failFast: false }); + helper.shouldNotGetCalled(); + } + catch (err) { + expect(err).to.be.instanceof(JSONParserErrorGroup); + expect(err.files).to.equal(parser); + expect(err.message).to.equal(`1 error occurred while reading '${path.abs("specs/invalid-pointers/invalid.json")}'`); + expect(err.errors).to.containSubset([ + { + name: InvalidPointerError.name, + message: "Invalid $ref pointer \"f\". Pointers must begin with \"#/\"", + path: ["foo"], + source: path.abs("specs/invalid-pointers/invalid.json"), + } + ]); + } + }); +}); diff --git a/test/specs/invalid-pointers/invalid.json b/test/specs/invalid-pointers/invalid.json new file mode 100644 index 00000000..fd0d8ea3 --- /dev/null +++ b/test/specs/invalid-pointers/invalid.json @@ -0,0 +1,5 @@ +{ + "foo": { + "$ref": "./invalid.json#f" + } +} diff --git a/test/specs/invalid/invalid.spec.js b/test/specs/invalid/invalid.spec.js index 6b50cca6..3bb89a7e 100644 --- a/test/specs/invalid/invalid.spec.js +++ b/test/specs/invalid/invalid.spec.js @@ -1,10 +1,14 @@ "use strict"; const { host } = require("@jsdevtools/host-environment"); -const { expect } = require("chai"); +const chai = require("chai"); +const chaiSubset = require("chai-subset"); +chai.use(chaiSubset); +const { expect } = chai; const $RefParser = require("../../../lib"); const helper = require("../../utils/helper"); const path = require("../../utils/path"); +const { JSONParserErrorGroup, StoplightParserError, ParserError, ResolverError } = require("../../../lib/util/errors"); describe("Invalid syntax", () => { describe("in main file", () => { @@ -14,9 +18,9 @@ describe("Invalid syntax", () => { helper.shouldNotGetCalled(); } catch (err) { - expect(err).to.be.an.instanceOf(Error); + expect(err).to.be.an.instanceOf(ResolverError); if (host.node) { - expect(err.code).to.equal("ENOENT"); + expect(err.ioErrorCode).to.equal("ENOENT"); expect(err.message).to.contain("Error opening file "); } } @@ -28,7 +32,7 @@ describe("Invalid syntax", () => { helper.shouldNotGetCalled(); } catch (err) { - expect(err).to.be.an.instanceOf(SyntaxError); + expect(err).to.be.an.instanceOf(StoplightParserError); expect(err.message).to.contain("Error parsing "); expect(err.message).to.contain("invalid/invalid.yaml"); } @@ -40,7 +44,7 @@ describe("Invalid syntax", () => { helper.shouldNotGetCalled(); } catch (err) { - 
expect(err).to.be.an.instanceOf(SyntaxError); + expect(err).to.be.an.instanceOf(StoplightParserError); expect(err.message).to.contain("Error parsing "); expect(err.message).to.contain("invalid/invalid.json"); } @@ -52,7 +56,7 @@ describe("Invalid syntax", () => { helper.shouldNotGetCalled(); } catch (err) { - expect(err).to.be.an.instanceOf(SyntaxError); + expect(err).to.be.an.instanceOf(StoplightParserError); expect(err.message).to.contain("Error parsing "); expect(err.message).to.contain("invalid/invalid.json"); } @@ -68,6 +72,103 @@ describe("Invalid syntax", () => { expect(err.message).to.contain('invalid/invalid.yaml" is not a valid JSON Schema'); } }); + + describe("when failFast is false", () => { + it("should throw a grouped error for an invalid file path", async () => { + const parser = new $RefParser(); + try { + await parser.dereference("this file does not exist", { failFast: false }); + helper.shouldNotGetCalled(); + } + catch (err) { + expect(err).to.be.instanceof(JSONParserErrorGroup); + expect(err.files).to.equal(parser); + expect(err.message).to.have.string("1 error occurred while reading '"); + expect(err.message).to.have.string("this file does not exist'"); + expect(err.errors.length).to.equal(1); + expect(err.errors).to.containSubset([ + { + name: ResolverError.name, + message: expectedValue => expectedValue.startsWith("Error opening file") || expectedValue.endsWith("HTTP ERROR 404"), + path: [], + source: expectedValue => expectedValue.endsWith("this file does not exist") || expectedValue.startsWith("http://localhost"), + } + ]); + } + }); + + it("should throw a grouped error for an invalid YAML file", async () => { + const parser = new $RefParser(); + try { + await parser.dereference(path.rel("specs/invalid/invalid.yaml"), { failFast: false }); + helper.shouldNotGetCalled(); + } + catch (err) { + expect(err).to.be.instanceof(JSONParserErrorGroup); + expect(err.files).to.equal(parser); + expect(err.message).to.equal(`1 error occurred while reading '${path.abs("specs/invalid/invalid.yaml")}'`); + expect(err.errors.length).to.equal(1); + expect(err.errors).to.containSubset([ + { + name: ParserError.name, + message: "incomplete explicit mapping pair; a key node is missed", + path: [], + source: expectedValue => expectedValue.endsWith("test/specs/invalid/invalid.yaml"), + }, + ]); + } + }); + + it("should throw a grouped error for an invalid JSON file", async () => { + const parser = new $RefParser(); + try { + await parser.dereference(path.rel("specs/invalid/invalid.json"), { failFast: false }); + helper.shouldNotGetCalled(); + } + catch (err) { + expect(err).to.be.instanceof(JSONParserErrorGroup); + expect(err.files).to.equal(parser); + expect(err.message).to.equal(`1 error occurred while reading '${path.abs("specs/invalid/invalid.json")}'`); + expect(err.errors.length).to.equal(1); + expect(err.errors).to.containSubset([ + { + name: ParserError.name, + message: "unexpected end of the stream within a flow collection", + path: [], + source: expectedValue => expectedValue.endsWith("test/specs/invalid/invalid.json"), + } + ]); + } + }); + + it("should throw a grouped error for an invalid JSON file with YAML disabled", async () => { + const parser = new $RefParser(); + try { + await parser.dereference(path.rel("specs/invalid/invalid.json"), { failFast: false, parse: { yaml: false }}); + helper.shouldNotGetCalled(); + } + catch (err) { + expect(err).to.be.instanceof(JSONParserErrorGroup); + expect(err.files).to.equal(parser); + expect(err.message).to.equal(`1 error occurred 
while reading '${path.abs("specs/invalid/invalid.json")}'`); + expect(err.errors.length).to.equal(1); + expect(err.errors).to.containSubset([ + { + name: ParserError.name, + message: "CloseBraceExpected", + path: [], + source: expectedValue => expectedValue.endsWith("test/specs/invalid/invalid.json"), + } + ]); + } + }); + + it("should not throw an error for an invalid YAML file with JSON and YAML disabled", async () => { + const parser = new $RefParser(); + const result = await parser.dereference(path.rel("specs/invalid/invalid.yaml"), { failFast: false, parse: { yaml: false, json: false }}); + expect(result).to.be.null; + }); + }); }); describe("in referenced files", () => { @@ -77,7 +178,7 @@ describe("Invalid syntax", () => { helper.shouldNotGetCalled(); } catch (err) { - expect(err).to.be.an.instanceOf(SyntaxError); + expect(err).to.be.an.instanceOf(StoplightParserError); expect(err.message).to.contain("Error parsing "); expect(err.message).to.contain("invalid/invalid.yaml"); } @@ -89,7 +190,7 @@ describe("Invalid syntax", () => { helper.shouldNotGetCalled(); } catch (err) { - expect(err).to.be.an.instanceOf(SyntaxError); + expect(err).to.be.an.instanceOf(StoplightParserError); expect(err.message).to.contain("Error parsing "); expect(err.message).to.contain("invalid/invalid.json"); } @@ -103,13 +204,13 @@ describe("Invalid syntax", () => { helper.shouldNotGetCalled(); } catch (err) { - expect(err).to.be.an.instanceOf(SyntaxError); + expect(err).to.be.an.instanceOf(StoplightParserError); expect(err.message).to.contain("Error parsing "); expect(err.message).to.contain("invalid/invalid.json"); } }); - it("should NOT throw an error for an invalid YAML file with JSON and YAML disabled", async () => { + it("should throw a grouped error for an invalid YAML file with JSON and YAML disabled", async () => { const schema = await $RefParser .dereference({ foo: { $ref: path.rel("specs/invalid/invalid.yaml") }}, { parse: { yaml: false, json: false } @@ -120,5 +221,97 @@ describe("Invalid syntax", () => { foo: ":\n" }); }); + + describe("when failFast is false", () => { + it("should throw a grouped error for an invalid file path", async () => { + try { + const parser = new $RefParser(); + await parser.dereference({ foo: { $ref: "this file does not exist" }}, { failFast: false }); + helper.shouldNotGetCalled(); + } + catch (err) { + expect(err).to.be.instanceof(JSONParserErrorGroup); + expect(err.files.$refs._root$Ref.value).to.deep.equal({ foo: null }); + expect(err.errors.length).to.equal(1); + expect(err.errors).to.containSubset([ + { + name: ResolverError.name, + message: expectedValue => expectedValue.startsWith("Error opening file") || expectedValue.endsWith("HTTP ERROR 404"), + path: ["foo"], + source: expectedValue => expectedValue.endsWith("/test/") || expectedValue.startsWith("http://localhost"), + } + ]); + } + }); + + it("should throw a grouped error for an invalid YAML file", async () => { + try { + const parser = new $RefParser(); + await parser.dereference({ foo: { $ref: path.rel("specs/invalid/invalid.yaml") }}, { failFast: false }); + helper.shouldNotGetCalled(); + } + catch (err) { + expect(err).to.be.instanceof(JSONParserErrorGroup); + expect(err.files.$refs._root$Ref.value).to.deep.equal({ foo: null }); + expect(err.errors.length).to.equal(1); + expect(err.errors).to.containSubset([ + { + name: ParserError.name, + message: "incomplete explicit mapping pair; a key node is missed", + path: ["foo"], + source: expectedValue => expectedValue.endsWith("/test/") || 
expectedValue.startsWith("http://localhost"), + }, + ]); + } + }); + + it("should throw a grouped error for an invalid JSON file", async () => { + try { + const parser = new $RefParser(); + await parser.dereference({ foo: { $ref: path.rel("specs/invalid/invalid.json") }}, { failFast: false }); + helper.shouldNotGetCalled(); + } + catch (err) { + expect(err).to.be.instanceof(JSONParserErrorGroup); + expect(err.files.$refs._root$Ref.value).to.deep.equal({ foo: null }); + expect(err.errors.length).to.equal(1); + expect(err.errors).to.containSubset([ + { + name: ParserError.name, + message: "unexpected end of the stream within a flow collection", + path: ["foo"], + source: expectedValue => expectedValue.endsWith("/test/") || expectedValue.startsWith("http://localhost"), + } + ]); + } + }); + + it("should throw a grouped error for an invalid JSON file with YAML disabled", async () => { + try { + const parser = new $RefParser(); + await parser.dereference({ foo: { $ref: path.rel("specs/invalid/invalid.json") }}, { failFast: false, parse: { yaml: false }}); + helper.shouldNotGetCalled(); + } + catch (err) { + expect(err).to.be.instanceof(JSONParserErrorGroup); + expect(err.files.$refs._root$Ref.value).to.deep.equal({ foo: null }); + expect(err.errors.length).to.equal(1); + expect(err.errors).to.containSubset([ + { + name: ParserError.name, + message: "CloseBraceExpected", + path: ["foo"], + source: expectedValue => expectedValue.endsWith("/test/") || expectedValue.startsWith("http://localhost"), + } + ]); + } + }); + + it("should not throw an error for an invalid YAML file with JSON and YAML disabled", async () => { + const parser = new $RefParser(); + const result = await parser.dereference({ foo: { $ref: path.rel("specs/invalid/invalid.yaml") }}, { failFast: false, parse: { yaml: false, json: false }}); + expect(result).to.deep.equal({ foo: ":\n" }); + }); + }); }); }); diff --git a/test/specs/missing-pointers/missing-pointers.spec.js b/test/specs/missing-pointers/missing-pointers.spec.js new file mode 100644 index 00000000..d6a04718 --- /dev/null +++ b/test/specs/missing-pointers/missing-pointers.spec.js @@ -0,0 +1,45 @@ +"use strict"; + +const chai = require("chai"); +const chaiSubset = require("chai-subset"); +chai.use(chaiSubset); +const { expect } = chai; +const $RefParser = require("../../../lib"); +const { JSONParserErrorGroup, MissingPointerError } = require("../../../lib/util/errors"); +const helper = require("../../utils/helper"); +const path = require("../../utils/path"); + +describe("Schema with missing pointers", () => { + it("should throw an error for missing pointer", async () => { + try { + await $RefParser.dereference({ foo: { $ref: "#/baz" }}); + helper.shouldNotGetCalled(); + } + catch (err) { + expect(err).to.be.an.instanceOf(MissingPointerError); + expect(err.message).to.contain("Token \"baz\" does not exist."); + } + }); + + it("should throw a grouped error for missing pointer if failFast is false", async () => { + const parser = new $RefParser(); + try { + await parser.dereference({ foo: { $ref: "#/baz" }}, { failFast: false }); + helper.shouldNotGetCalled(); + } + catch (err) { + expect(err).to.be.instanceof(JSONParserErrorGroup); + expect(err.files).to.equal(parser); + expect(err.files.$refs._root$Ref.value).to.deep.equal({ foo: null }); + expect(err.message).to.have.string("1 error occurred while reading '"); + expect(err.errors).to.containSubset([ + { + name: MissingPointerError.name, + message: "Token \"baz\" does not exist.", + path: ["foo"], + source: expectedValue 
=> expectedValue.endsWith("/test/") || expectedValue.startsWith("http://localhost"), + } + ]); + } + }); +}); diff --git a/test/specs/parsers/parsers.spec.js b/test/specs/parsers/parsers.spec.js index 3773a01f..24758868 100644 --- a/test/specs/parsers/parsers.spec.js +++ b/test/specs/parsers/parsers.spec.js @@ -1,11 +1,15 @@ "use strict"; -const { expect } = require("chai"); +const chai = require("chai"); +const chaiSubset = require("chai-subset"); +chai.use(chaiSubset); +const { expect } = chai; const $RefParser = require("../../.."); const helper = require("../../utils/helper"); const path = require("../../utils/path"); const parsedSchema = require("./parsed"); const dereferencedSchema = require("./dereferenced"); +const { JSONParserErrorGroup, StoplightParserError, ParserError, UnmatchedParserError } = require("../../../lib/util/errors"); describe("References to non-JSON files", () => { it("should parse successfully", async () => { @@ -64,13 +68,31 @@ describe("References to non-JSON files", () => { expect(schema).to.deep.equal(dereferencedSchema.binaryParser); }); + it("should throw an error if no parser can be matched", async () => { + try { + await $RefParser.dereference(path.rel("specs/parsers/parsers.yaml"), { + parse: { + yaml: false, + json: false, + text: false, + binary: false, + }, + }); + } + catch (err) { + expect(err).to.be.an.instanceOf(SyntaxError); + expect(err.message).to.contain("Unable to parse "); + expect(err.message).to.contain("parsers/parsers.yaml"); + } + }); + it('should throw an error if "parse.text" and "parse.binary" are disabled', async () => { try { await $RefParser.dereference(path.rel("specs/parsers/parsers.yaml"), { parse: { text: false, binary: false }}); helper.shouldNotGetCalled(); } catch (err) { - expect(err).to.be.an.instanceOf(SyntaxError); + expect(err).to.be.an.instanceOf(StoplightParserError); expect(err.message).to.contain("Error parsing "); } }); @@ -161,4 +183,54 @@ describe("References to non-JSON files", () => { expect(schema).to.deep.equal(dereferencedSchema.defaultParsers); }); + it("should normalize errors thrown by parsers", async () => { + try { + await $RefParser.dereference(path.rel("specs/parsers/parsers.yaml"), { + parse: { + // A custom parser that always fails, + // so the built-in parsers will be used as a fallback + yaml: { + order: 1, + parse () { + throw new Error("Woops"); + } + } + } + }); + helper.shouldNotGetCalled(); + } + catch (err) { + expect(err).to.be.instanceof(ParserError); + expect(err.message).to.contain("Error parsing"); + expect(err.message).to.contain("arsers/parsers.yaml: Woops"); + } + }); + + it("should throw a grouped error if no parser can be matched and failFast is false", async () => { + try { + const parser = new $RefParser(); + await parser.dereference(path.rel("specs/parsers/parsers.yaml"), { + parse: { + yaml: false, + json: false, + text: false, + binary: false, + }, + failFast: false, + }); + helper.shouldNotGetCalled(); + } + catch (err) { + expect(err).to.be.instanceof(JSONParserErrorGroup); + expect(err.errors.length).to.equal(1); + expect(err.errors).to.containSubset([ + { + name: UnmatchedParserError.name, + message: expectedValue => expectedValue.startsWith("Could not find parser for"), + path: [], + source: expectedValue => expectedValue.endsWith("specs/parsers/parsers.yaml") || expectedValue.startsWith("http://localhost"), + }, + ]); + } + }); }); diff --git a/test/specs/refs.spec.js b/test/specs/refs.spec.js index 6f28fc3e..65a64940 100644 --- a/test/specs/refs.spec.js +++
b/test/specs/refs.spec.js @@ -216,10 +216,7 @@ describe("$Refs object", () => { } catch (err) { expect(err).to.be.an.instanceOf(Error); - expect(err.message).to.equal( - 'Error resolving $ref pointer "definitions/name.yaml#/". ' + - '\nToken "" does not exist.' - ); + expect(err.message).to.equal('Token "" does not exist.'); } }); @@ -257,10 +254,7 @@ describe("$Refs object", () => { } catch (err) { expect(err).to.be.an.instanceOf(Error); - expect(err.message).to.equal( - 'Error resolving $ref pointer "external.yaml#/foo/bar". ' + - '\nToken "foo" does not exist.' - ); + expect(err.message).to.equal('Token "foo" does not exist.'); } }); }); diff --git a/test/specs/resolvers/resolvers.spec.js b/test/specs/resolvers/resolvers.spec.js index 736086b1..ccb22403 100644 --- a/test/specs/resolvers/resolvers.spec.js +++ b/test/specs/resolvers/resolvers.spec.js @@ -1,11 +1,15 @@ "use strict"; -const { expect } = require("chai"); +const chai = require("chai"); +const chaiSubset = require("chai-subset"); +chai.use(chaiSubset); +const { expect } = chai; const $RefParser = require("../../.."); const helper = require("../../utils/helper"); const path = require("../../utils/path"); const parsedSchema = require("./parsed"); const dereferencedSchema = require("./dereferenced"); +const { ResolverError, UnmatchedResolverError, JSONParserErrorGroup } = require("../../../lib/util/errors"); describe("options.resolve", () => { it('should not resolve external links if "resolve.external" is disabled', async () => { @@ -111,4 +115,51 @@ describe("options.resolve", () => { expect(schema).to.deep.equal(dereferencedSchema); }); + it("should normalize errors thrown by resolvers", async () => { + try { + await $RefParser.dereference({ $ref: path.abs("specs/resolvers/resolvers.yaml") }, { + resolve: { + // A custom resolver that always fails + file: { + order: 1, + canRead: true, + parse () { + throw new Error("Woops"); + } + } + } + }); + helper.shouldNotGetCalled(); + } + catch (err) { + expect(err).to.be.instanceof(ResolverError); + expect(err.message).to.contain("Error opening file"); + } + }); + + it("should throw a grouped error if no resolver can be matched and failFast is false", async () => { + const parser = new $RefParser(); + try { + await parser.dereference(path.abs("specs/resolvers/resolvers.yaml"), { + resolve: { + file: false, + http: false, + }, + failFast: false, + }); + helper.shouldNotGetCalled(); + } + catch (err) { + expect(err).to.be.instanceof(JSONParserErrorGroup); + expect(err.errors.length).to.equal(1); + expect(err.errors).to.containSubset([ + { + name: UnmatchedResolverError.name, + message: expectedValue => expectedValue.startsWith("Could not find resolver for"), + path: [], + source: expectedValue => expectedValue.endsWith("specs/resolvers/resolvers.yaml"), + }, + ]); + } + }); }); diff --git a/test/specs/yaml.spec.js b/test/specs/yaml.spec.js index 28902dce..f9b47efe 100644 --- a/test/specs/yaml.spec.js +++ b/test/specs/yaml.spec.js @@ -18,7 +18,7 @@ describe("YAML object", () => { " type: number" ); - expect(obj).to.deep.equal({ + expect(obj).to.have.property("data").that.deep.equal({ title: "person", required: ["name", "age"], properties: { @@ -34,12 +34,12 @@ it("should parse a string", async () => { let str = $RefParser.YAML.parse("hello, world"); - expect(str).to.equal("hello, world"); + expect(str).to.have.property("data", "hello, world"); }); it("should parse a number", async () => { let str = $RefParser.YAML.parse("42"); -
expect(str).to.be.a("number").equal(42); + expect(str).to.have.property("data").that.is.a("number").and.equal(42); }); }); @@ -127,7 +127,7 @@ describe("YAML object", () => { it("should stringify a string", async () => { let yaml = $RefParser.YAML.stringify("hello, world"); - expect(yaml).to.equal("'hello, world'\n"); + expect(yaml).to.equal("hello, world"); }); it("should stringify a number", async () => {
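Taken together, the changes in this diff add a lenient processing mode: with `failFast: false`, handled errors are recorded on each `$Ref` and surfaced as a single `JSONParserErrorGroup` by `finalize()`. The sketch below shows how a consumer might use that mode. It is illustrative only: it assumes the library is consumed as the `json-schema-ref-parser` package and uses a hypothetical `my-schema.yaml`, relying solely on the error shape shown above (`err.errors` entries carrying `name`, `code`, `source`, `path`, and `message`).

```js
"use strict";

// Assumed entry point; within this repository it would be require("./lib").
const $RefParser = require("json-schema-ref-parser");

async function dereferenceLeniently (schemaPath) {
  const parser = new $RefParser();

  try {
    // failFast defaults to true (bail on the first exception);
    // failFast: false collects every handled error instead.
    return await parser.dereference(schemaPath, { failFast: false });
  }
  catch (err) {
    // In lenient mode all recorded failures arrive as one grouped error
    // whose `errors` getter aggregates ParserError, ResolverError,
    // MissingPointerError, InvalidPointerError, and similar instances.
    if (Array.isArray(err.errors)) {
      for (let e of err.errors) {
        console.error(`${e.name} [${e.code}] in ${e.source} at ${JSON.stringify(e.path)}: ${e.message}`);
      }
    }
    throw err;
  }
}

// Hypothetical schema file, used purely for illustration.
dereferenceLeniently("my-schema.yaml").catch(() => {
  process.exitCode = 1;
});
```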