mirror of
https://github.com/microsoft/TypeScript.git
synced 2026-02-23 10:29:01 -06:00
Merge pull request #8841 from Microsoft/glob2_merged
Add glob-style file include pattern support for tsconfig.json
This commit is contained in:
commit
6d99b2fd3a
@ -121,6 +121,7 @@ var languageServiceLibrarySources = [
|
||||
|
||||
var harnessCoreSources = [
|
||||
"harness.ts",
|
||||
"virtualFileSystem.ts",
|
||||
"sourceMapRecorder.ts",
|
||||
"harnessLanguageService.ts",
|
||||
"fourslash.ts",
|
||||
@ -155,7 +156,8 @@ var harnessSources = harnessCoreSources.concat([
|
||||
"commandLineParsing.ts",
|
||||
"convertCompilerOptionsFromJson.ts",
|
||||
"convertTypingOptionsFromJson.ts",
|
||||
"tsserverProjectSystem.ts"
|
||||
"tsserverProjectSystem.ts",
|
||||
"matchFiles.ts"
|
||||
].map(function (f) {
|
||||
return path.join(unittestsDirectory, f);
|
||||
})).concat([
|
||||
|
||||
@ -686,7 +686,7 @@ namespace ts {
|
||||
|
||||
// Skip over any minified JavaScript files (ending in ".min.js")
|
||||
// Skip over dotted files and folders as well
|
||||
const IgnoreFileNamePattern = /(\.min\.js$)|([\\/]\.[\w.])/;
|
||||
const ignoreFileNamePattern = /(\.min\.js$)|([\\/]\.[\w.])/;
|
||||
/**
|
||||
* Parse the contents of a config file (tsconfig.json).
|
||||
* @param json The contents of the config file to parse
|
||||
@ -702,80 +702,66 @@ namespace ts {
|
||||
|
||||
options.configFilePath = configFileName;
|
||||
|
||||
const fileNames = getFileNames(errors);
|
||||
const { fileNames, wildcardDirectories } = getFileNames(errors);
|
||||
|
||||
return {
|
||||
options,
|
||||
fileNames,
|
||||
typingOptions,
|
||||
raw: json,
|
||||
errors
|
||||
errors,
|
||||
wildcardDirectories
|
||||
};
|
||||
|
||||
function getFileNames(errors: Diagnostic[]): string[] {
|
||||
let fileNames: string[] = [];
|
||||
function getFileNames(errors: Diagnostic[]): ExpandResult {
|
||||
let fileNames: string[];
|
||||
if (hasProperty(json, "files")) {
|
||||
if (isArray(json["files"])) {
|
||||
fileNames = map(<string[]>json["files"], s => combinePaths(basePath, s));
|
||||
fileNames = <string[]>json["files"];
|
||||
}
|
||||
else {
|
||||
errors.push(createCompilerDiagnostic(Diagnostics.Compiler_option_0_requires_a_value_of_type_1, "files", "Array"));
|
||||
}
|
||||
}
|
||||
else {
|
||||
const filesSeen: Map<boolean> = {};
|
||||
|
||||
let exclude: string[] = [];
|
||||
if (isArray(json["exclude"])) {
|
||||
exclude = json["exclude"];
|
||||
let includeSpecs: string[];
|
||||
if (hasProperty(json, "include")) {
|
||||
if (isArray(json["include"])) {
|
||||
includeSpecs = <string[]>json["include"];
|
||||
}
|
||||
else {
|
||||
// by default exclude node_modules, and any specificied output directory
|
||||
exclude = ["node_modules", "bower_components", "jspm_packages"];
|
||||
}
|
||||
const outDir = json["compilerOptions"] && json["compilerOptions"]["outDir"];
|
||||
if (outDir) {
|
||||
exclude.push(outDir);
|
||||
}
|
||||
exclude = map(exclude, e => getNormalizedAbsolutePath(e, basePath));
|
||||
|
||||
const supportedExtensions = getSupportedExtensions(options);
|
||||
Debug.assert(indexOf(supportedExtensions, ".ts") < indexOf(supportedExtensions, ".d.ts"), "Changed priority of extensions to pick");
|
||||
|
||||
// Get files of supported extensions in their order of resolution
|
||||
for (const extension of supportedExtensions) {
|
||||
const filesInDirWithExtension = host.readDirectory(basePath, extension, exclude);
|
||||
for (const fileName of filesInDirWithExtension) {
|
||||
// .ts extension would read the .d.ts extension files too but since .d.ts is lower priority extension,
|
||||
// lets pick them when its turn comes up
|
||||
if (extension === ".ts" && fileExtensionIs(fileName, ".d.ts")) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (IgnoreFileNamePattern.test(fileName)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// If this is one of the output extension (which would be .d.ts and .js if we are allowing compilation of js files)
|
||||
// do not include this file if we included .ts or .tsx file with same base name as it could be output of the earlier compilation
|
||||
if (extension === ".d.ts" || (options.allowJs && contains(supportedJavascriptExtensions, extension))) {
|
||||
const baseName = fileName.substr(0, fileName.length - extension.length);
|
||||
if (hasProperty(filesSeen, baseName + ".ts") || hasProperty(filesSeen, baseName + ".tsx")) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
if (!filesSeen[fileName]) {
|
||||
filesSeen[fileName] = true;
|
||||
fileNames.push(fileName);
|
||||
}
|
||||
}
|
||||
errors.push(createCompilerDiagnostic(Diagnostics.Compiler_option_0_requires_a_value_of_type_1, "include", "Array"));
|
||||
}
|
||||
}
|
||||
if (hasProperty(json, "excludes") && !hasProperty(json, "exclude")) {
|
||||
|
||||
let excludeSpecs: string[];
|
||||
if (hasProperty(json, "exclude")) {
|
||||
if (isArray(json["exclude"])) {
|
||||
excludeSpecs = <string[]>json["exclude"];
|
||||
}
|
||||
else {
|
||||
errors.push(createCompilerDiagnostic(Diagnostics.Compiler_option_0_requires_a_value_of_type_1, "exclude", "Array"));
|
||||
}
|
||||
}
|
||||
else if (hasProperty(json, "excludes")) {
|
||||
errors.push(createCompilerDiagnostic(Diagnostics.Unknown_option_excludes_Did_you_mean_exclude));
|
||||
}
|
||||
return fileNames;
|
||||
else {
|
||||
// By default, exclude common package folders
|
||||
excludeSpecs = ["node_modules", "bower_components", "jspm_packages"];
|
||||
}
|
||||
|
||||
// Always exclude the output directory unless explicitly included
|
||||
const outDir = json["compilerOptions"] && json["compilerOptions"]["outDir"];
|
||||
if (outDir) {
|
||||
excludeSpecs.push(outDir);
|
||||
}
|
||||
|
||||
if (fileNames === undefined && includeSpecs === undefined) {
|
||||
includeSpecs = ["**/*"];
|
||||
}
|
||||
|
||||
return matchFileNames(fileNames, includeSpecs, excludeSpecs, basePath, options, host, errors);
|
||||
}
|
||||
}
|
||||
|
||||
@ -871,4 +857,296 @@ namespace ts {
|
||||
function trimString(s: string) {
|
||||
return typeof s.trim === "function" ? s.trim() : s.replace(/^[\s]+|[\s]+$/g, "");
|
||||
}
|
||||
|
||||
/**
 * Tests for a path that ends in a recursive directory wildcard.
 * Matches **, \**, **\, and \**\, but not a**b.
 *
 * NOTE: used \ in place of / above to avoid issues with multiline comments.
 *
 * Breakdown:
 *  (^|\/)  # matches either the beginning of the string or a directory separator.
 *  \*\*    # matches the recursive directory wildcard "**".
 *  \/?$    # matches an optional trailing directory separator at the end of the string.
 */
const invalidTrailingRecursionPattern = /(^|\/)\*\*\/?$/;

/**
 * Tests for a path with multiple recursive directory wildcards.
 * Matches **\** and **\a\**, but not **\a**b.
 *
 * NOTE: used \ in place of / above to avoid issues with multiline comments.
 *
 * Breakdown:
 *  (^|\/)  # matches either the beginning of the string or a directory separator.
 *  \*\*\/  # matches a recursive directory wildcard "**" followed by a directory separator.
 *  (.*\/)? # optionally matches any number of characters followed by a directory separator.
 *  \*\*    # matches a recursive directory wildcard "**"
 *  ($|\/)  # matches either the end of the string or a directory separator.
 */
const invalidMultipleRecursionPatterns = /(^|\/)\*\*\/(.*\/)?\*\*($|\/)/;

/**
 * Tests for a path containing a wildcard character in a directory component of the path.
 * Matches \*\, \?\, and \a*b\, but not \a\ or \a\*.
 * A match means the containing directory must be watched recursively.
 *
 * NOTE: used \ in place of / above to avoid issues with multiline comments.
 *
 * Breakdown:
 *  \/      # matches a directory separator.
 *  [^/]*?  # matches any number of characters excluding directory separators (non-greedy).
 *  [*?]    # matches either a wildcard character (* or ?)
 *  [^/]*   # matches any number of characters excluding directory separators (greedy).
 *  \/      # matches a directory separator.
 */
const watchRecursivePattern = /\/[^/]*?[*?][^/]*\//;

/**
 * Matches the portion of a wildcard path that does not contain wildcards.
 * Matches \a of \a\*, or \a\b\c of \a\b\c\?\d.
 * Used to derive the directory to watch from a wildcard include spec.
 *
 * NOTE: used \ in place of / above to avoid issues with multiline comments.
 *
 * Breakdown:
 *  ^                # matches the beginning of the string
 *  [^*?]*           # matches any number of non-wildcard characters
 *  (?=\/[^/]*[*?])  # lookahead that matches a directory separator followed by
 *                   # a path component that contains at least one wildcard character (* or ?).
 */
const wildcardDirectoryPattern = /^[^*?]*(?=\/[^/]*[*?])/;
||||
|
||||
/**
 * Expands literal file names and include/exclude wildcard specifications into
 * the final set of file names for a config file.
 *
 * @param fileNames The literal file names (the "files" list) to include.
 * @param include The wildcard file specifications to include.
 * @param exclude The wildcard file specifications to exclude.
 * @param basePath The base path for any relative file specifications.
 * @param options Compiler options.
 * @param host The host used to resolve files and directories.
 * @param errors An array for diagnostic reporting.
 */
function matchFileNames(fileNames: string[], include: string[], exclude: string[], basePath: string, options: CompilerOptions, host: ParseConfigHost, errors: Diagnostic[]): ExpandResult {
    basePath = normalizePath(basePath);

    // Map keys are case-folded when the host's file system is case-insensitive,
    // so the same path in different casing collapses to one entry.
    const keyMapper = host.useCaseSensitiveFileNames ? caseSensitiveKeyMapper : caseInsensitiveKeyMapper;

    // Literal file names (provided via the "files" array in tsconfig.json) are stored in a
    // file map with a possibly case insensitive key. We use this map later when including
    // wildcard paths.
    const literalFileMap: Map<string> = {};

    // Wildcard paths (provided via the "include" array in tsconfig.json) are stored in a
    // file map with a possibly case insensitive key. We use this map to store paths matched
    // via wildcard, and to handle extension priority.
    const wildcardFileMap: Map<string> = {};

    // Drop specs that misuse the recursive wildcard "**", reporting diagnostics.
    if (include) {
        include = validateSpecs(include, errors, /*allowTrailingRecursion*/ false);
    }

    if (exclude) {
        exclude = validateSpecs(exclude, errors, /*allowTrailingRecursion*/ true);
    }

    // Wildcard directories (provided as part of a wildcard path) are stored in a
    // file map that marks whether it was a regular wildcard match (with a `*` or `?` token),
    // or a recursive directory. This information is used by filesystem watchers to monitor for
    // new entries in these paths.
    const wildcardDirectories: Map<WatchDirectoryFlags> = getWildcardDirectories(include, exclude, basePath, host.useCaseSensitiveFileNames);

    // Rather than requery this for each file and filespec, we query the supported extensions
    // once and store it on the expansion context.
    const supportedExtensions = getSupportedExtensions(options);

    // Literal files are always included verbatim. An "include" or "exclude" specification cannot
    // remove a literal file.
    if (fileNames) {
        for (const fileName of fileNames) {
            const file = combinePaths(basePath, fileName);
            literalFileMap[keyMapper(file)] = file;
        }
    }

    if (include && include.length > 0) {
        for (const file of host.readDirectory(basePath, supportedExtensions, exclude, include)) {
            // If we have already included a literal or wildcard path with a
            // higher priority extension, we should skip this file.
            //
            // This handles cases where we may encounter both <file>.ts and
            // <file>.d.ts (or <file>.js if "allowJs" is enabled) in the same
            // directory when they are compilation outputs.
            if (hasFileWithHigherPriorityExtension(file, literalFileMap, wildcardFileMap, supportedExtensions, keyMapper)) {
                continue;
            }

            // Skip minified files (".min.js") and dotted files/folders.
            if (ignoreFileNamePattern.test(file)) {
                continue;
            }

            // We may have included a wildcard path with a lower priority
            // extension due to the user-defined order of entries in the
            // "include" array. If there is a lower priority extension in the
            // same directory, we should remove it.
            removeWildcardFilesWithLowerPriorityExtension(file, wildcardFileMap, supportedExtensions, keyMapper);

            const key = keyMapper(file);
            if (!hasProperty(literalFileMap, key) && !hasProperty(wildcardFileMap, key)) {
                wildcardFileMap[key] = file;
            }
        }
    }

    const literalFiles = reduceProperties(literalFileMap, addFileToOutput, []);
    const wildcardFiles = reduceProperties(wildcardFileMap, addFileToOutput, []);
    // Wildcard matches are sorted for deterministic output; literal files keep
    // their user-specified order and come first.
    wildcardFiles.sort(host.useCaseSensitiveFileNames ? compareStrings : compareStringsCaseInsensitive);
    return {
        fileNames: literalFiles.concat(wildcardFiles),
        wildcardDirectories
    };
}
|
||||
|
||||
function validateSpecs(specs: string[], errors: Diagnostic[], allowTrailingRecursion: boolean) {
|
||||
const validSpecs: string[] = [];
|
||||
for (const spec of specs) {
|
||||
if (!allowTrailingRecursion && invalidTrailingRecursionPattern.test(spec)) {
|
||||
errors.push(createCompilerDiagnostic(Diagnostics.File_specification_cannot_end_in_a_recursive_directory_wildcard_Asterisk_Asterisk_Colon_0, spec));
|
||||
}
|
||||
else if (invalidMultipleRecursionPatterns.test(spec)) {
|
||||
errors.push(createCompilerDiagnostic(Diagnostics.File_specification_cannot_contain_multiple_recursive_directory_wildcards_Asterisk_Asterisk_Colon_0, spec));
|
||||
}
|
||||
else {
|
||||
validSpecs.push(spec);
|
||||
}
|
||||
}
|
||||
|
||||
return validSpecs;
|
||||
}
|
||||
|
||||
/**
 * Gets directories in a set of include patterns that should be watched for changes.
 *
 * @param include Validated include specs (may be undefined).
 * @param exclude Validated exclude specs (may be undefined).
 * @param path The base path the specs are resolved against.
 * @param useCaseSensitiveFileNames Whether map keys should preserve case.
 */
function getWildcardDirectories(include: string[], exclude: string[], path: string, useCaseSensitiveFileNames: boolean) {
    // We watch a directory recursively if it contains a wildcard anywhere in a directory segment
    // of the pattern:
    //
    //  /a/b/**/d   - Watch /a/b recursively to catch changes to any d in any subfolder recursively
    //  /a/b/*/d    - Watch /a/b recursively to catch any d in any immediate subfolder, even if a new subfolder is added
    //
    // We watch a directory without recursion if it contains a wildcard in the file segment of
    // the pattern:
    //
    //  /a/b/*      - Watch /a/b directly to catch any new file
    //  /a/b/a?z    - Watch /a/b directly to catch any new file matching a?z
    const rawExcludeRegex = getRegularExpressionForWildcard(exclude, path, "exclude");
    const excludeRegex = rawExcludeRegex && new RegExp(rawExcludeRegex, useCaseSensitiveFileNames ? "" : "i");
    const wildcardDirectories: Map<WatchDirectoryFlags> = {};
    if (include !== undefined) {
        const recursiveKeys: string[] = [];
        for (const file of include) {
            const name = combinePaths(path, file);
            // An include spec that the exclude pattern rejects contributes no
            // watched directory.
            if (excludeRegex && excludeRegex.test(name)) {
                continue;
            }

            // The non-wildcard prefix of the spec is the directory to watch.
            const match = wildcardDirectoryPattern.exec(name);
            if (match) {
                const key = useCaseSensitiveFileNames ? match[0] : match[0].toLowerCase();
                const flags = watchRecursivePattern.test(name) ? WatchDirectoryFlags.Recursive : WatchDirectoryFlags.None;
                const existingFlags = getProperty(wildcardDirectories, key);
                // Recursive supersedes non-recursive for the same directory.
                if (existingFlags === undefined || existingFlags < flags) {
                    wildcardDirectories[key] = flags;
                    if (flags === WatchDirectoryFlags.Recursive) {
                        recursiveKeys.push(key);
                    }
                }
            }
        }

        // Remove any subpaths under an existing recursively watched directory.
        for (const key in wildcardDirectories) {
            if (hasProperty(wildcardDirectories, key)) {
                for (const recursiveKey of recursiveKeys) {
                    if (key !== recursiveKey && containsPath(recursiveKey, key, path, !useCaseSensitiveFileNames)) {
                        delete wildcardDirectories[key];
                    }
                }
            }
        }
    }

    return wildcardDirectories;
}
|
||||
|
||||
/**
|
||||
* Determines whether a literal or wildcard file has already been included that has a higher
|
||||
* extension priority.
|
||||
*
|
||||
* @param file The path to the file.
|
||||
* @param extensionPriority The priority of the extension.
|
||||
* @param context The expansion context.
|
||||
*/
|
||||
function hasFileWithHigherPriorityExtension(file: string, literalFiles: Map<string>, wildcardFiles: Map<string>, extensions: string[], keyMapper: (value: string) => string) {
|
||||
const extensionPriority = getExtensionPriority(file, extensions);
|
||||
const adjustedExtensionPriority = adjustExtensionPriority(extensionPriority);
|
||||
for (let i = ExtensionPriority.Highest; i < adjustedExtensionPriority; i++) {
|
||||
const higherPriorityExtension = extensions[i];
|
||||
const higherPriorityPath = keyMapper(changeExtension(file, higherPriorityExtension));
|
||||
if (hasProperty(literalFiles, higherPriorityPath) || hasProperty(wildcardFiles, higherPriorityPath)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes files included via wildcard expansion with a lower extension priority that have
|
||||
* already been included.
|
||||
*
|
||||
* @param file The path to the file.
|
||||
* @param extensionPriority The priority of the extension.
|
||||
* @param context The expansion context.
|
||||
*/
|
||||
function removeWildcardFilesWithLowerPriorityExtension(file: string, wildcardFiles: Map<string>, extensions: string[], keyMapper: (value: string) => string) {
|
||||
const extensionPriority = getExtensionPriority(file, extensions);
|
||||
const nextExtensionPriority = getNextLowestExtensionPriority(extensionPriority);
|
||||
for (let i = nextExtensionPriority; i < extensions.length; i++) {
|
||||
const lowerPriorityExtension = extensions[i];
|
||||
const lowerPriorityPath = keyMapper(changeExtension(file, lowerPriorityExtension));
|
||||
delete wildcardFiles[lowerPriorityPath];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a file to an array of files.
|
||||
*
|
||||
* @param output The output array.
|
||||
* @param file The file path.
|
||||
*/
|
||||
function addFileToOutput(output: string[], file: string) {
|
||||
output.push(file);
|
||||
return output;
|
||||
}
|
||||
|
||||
/**
 * Gets a case sensitive key. Identity function — used when the host's file
 * system is case sensitive, so a path is its own map key.
 *
 * @param key The original key.
 */
function caseSensitiveKeyMapper(key: string) {
    return key;
}
|
||||
|
||||
/**
 * Gets a case insensitive key. Used when the host's file system is case
 * insensitive, so paths differing only by case collapse to one map entry.
 *
 * @param key The original key.
 */
function caseInsensitiveKeyMapper(key: string) {
    return key.toLowerCase();
}
|
||||
}
|
||||
|
||||
@ -25,7 +25,7 @@ namespace ts {
|
||||
contains,
|
||||
remove,
|
||||
forEachValue: forEachValueInMap,
|
||||
clear
|
||||
clear,
|
||||
};
|
||||
|
||||
function forEachValueInMap(f: (key: Path, value: T) => void) {
|
||||
@ -113,6 +113,15 @@ namespace ts {
|
||||
return -1;
|
||||
}
|
||||
|
||||
export function indexOfAnyCharCode(text: string, charCodes: number[], start?: number): number {
|
||||
for (let i = start || 0, len = text.length; i < len; i++) {
|
||||
if (contains(charCodes, text.charCodeAt(i))) {
|
||||
return i;
|
||||
}
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
export function countWhere<T>(array: T[], predicate: (x: T) => boolean): number {
|
||||
let count = 0;
|
||||
if (array) {
|
||||
@ -534,6 +543,28 @@ namespace ts {
|
||||
return a < b ? Comparison.LessThan : Comparison.GreaterThan;
|
||||
}
|
||||
|
||||
export function compareStrings(a: string, b: string, ignoreCase?: boolean): Comparison {
|
||||
if (a === b) return Comparison.EqualTo;
|
||||
if (a === undefined) return Comparison.LessThan;
|
||||
if (b === undefined) return Comparison.GreaterThan;
|
||||
if (ignoreCase) {
|
||||
if (String.prototype.localeCompare) {
|
||||
const result = a.localeCompare(b, /*locales*/ undefined, { usage: "sort", sensitivity: "accent" });
|
||||
return result < 0 ? Comparison.LessThan : result > 0 ? Comparison.GreaterThan : Comparison.EqualTo;
|
||||
}
|
||||
|
||||
a = a.toUpperCase();
|
||||
b = b.toUpperCase();
|
||||
if (a === b) return Comparison.EqualTo;
|
||||
}
|
||||
|
||||
return a < b ? Comparison.LessThan : Comparison.GreaterThan;
|
||||
}
|
||||
|
||||
/** Compares two strings ignoring case; delegates to compareStrings. */
export function compareStringsCaseInsensitive(a: string, b: string) {
    return compareStrings(a, b, /*ignoreCase*/ true);
}
|
||||
|
||||
function getDiagnosticFileName(diagnostic: Diagnostic): string {
|
||||
return diagnostic.file ? diagnostic.file.fileName : undefined;
|
||||
}
|
||||
@ -803,12 +834,275 @@ namespace ts {
|
||||
return path1 + directorySeparator + path2;
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes a trailing directory separator from a path.
|
||||
* @param path The path.
|
||||
*/
|
||||
export function removeTrailingDirectorySeparator(path: string) {
|
||||
if (path.charAt(path.length - 1) === directorySeparator) {
|
||||
return path.substr(0, path.length - 1);
|
||||
}
|
||||
|
||||
return path;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a trailing directory separator to a path, if it does not already have one.
|
||||
* @param path The path.
|
||||
*/
|
||||
export function ensureTrailingDirectorySeparator(path: string) {
|
||||
if (path.charAt(path.length - 1) !== directorySeparator) {
|
||||
return path + directorySeparator;
|
||||
}
|
||||
|
||||
return path;
|
||||
}
|
||||
|
||||
export function comparePaths(a: string, b: string, currentDirectory: string, ignoreCase?: boolean) {
|
||||
if (a === b) return Comparison.EqualTo;
|
||||
if (a === undefined) return Comparison.LessThan;
|
||||
if (b === undefined) return Comparison.GreaterThan;
|
||||
a = removeTrailingDirectorySeparator(a);
|
||||
b = removeTrailingDirectorySeparator(b);
|
||||
const aComponents = getNormalizedPathComponents(a, currentDirectory);
|
||||
const bComponents = getNormalizedPathComponents(b, currentDirectory);
|
||||
const sharedLength = Math.min(aComponents.length, bComponents.length);
|
||||
for (let i = 0; i < sharedLength; i++) {
|
||||
const result = compareStrings(aComponents[i], bComponents[i], ignoreCase);
|
||||
if (result !== Comparison.EqualTo) {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
return compareValues(aComponents.length, bComponents.length);
|
||||
}
|
||||
|
||||
export function containsPath(parent: string, child: string, currentDirectory: string, ignoreCase?: boolean) {
|
||||
if (parent === undefined || child === undefined) return false;
|
||||
if (parent === child) return true;
|
||||
parent = removeTrailingDirectorySeparator(parent);
|
||||
child = removeTrailingDirectorySeparator(child);
|
||||
if (parent === child) return true;
|
||||
const parentComponents = getNormalizedPathComponents(parent, currentDirectory);
|
||||
const childComponents = getNormalizedPathComponents(child, currentDirectory);
|
||||
if (childComponents.length < parentComponents.length) {
|
||||
return false;
|
||||
}
|
||||
|
||||
for (let i = 0; i < parentComponents.length; i++) {
|
||||
const result = compareStrings(parentComponents[i], childComponents[i], ignoreCase);
|
||||
if (result !== Comparison.EqualTo) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
export function fileExtensionIs(path: string, extension: string): boolean {
|
||||
const pathLen = path.length;
|
||||
const extLen = extension.length;
|
||||
return pathLen > extLen && path.substr(pathLen - extLen, extLen) === extension;
|
||||
}
|
||||
|
||||
export function fileExtensionIsAny(path: string, extensions: string[]): boolean {
|
||||
for (const extension of extensions) {
|
||||
if (fileExtensionIs(path, extension)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
// Reserved characters, forces escaping of any non-word (or digit), non-whitespace character.
// It may be inefficient (we could just match (/[-[\]{}()*+?.,\\^$|#\s]/g), but this is future
// proof.
const reservedCharacterPattern = /[^\w\s\/]/g;
// Char codes for "*" and "?", the two wildcard characters recognized in specs.
const wildcardCharCodes = [CharacterCodes.asterisk, CharacterCodes.question];
|
||||
|
||||
/**
 * Builds the source of a regular expression matching the given wildcard specs,
 * resolved against `basePath`. Returns undefined when no usable spec remains.
 *
 * `usage` selects how each pattern terminates:
 * - "files": the whole path must match (anchored with "$")
 * - "directories": each directory prefix of the spec also matches (components
 *   are wrapped in optional groups)
 * - "exclude": a match at a directory boundary excludes the whole subtree
 *   (anchored with "($|/)"); trailing "**" is permitted.
 */
export function getRegularExpressionForWildcard(specs: string[], basePath: string, usage: "files" | "directories" | "exclude") {
    if (specs === undefined || specs.length === 0) {
        return undefined;
    }

    let pattern = "";
    let hasWrittenSubpattern = false;
    spec: for (const spec of specs) {
        if (!spec) {
            continue;
        }

        let subpattern = "";
        let hasRecursiveDirectoryWildcard = false;
        let hasWrittenComponent = false;
        const components = getNormalizedPathComponents(spec, basePath);
        // A trailing "**" is only legal for "exclude" specs; such a spec is
        // silently skipped here (validateSpecs reports the diagnostic).
        if (usage !== "exclude" && components[components.length - 1] === "**") {
            continue spec;
        }

        // getNormalizedPathComponents includes the separator for the root component.
        // We need to remove to create our regex correctly.
        components[0] = removeTrailingDirectorySeparator(components[0]);

        // For "directories" usage, each component opens an optional group so
        // that every prefix directory of the spec also matches.
        let optionalCount = 0;
        for (const component of components) {
            if (component === "**") {
                // Only one recursive wildcard is allowed per spec; a second
                // one invalidates the whole spec.
                if (hasRecursiveDirectoryWildcard) {
                    continue spec;
                }

                // "**" becomes an optional "one-or-more path segments" group.
                subpattern += "(/.+?)?";
                hasRecursiveDirectoryWildcard = true;
                hasWrittenComponent = true;
            }
            else {
                if (usage === "directories") {
                    subpattern += "(";
                    optionalCount++;
                }

                if (hasWrittenComponent) {
                    subpattern += directorySeparator;
                }

                // Escape regex metacharacters; "*" and "?" become their
                // single-segment wildcard equivalents.
                subpattern += component.replace(reservedCharacterPattern, replaceWildcardCharacter);
                hasWrittenComponent = true;
            }
        }

        // Close the optional groups opened for "directories" usage.
        while (optionalCount > 0) {
            subpattern += ")?";
            optionalCount--;
        }

        // Specs are alternated together into one pattern.
        if (hasWrittenSubpattern) {
            pattern += "|";
        }

        pattern += "(" + subpattern + ")";
        hasWrittenSubpattern = true;
    }

    if (!pattern) {
        return undefined;
    }

    return "^(" + pattern + (usage === "exclude" ? ")($|/)" : ")$");
}
|
||||
|
||||
function replaceWildcardCharacter(match: string) {
|
||||
return match === "*" ? "[^/]*" : match === "?" ? "[^/]" : "\\" + match;
|
||||
}
|
||||
|
||||
/** The files and subdirectories found directly within a single directory. */
export interface FileSystemEntries {
    files: string[];
    directories: string[];
}
|
||||
|
||||
/** Precomputed pattern sources and traversal roots used by matchFiles. */
interface FileMatcherPatterns {
    includeFilePattern: string;      // regex source matching files to include
    includeDirectoryPattern: string; // regex source matching directories to traverse
    excludePattern: string;          // regex source matching paths to exclude
    basePaths: string[];             // non-wildcard roots to begin traversal from
}
|
||||
|
||||
export function getFileMatcherPatterns(path: string, extensions: string[], excludes: string[], includes: string[], useCaseSensitiveFileNames: boolean, currentDirectory: string): FileMatcherPatterns {
|
||||
path = normalizePath(path);
|
||||
currentDirectory = normalizePath(currentDirectory);
|
||||
const absolutePath = combinePaths(currentDirectory, path);
|
||||
|
||||
return {
|
||||
includeFilePattern: getRegularExpressionForWildcard(includes, absolutePath, "files"),
|
||||
includeDirectoryPattern: getRegularExpressionForWildcard(includes, absolutePath, "directories"),
|
||||
excludePattern: getRegularExpressionForWildcard(excludes, absolutePath, "exclude"),
|
||||
basePaths: getBasePaths(path, includes, useCaseSensitiveFileNames)
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Enumerates files under `path` matching the given extension, include, and
 * exclude constraints, using `getFileSystemEntries` to read the file system.
 *
 * @param path The directory to search (may be relative to currentDirectory).
 * @param extensions Allowed file extensions; undefined/empty allows any.
 * @param excludes Wildcard specs for paths to exclude.
 * @param includes Wildcard specs for files to include.
 * @param useCaseSensitiveFileNames Whether matching is case sensitive.
 * @param currentDirectory Directory relative paths resolve against.
 * @param getFileSystemEntries Reads the files/directories of one directory.
 */
export function matchFiles(path: string, extensions: string[], excludes: string[], includes: string[], useCaseSensitiveFileNames: boolean, currentDirectory: string, getFileSystemEntries: (path: string) => FileSystemEntries): string[] {
    path = normalizePath(path);
    currentDirectory = normalizePath(currentDirectory);

    const patterns = getFileMatcherPatterns(path, extensions, excludes, includes, useCaseSensitiveFileNames, currentDirectory);

    // Compile each pattern source once; "i" makes matching case-insensitive.
    const regexFlag = useCaseSensitiveFileNames ? "" : "i";
    const includeFileRegex = patterns.includeFilePattern && new RegExp(patterns.includeFilePattern, regexFlag);
    const includeDirectoryRegex = patterns.includeDirectoryPattern && new RegExp(patterns.includeDirectoryPattern, regexFlag);
    const excludeRegex = patterns.excludePattern && new RegExp(patterns.excludePattern, regexFlag);

    const result: string[] = [];
    for (const basePath of patterns.basePaths) {
        visitDirectory(basePath, combinePaths(currentDirectory, basePath));
    }
    return result;

    // Depth-first walk: `path` keeps the caller's (possibly relative) form for
    // output, while `absolutePath` is what the regexes were built against.
    function visitDirectory(path: string, absolutePath: string) {
        const { files, directories } = getFileSystemEntries(path);

        for (const current of files) {
            const name = combinePaths(path, current);
            const absoluteName = combinePaths(absolutePath, current);
            if ((!extensions || fileExtensionIsAny(name, extensions)) &&
                (!includeFileRegex || includeFileRegex.test(absoluteName)) &&
                (!excludeRegex || !excludeRegex.test(absoluteName))) {
                result.push(name);
            }
        }

        for (const current of directories) {
            const name = combinePaths(path, current);
            const absoluteName = combinePaths(absolutePath, current);
            if ((!includeDirectoryRegex || includeDirectoryRegex.test(absoluteName)) &&
                (!excludeRegex || !excludeRegex.test(absoluteName))) {
                visitDirectory(name, absoluteName);
            }
        }
    }
}
|
||||
|
||||
/**
 * Computes the unique non-wildcard base paths amongst the provided include patterns.
 * The search path itself is always a base path; rooted include specs may add more.
 */
function getBasePaths(path: string, includes: string[], useCaseSensitiveFileNames: boolean) {
    // Storage for our results in the form of literal paths (e.g. the paths as written by the user).
    const basePaths: string[] = [path];
    if (includes) {
        // Storage for literal base paths amongst the include patterns.
        const includeBasePaths: string[] = [];
        for (const include of includes) {
            // Only rooted include specs contribute extra base paths; relative
            // specs are already covered by `path` itself.
            if (isRootedDiskPath(include)) {
                // The base path is everything up to the first wildcard
                // character, truncated at a directory boundary.
                const wildcardOffset = indexOfAnyCharCode(include, wildcardCharCodes);
                const includeBasePath = wildcardOffset < 0
                    ? removeTrailingDirectorySeparator(getDirectoryPath(include))
                    : include.substring(0, include.lastIndexOf(directorySeparator, wildcardOffset));

                // Append the literal and canonical candidate base paths.
                includeBasePaths.push(includeBasePath);
            }
        }

        // Sort the offsets array using either the literal or canonical path representations.
        includeBasePaths.sort(useCaseSensitiveFileNames ? compareStrings : compareStringsCaseInsensitive);

        // Iterate over each include base path and include unique base paths that are not a
        // subpath of an existing base path
        include: for (let i = 0; i < includeBasePaths.length; i++) {
            const includeBasePath = includeBasePaths[i];
            for (let j = 0; j < basePaths.length; j++) {
                if (containsPath(basePaths[j], includeBasePath, path, !useCaseSensitiveFileNames)) {
                    continue include;
                }
            }

            basePaths.push(includeBasePath);
        }
    }

    return basePaths;
}
|
||||
|
||||
export function ensureScriptKind(fileName: string, scriptKind?: ScriptKind): ScriptKind {
|
||||
// Using scriptKind as a condition handles both:
|
||||
// - 'scriptKind' is unspecified and thus it is `undefined`
|
||||
@ -857,6 +1151,59 @@ namespace ts {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extension boundaries by priority. Lower numbers indicate higher priorities, and are
|
||||
* aligned to the offset of the highest priority extension in the
|
||||
* allSupportedExtensions array.
|
||||
*/
|
||||
export const enum ExtensionPriority {
|
||||
TypeScriptFiles = 0,
|
||||
DeclarationAndJavaScriptFiles = 2,
|
||||
Limit = 5,
|
||||
|
||||
Highest = TypeScriptFiles,
|
||||
Lowest = DeclarationAndJavaScriptFiles,
|
||||
}
|
||||
|
||||
export function getExtensionPriority(path: string, supportedExtensions: string[]): ExtensionPriority {
|
||||
for (let i = supportedExtensions.length - 1; i >= 0; i--) {
|
||||
if (fileExtensionIs(path, supportedExtensions[i])) {
|
||||
return adjustExtensionPriority(<ExtensionPriority>i);
|
||||
}
|
||||
}
|
||||
|
||||
// If its not in the list of supported extensions, this is likely a
|
||||
// TypeScript file with a non-ts extension
|
||||
return ExtensionPriority.Highest;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adjusts an extension priority to be the highest priority within the same range.
|
||||
*/
|
||||
export function adjustExtensionPriority(extensionPriority: ExtensionPriority): ExtensionPriority {
|
||||
if (extensionPriority < ExtensionPriority.DeclarationAndJavaScriptFiles) {
|
||||
return ExtensionPriority.TypeScriptFiles;
|
||||
}
|
||||
else if (extensionPriority < ExtensionPriority.Limit) {
|
||||
return ExtensionPriority.DeclarationAndJavaScriptFiles;
|
||||
}
|
||||
else {
|
||||
return ExtensionPriority.Limit;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the next lowest extension priority for a given priority.
|
||||
*/
|
||||
export function getNextLowestExtensionPriority(extensionPriority: ExtensionPriority): ExtensionPriority {
|
||||
if (extensionPriority < ExtensionPriority.DeclarationAndJavaScriptFiles) {
|
||||
return ExtensionPriority.DeclarationAndJavaScriptFiles;
|
||||
}
|
||||
else {
|
||||
return ExtensionPriority.Limit;
|
||||
}
|
||||
}
|
||||
|
||||
const extensionsToRemove = [".d.ts", ".ts", ".js", ".tsx", ".jsx"];
|
||||
export function removeFileExtension(path: string): string {
|
||||
for (const ext of extensionsToRemove) {
|
||||
@ -876,6 +1223,10 @@ namespace ts {
|
||||
return ext === ".jsx" || ext === ".tsx";
|
||||
}
|
||||
|
||||
export function changeExtension<T extends string | Path>(path: T, newExtension: string): T {
|
||||
return <T>(removeFileExtension(path) + newExtension);
|
||||
}
|
||||
|
||||
export interface ObjectAllocator {
|
||||
getNodeConstructor(): new (kind: SyntaxKind, pos?: number, end?: number) => Node;
|
||||
getSourceFileConstructor(): new (kind: SyntaxKind, pos?: number, end?: number) => SourceFile;
|
||||
|
||||
@ -2232,6 +2232,14 @@
|
||||
"category": "Error",
|
||||
"code": 5009
|
||||
},
|
||||
"File specification cannot end in a recursive directory wildcard ('**'): '{0}'.": {
|
||||
"category": "Error",
|
||||
"code": 5010
|
||||
},
|
||||
"File specification cannot contain multiple recursive directory wildcards ('**'): '{0}'.": {
|
||||
"category": "Error",
|
||||
"code": 5011
|
||||
},
|
||||
"Cannot read file '{0}': {1}": {
|
||||
"category": "Error",
|
||||
"code": 5012
|
||||
|
||||
@ -26,7 +26,7 @@ namespace ts {
|
||||
getExecutingFilePath(): string;
|
||||
getCurrentDirectory(): string;
|
||||
getDirectories(path: string): string[];
|
||||
readDirectory(path: string, extension?: string, exclude?: string[]): string[];
|
||||
readDirectory(path: string, extensions?: string[], exclude?: string[], include?: string[]): string[];
|
||||
getModifiedTime?(path: string): Date;
|
||||
createHash?(data: string): string;
|
||||
getMemoryUsage?(): number;
|
||||
@ -74,7 +74,7 @@ namespace ts {
|
||||
readFile(path: string): string;
|
||||
writeFile(path: string, contents: string): void;
|
||||
getDirectories(path: string): string[];
|
||||
readDirectory(path: string, extension?: string, exclude?: string[]): string[];
|
||||
readDirectory(path: string, extensions?: string[], basePaths?: string[], excludeEx?: string, includeFileEx?: string, includeDirEx?: string): string[];
|
||||
watchFile?(path: string, callback: FileWatcherCallback): FileWatcher;
|
||||
watchDirectory?(path: string, callback: DirectoryWatcherCallback, recursive?: boolean): FileWatcher;
|
||||
realpath(path: string): string;
|
||||
@ -85,6 +85,7 @@ namespace ts {
|
||||
function getWScriptSystem(): System {
|
||||
|
||||
const fso = new ActiveXObject("Scripting.FileSystemObject");
|
||||
const shell = new ActiveXObject("WScript.Shell");
|
||||
|
||||
const fileStream = new ActiveXObject("ADODB.Stream");
|
||||
fileStream.Type = 2 /*text*/;
|
||||
@ -152,10 +153,6 @@ namespace ts {
|
||||
}
|
||||
}
|
||||
|
||||
function getCanonicalPath(path: string): string {
|
||||
return path.toLowerCase();
|
||||
}
|
||||
|
||||
function getNames(collection: any): string[] {
|
||||
const result: string[] = [];
|
||||
for (let e = new Enumerator(collection); !e.atEnd(); e.moveNext()) {
|
||||
@ -169,28 +166,20 @@ namespace ts {
|
||||
return getNames(folder.subfolders);
|
||||
}
|
||||
|
||||
function readDirectory(path: string, extension?: string, exclude?: string[]): string[] {
|
||||
const result: string[] = [];
|
||||
exclude = map(exclude, s => getCanonicalPath(combinePaths(path, s)));
|
||||
visitDirectory(path);
|
||||
return result;
|
||||
function visitDirectory(path: string) {
|
||||
function getAccessibleFileSystemEntries(path: string): FileSystemEntries {
|
||||
try {
|
||||
const folder = fso.GetFolder(path || ".");
|
||||
const files = getNames(folder.files);
|
||||
for (const current of files) {
|
||||
const name = combinePaths(path, current);
|
||||
if ((!extension || fileExtensionIs(name, extension)) && !contains(exclude, getCanonicalPath(name))) {
|
||||
result.push(name);
|
||||
}
|
||||
}
|
||||
const subfolders = getNames(folder.subfolders);
|
||||
for (const current of subfolders) {
|
||||
const name = combinePaths(path, current);
|
||||
if (!contains(exclude, getCanonicalPath(name))) {
|
||||
visitDirectory(name);
|
||||
}
|
||||
}
|
||||
const directories = getNames(folder.subfolders);
|
||||
return { files, directories };
|
||||
}
|
||||
catch (e) {
|
||||
return { files: [], directories: [] };
|
||||
}
|
||||
}
|
||||
|
||||
function readDirectory(path: string, extensions?: string[], excludes?: string[], includes?: string[]): string[] {
|
||||
return matchFiles(path, extensions, excludes, includes, /*useCaseSensitiveFileNames*/ false, shell.CurrentDirectory, getAccessibleFileSystemEntries);
|
||||
}
|
||||
|
||||
return {
|
||||
@ -220,7 +209,7 @@ namespace ts {
|
||||
return WScript.ScriptFullName;
|
||||
},
|
||||
getCurrentDirectory() {
|
||||
return new ActiveXObject("WScript.Shell").CurrentDirectory;
|
||||
return shell.CurrentDirectory;
|
||||
},
|
||||
getDirectories,
|
||||
readDirectory,
|
||||
@ -381,8 +370,43 @@ namespace ts {
|
||||
}
|
||||
}
|
||||
|
||||
function getCanonicalPath(path: string): string {
|
||||
return useCaseSensitiveFileNames ? path : path.toLowerCase();
|
||||
function getAccessibleFileSystemEntries(path: string): FileSystemEntries {
|
||||
try {
|
||||
const entries = _fs.readdirSync(path || ".").sort();
|
||||
const files: string[] = [];
|
||||
const directories: string[] = [];
|
||||
for (const entry of entries) {
|
||||
// This is necessary because on some file system node fails to exclude
|
||||
// "." and "..". See https://github.com/nodejs/node/issues/4002
|
||||
if (entry === "." || entry === "..") {
|
||||
continue;
|
||||
}
|
||||
const name = combinePaths(path, entry);
|
||||
|
||||
let stat: any;
|
||||
try {
|
||||
stat = _fs.statSync(name);
|
||||
}
|
||||
catch (e) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (stat.isFile()) {
|
||||
files.push(entry);
|
||||
}
|
||||
else if (stat.isDirectory()) {
|
||||
directories.push(entry);
|
||||
}
|
||||
}
|
||||
return { files, directories };
|
||||
}
|
||||
catch (e) {
|
||||
return { files: [], directories: [] };
|
||||
}
|
||||
}
|
||||
|
||||
function readDirectory(path: string, extensions?: string[], excludes?: string[], includes?: string[]): string[] {
|
||||
return matchFiles(path, extensions, excludes, includes, useCaseSensitiveFileNames, process.cwd(), getAccessibleFileSystemEntries);
|
||||
}
|
||||
|
||||
const enum FileSystemEntryKind {
|
||||
@ -415,39 +439,6 @@ namespace ts {
|
||||
return filter<string>(_fs.readdirSync(path), p => fileSystemEntryExists(combinePaths(path, p), FileSystemEntryKind.Directory));
|
||||
}
|
||||
|
||||
function readDirectory(path: string, extension?: string, exclude?: string[]): string[] {
|
||||
const result: string[] = [];
|
||||
exclude = map(exclude, s => getCanonicalPath(combinePaths(path, s)));
|
||||
visitDirectory(path);
|
||||
return result;
|
||||
function visitDirectory(path: string) {
|
||||
const files = _fs.readdirSync(path || ".").sort();
|
||||
const directories: string[] = [];
|
||||
for (const current of files) {
|
||||
// This is necessary because on some file system node fails to exclude
|
||||
// "." and "..". See https://github.com/nodejs/node/issues/4002
|
||||
if (current === "." || current === "..") {
|
||||
continue;
|
||||
}
|
||||
const name = combinePaths(path, current);
|
||||
if (!contains(exclude, getCanonicalPath(name))) {
|
||||
const stat = _fs.statSync(name);
|
||||
if (stat.isFile()) {
|
||||
if (!extension || fileExtensionIs(name, extension)) {
|
||||
result.push(name);
|
||||
}
|
||||
}
|
||||
else if (stat.isDirectory()) {
|
||||
directories.push(name);
|
||||
}
|
||||
}
|
||||
}
|
||||
for (const current of directories) {
|
||||
visitDirectory(current);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
args: process.argv.slice(2),
|
||||
newLine: _os.EOL,
|
||||
@ -586,7 +577,10 @@ namespace ts {
|
||||
getExecutingFilePath: () => ChakraHost.executingFile,
|
||||
getCurrentDirectory: () => ChakraHost.currentDirectory,
|
||||
getDirectories: ChakraHost.getDirectories,
|
||||
readDirectory: ChakraHost.readDirectory,
|
||||
readDirectory: (path: string, extensions?: string[], excludes?: string[], includes?: string[]) => {
|
||||
const pattern = getFileMatcherPatterns(path, extensions, excludes, includes, !!ChakraHost.useCaseSensitiveFileNames, ChakraHost.currentDirectory);
|
||||
return ChakraHost.readDirectory(path, extensions, pattern.basePaths, pattern.excludePattern, pattern.includeFilePattern, pattern.includeDirectoryPattern);
|
||||
},
|
||||
exit: ChakraHost.quit,
|
||||
realpath
|
||||
};
|
||||
|
||||
@ -1677,7 +1677,15 @@ namespace ts {
|
||||
}
|
||||
|
||||
export interface ParseConfigHost {
|
||||
readDirectory(rootDir: string, extension: string, exclude: string[]): string[];
|
||||
useCaseSensitiveFileNames: boolean;
|
||||
|
||||
readDirectory(rootDir: string, extensions: string[], excludes: string[], includes: string[]): string[];
|
||||
|
||||
/**
|
||||
* Gets a value indicating whether the specified path exists and is a file.
|
||||
* @param path The path to test.
|
||||
*/
|
||||
fileExists(path: string): boolean;
|
||||
}
|
||||
|
||||
export interface WriteFileCallback {
|
||||
@ -2660,6 +2668,17 @@ namespace ts {
|
||||
fileNames: string[];
|
||||
raw?: any;
|
||||
errors: Diagnostic[];
|
||||
wildcardDirectories?: Map<WatchDirectoryFlags>;
|
||||
}
|
||||
|
||||
export const enum WatchDirectoryFlags {
|
||||
None = 0,
|
||||
Recursive = 1 << 0,
|
||||
}
|
||||
|
||||
export interface ExpandResult {
|
||||
fileNames: string[];
|
||||
wildcardDirectories: Map<WatchDirectoryFlags>;
|
||||
}
|
||||
|
||||
/* @internal */
|
||||
|
||||
5
src/harness/external/chai.d.ts
vendored
5
src/harness/external/chai.d.ts
vendored
@ -167,8 +167,13 @@ declare module chai {
|
||||
module assert {
|
||||
function equal(actual: any, expected: any, message?: string): void;
|
||||
function notEqual(actual: any, expected: any, message?: string): void;
|
||||
function deepEqual<T>(actual: T, expected: T, message?: string): void;
|
||||
function notDeepEqual<T>(actual: T, expected: T, message?: string): void;
|
||||
function lengthOf(object: any[], length: number, message?: string): void;
|
||||
function isTrue(value: any, message?: string): void;
|
||||
function isFalse(value: any, message?: string): void;
|
||||
function isOk(actual: any, message?: string): void;
|
||||
function isUndefined(value: any, message?: string): void;
|
||||
function isDefined(value: any, message?: string): void;
|
||||
}
|
||||
}
|
||||
@ -23,6 +23,7 @@
|
||||
/// <reference path="external\chai.d.ts"/>
|
||||
/// <reference path="sourceMapRecorder.ts"/>
|
||||
/// <reference path="runnerbase.ts"/>
|
||||
/// <reference path="virtualFileSystem.ts" />
|
||||
|
||||
// Block scoped definitions work poorly for global variables, temporarily enable var
|
||||
/* tslint:disable:no-var-keyword */
|
||||
@ -443,7 +444,7 @@ namespace Harness {
|
||||
args(): string[];
|
||||
getExecutingFilePath(): string;
|
||||
exit(exitCode?: number): void;
|
||||
readDirectory(path: string, extension?: string, exclude?: string[]): string[];
|
||||
readDirectory(path: string, extension?: string[], exclude?: string[], include?: string[]): string[];
|
||||
}
|
||||
export var IO: IO;
|
||||
|
||||
@ -482,7 +483,7 @@ namespace Harness {
|
||||
export const directoryExists: typeof IO.directoryExists = fso.FolderExists;
|
||||
export const fileExists: typeof IO.fileExists = fso.FileExists;
|
||||
export const log: typeof IO.log = global.WScript && global.WScript.StdOut.WriteLine;
|
||||
export const readDirectory: typeof IO.readDirectory = (path, extension, exclude) => ts.sys.readDirectory(path, extension, exclude);
|
||||
export const readDirectory: typeof IO.readDirectory = (path, extension, exclude, include) => ts.sys.readDirectory(path, extension, exclude, include);
|
||||
|
||||
export function createDirectory(path: string) {
|
||||
if (directoryExists(path)) {
|
||||
@ -552,7 +553,7 @@ namespace Harness {
|
||||
export const fileExists: typeof IO.fileExists = fs.existsSync;
|
||||
export const log: typeof IO.log = s => console.log(s);
|
||||
|
||||
export const readDirectory: typeof IO.readDirectory = (path, extension, exclude) => ts.sys.readDirectory(path, extension, exclude);
|
||||
export const readDirectory: typeof IO.readDirectory = (path, extension, exclude, include) => ts.sys.readDirectory(path, extension, exclude, include);
|
||||
|
||||
export function createDirectory(path: string) {
|
||||
if (!directoryExists(path)) {
|
||||
@ -740,8 +741,22 @@ namespace Harness {
|
||||
Http.writeToServerSync(serverRoot + path, "WRITE", contents);
|
||||
}
|
||||
|
||||
export function readDirectory(path: string, extension?: string, exclude?: string[]) {
|
||||
return listFiles(path).filter(f => !extension || ts.fileExtensionIs(f, extension));
|
||||
export function readDirectory(path: string, extension?: string[], exclude?: string[], include?: string[]) {
|
||||
const fs = new Utils.VirtualFileSystem(path, useCaseSensitiveFileNames());
|
||||
for (const file in listFiles(path)) {
|
||||
fs.addFile(file);
|
||||
}
|
||||
return ts.matchFiles(path, extension, exclude, include, useCaseSensitiveFileNames(), getCurrentDirectory(), path => {
|
||||
const entry = fs.traversePath(path);
|
||||
if (entry && entry.isDirectory()) {
|
||||
const directory = <Utils.VirtualDirectory>entry;
|
||||
return {
|
||||
files: ts.map(directory.getFiles(), f => f.name),
|
||||
directories: ts.map(directory.getDirectories(), d => d.name)
|
||||
};
|
||||
}
|
||||
return { files: [], directories: [] };
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1531,7 +1546,9 @@ namespace Harness {
|
||||
|
||||
// unit tests always list files explicitly
|
||||
const parseConfigHost: ts.ParseConfigHost = {
|
||||
readDirectory: (name) => []
|
||||
useCaseSensitiveFileNames: false,
|
||||
readDirectory: (name) => [],
|
||||
fileExists: (name) => true
|
||||
};
|
||||
|
||||
// check if project has tsconfig.json in the list of files
|
||||
|
||||
@ -288,6 +288,12 @@ namespace Harness.LanguageService {
|
||||
readDirectory(rootDir: string, extension: string): string {
|
||||
throw new Error("NYI");
|
||||
}
|
||||
readDirectoryNames(path: string): string {
|
||||
throw new Error("Not implemented.");
|
||||
}
|
||||
readFileNames(path: string): string {
|
||||
throw new Error("Not implemented.");
|
||||
}
|
||||
fileExists(fileName: string) { return this.getScriptInfo(fileName) !== undefined; }
|
||||
readFile(fileName: string) {
|
||||
const snapshot = this.nativeHost.getScriptSnapshot(fileName);
|
||||
@ -611,7 +617,7 @@ namespace Harness.LanguageService {
|
||||
return [];
|
||||
}
|
||||
|
||||
readDirectory(path: string, extension?: string): string[] {
|
||||
readDirectory(path: string, extension?: string[], exclude?: string[], include?: string[]): string[] {
|
||||
throw new Error("Not implemented Yet.");
|
||||
}
|
||||
|
||||
@ -695,4 +701,3 @@ namespace Harness.LanguageService {
|
||||
getPreProcessedFileInfo(fileName: string, fileContents: string): ts.PreProcessedFileInfo { throw new Error("getPreProcessedFileInfo is not available using the server interface."); }
|
||||
}
|
||||
}
|
||||
|
||||
@ -61,8 +61,9 @@ interface IOLog {
|
||||
}[];
|
||||
directoriesRead: {
|
||||
path: string,
|
||||
extension: string,
|
||||
extension: string[],
|
||||
exclude: string[],
|
||||
include: string[],
|
||||
result: string[]
|
||||
}[];
|
||||
}
|
||||
@ -217,9 +218,9 @@ namespace Playback {
|
||||
memoize(path => findResultByPath(wrapper, replayLog.filesRead, path).contents));
|
||||
|
||||
wrapper.readDirectory = recordReplay(wrapper.readDirectory, underlying)(
|
||||
(path, extension, exclude) => {
|
||||
const result = (<ts.System>underlying).readDirectory(path, extension, exclude);
|
||||
const logEntry = { path, extension, exclude, result };
|
||||
(path, extension, exclude, include) => {
|
||||
const result = (<ts.System>underlying).readDirectory(path, extension, exclude, include);
|
||||
const logEntry = { path, extension, exclude, include, result };
|
||||
recordLog.directoriesRead.push(logEntry);
|
||||
return result;
|
||||
},
|
||||
|
||||
@ -218,7 +218,12 @@ class ProjectRunner extends RunnerBase {
|
||||
}
|
||||
|
||||
const configObject = result.config;
|
||||
const configParseResult = ts.parseJsonConfigFileContent(configObject, { readDirectory }, ts.getDirectoryPath(configFileName), compilerOptions);
|
||||
const configParseHost: ts.ParseConfigHost = {
|
||||
useCaseSensitiveFileNames: Harness.IO.useCaseSensitiveFileNames(),
|
||||
fileExists,
|
||||
readDirectory,
|
||||
};
|
||||
const configParseResult = ts.parseJsonConfigFileContent(configObject, configParseHost, ts.getDirectoryPath(configFileName), compilerOptions);
|
||||
if (configParseResult.errors.length > 0) {
|
||||
return {
|
||||
moduleKind,
|
||||
@ -276,8 +281,8 @@ class ProjectRunner extends RunnerBase {
|
||||
: ts.normalizeSlashes(testCase.projectRoot) + "/" + ts.normalizeSlashes(fileName);
|
||||
}
|
||||
|
||||
function readDirectory(rootDir: string, extension: string, exclude: string[]): string[] {
|
||||
const harnessReadDirectoryResult = Harness.IO.readDirectory(getFileNameInTheProjectTest(rootDir), extension, exclude);
|
||||
function readDirectory(rootDir: string, extension: string[], exclude: string[], include: string[]): string[] {
|
||||
const harnessReadDirectoryResult = Harness.IO.readDirectory(getFileNameInTheProjectTest(rootDir), extension, exclude, include);
|
||||
const result: string[] = [];
|
||||
for (let i = 0; i < harnessReadDirectoryResult.length; i++) {
|
||||
result[i] = ts.getRelativePathToDirectoryOrUrl(testCase.projectRoot, harnessReadDirectoryResult[i],
|
||||
|
||||
@ -75,7 +75,12 @@ namespace RWC {
|
||||
if (tsconfigFile) {
|
||||
const tsconfigFileContents = getHarnessCompilerInputUnit(tsconfigFile.path);
|
||||
const parsedTsconfigFileContents = ts.parseConfigFileTextToJson(tsconfigFile.path, tsconfigFileContents.content);
|
||||
const configParseResult = ts.parseJsonConfigFileContent(parsedTsconfigFileContents.config, Harness.IO, ts.getDirectoryPath(tsconfigFile.path));
|
||||
const configParseHost: ts.ParseConfigHost = {
|
||||
useCaseSensitiveFileNames: Harness.IO.useCaseSensitiveFileNames(),
|
||||
fileExists: Harness.IO.fileExists,
|
||||
readDirectory: Harness.IO.readDirectory,
|
||||
};
|
||||
const configParseResult = ts.parseJsonConfigFileContent(parsedTsconfigFileContents.config, configParseHost, ts.getDirectoryPath(tsconfigFile.path));
|
||||
fileNames = configParseResult.fileNames;
|
||||
opts.options = ts.extend(opts.options, configParseResult.options);
|
||||
}
|
||||
|
||||
186
src/harness/virtualFileSystem.ts
Normal file
186
src/harness/virtualFileSystem.ts
Normal file
@ -0,0 +1,186 @@
|
||||
/// <reference path="harness.ts" />
|
||||
/// <reference path="..\compiler\commandLineParser.ts"/>
|
||||
namespace Utils {
|
||||
export class VirtualFileSystemEntry {
|
||||
fileSystem: VirtualFileSystem;
|
||||
name: string;
|
||||
|
||||
constructor(fileSystem: VirtualFileSystem, name: string) {
|
||||
this.fileSystem = fileSystem;
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
isDirectory() { return false; }
|
||||
isFile() { return false; }
|
||||
isFileSystem() { return false; }
|
||||
}
|
||||
|
||||
export class VirtualFile extends VirtualFileSystemEntry {
|
||||
content: string;
|
||||
isFile() { return true; }
|
||||
}
|
||||
|
||||
export abstract class VirtualFileSystemContainer extends VirtualFileSystemEntry {
|
||||
abstract getFileSystemEntries(): VirtualFileSystemEntry[];
|
||||
|
||||
getFileSystemEntry(name: string): VirtualFileSystemEntry {
|
||||
for (const entry of this.getFileSystemEntries()) {
|
||||
if (this.fileSystem.sameName(entry.name, name)) {
|
||||
return entry;
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
getDirectories(): VirtualDirectory[] {
|
||||
return <VirtualDirectory[]>ts.filter(this.getFileSystemEntries(), entry => entry.isDirectory());
|
||||
}
|
||||
|
||||
getFiles(): VirtualFile[] {
|
||||
return <VirtualFile[]>ts.filter(this.getFileSystemEntries(), entry => entry.isFile());
|
||||
}
|
||||
|
||||
getDirectory(name: string): VirtualDirectory {
|
||||
const entry = this.getFileSystemEntry(name);
|
||||
return entry.isDirectory() ? <VirtualDirectory>entry : undefined;
|
||||
}
|
||||
|
||||
getFile(name: string): VirtualFile {
|
||||
const entry = this.getFileSystemEntry(name);
|
||||
return entry.isFile() ? <VirtualFile>entry : undefined;
|
||||
}
|
||||
}
|
||||
|
||||
export class VirtualDirectory extends VirtualFileSystemContainer {
|
||||
private entries: VirtualFileSystemEntry[] = [];
|
||||
|
||||
isDirectory() { return true; }
|
||||
|
||||
getFileSystemEntries() { return this.entries.slice(); }
|
||||
|
||||
addDirectory(name: string): VirtualDirectory {
|
||||
const entry = this.getFileSystemEntry(name);
|
||||
if (entry === undefined) {
|
||||
const directory = new VirtualDirectory(this.fileSystem, name);
|
||||
this.entries.push(directory);
|
||||
return directory;
|
||||
}
|
||||
else if (entry.isDirectory()) {
|
||||
return <VirtualDirectory>entry;
|
||||
}
|
||||
else {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
addFile(name: string, content?: string): VirtualFile {
|
||||
const entry = this.getFileSystemEntry(name);
|
||||
if (entry === undefined) {
|
||||
const file = new VirtualFile(this.fileSystem, name);
|
||||
file.content = content;
|
||||
this.entries.push(file);
|
||||
return file;
|
||||
}
|
||||
else if (entry.isFile()) {
|
||||
const file = <VirtualFile>entry;
|
||||
file.content = content;
|
||||
return file;
|
||||
}
|
||||
else {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export class VirtualFileSystem extends VirtualFileSystemContainer {
|
||||
private root: VirtualDirectory;
|
||||
|
||||
currentDirectory: string;
|
||||
useCaseSensitiveFileNames: boolean;
|
||||
|
||||
constructor(currentDirectory: string, useCaseSensitiveFileNames: boolean) {
|
||||
super(undefined, "");
|
||||
this.fileSystem = this;
|
||||
this.root = new VirtualDirectory(this, "");
|
||||
this.currentDirectory = currentDirectory;
|
||||
this.useCaseSensitiveFileNames = useCaseSensitiveFileNames;
|
||||
}
|
||||
|
||||
isFileSystem() { return true; }
|
||||
|
||||
getFileSystemEntries() { return this.root.getFileSystemEntries(); }
|
||||
|
||||
addDirectory(path: string) {
|
||||
const components = ts.getNormalizedPathComponents(path, this.currentDirectory);
|
||||
let directory: VirtualDirectory = this.root;
|
||||
for (const component of components) {
|
||||
directory = directory.addDirectory(component);
|
||||
if (directory === undefined) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return directory;
|
||||
}
|
||||
|
||||
addFile(path: string, content?: string) {
|
||||
const absolutePath = ts.getNormalizedAbsolutePath(path, this.currentDirectory);
|
||||
const fileName = ts.getBaseFileName(path);
|
||||
const directoryPath = ts.getDirectoryPath(absolutePath);
|
||||
const directory = this.addDirectory(directoryPath);
|
||||
return directory ? directory.addFile(fileName, content) : undefined;
|
||||
}
|
||||
|
||||
fileExists(path: string) {
|
||||
const entry = this.traversePath(path);
|
||||
return entry !== undefined && entry.isFile();
|
||||
}
|
||||
|
||||
sameName(a: string, b: string) {
|
||||
return this.useCaseSensitiveFileNames ? a === b : a.toLowerCase() === b.toLowerCase();
|
||||
}
|
||||
|
||||
traversePath(path: string) {
|
||||
let directory: VirtualDirectory = this.root;
|
||||
for (const component of ts.getNormalizedPathComponents(path, this.currentDirectory)) {
|
||||
const entry = directory.getFileSystemEntry(component);
|
||||
if (entry === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
else if (entry.isDirectory()) {
|
||||
directory = <VirtualDirectory>entry;
|
||||
}
|
||||
else {
|
||||
return entry;
|
||||
}
|
||||
}
|
||||
|
||||
return directory;
|
||||
}
|
||||
}
|
||||
|
||||
export class MockParseConfigHost extends VirtualFileSystem implements ts.ParseConfigHost {
|
||||
constructor(currentDirectory: string, ignoreCase: boolean, files: string[]) {
|
||||
super(currentDirectory, ignoreCase);
|
||||
for (const file of files) {
|
||||
this.addFile(file);
|
||||
}
|
||||
}
|
||||
|
||||
readDirectory(path: string, extensions: string[], excludes: string[], includes: string[]) {
|
||||
return ts.matchFiles(path, extensions, excludes, includes, this.useCaseSensitiveFileNames, this.currentDirectory, (path: string) => this.getAccessibleFileSystemEntries(path));
|
||||
}
|
||||
|
||||
getAccessibleFileSystemEntries(path: string) {
|
||||
const entry = this.traversePath(path);
|
||||
if (entry && entry.isDirectory()) {
|
||||
const directory = <VirtualDirectory>entry;
|
||||
return {
|
||||
files: ts.map(directory.getFiles(), f => f.name),
|
||||
directories: ts.map(directory.getDirectories(), d => d.name)
|
||||
};
|
||||
}
|
||||
return { files: [], directories: [] };
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -375,6 +375,7 @@ namespace ts.server {
|
||||
export interface ProjectOptions {
|
||||
// these fields can be present in the project file
|
||||
files?: string[];
|
||||
wildcardDirectories?: ts.Map<ts.WatchDirectoryFlags>;
|
||||
compilerOptions?: ts.CompilerOptions;
|
||||
}
|
||||
|
||||
@ -383,6 +384,7 @@ namespace ts.server {
|
||||
projectFilename: string;
|
||||
projectFileWatcher: FileWatcher;
|
||||
directoryWatcher: FileWatcher;
|
||||
directoriesWatchedForWildcards: Map<FileWatcher>;
|
||||
// Used to keep track of what directories are watched for this project
|
||||
directoriesWatchedForTsconfig: string[] = [];
|
||||
program: ts.Program;
|
||||
@ -852,6 +854,8 @@ namespace ts.server {
|
||||
if (project.isConfiguredProject()) {
|
||||
project.projectFileWatcher.close();
|
||||
project.directoryWatcher.close();
|
||||
forEachValue(project.directoriesWatchedForWildcards, watcher => { watcher.close(); });
|
||||
delete project.directoriesWatchedForWildcards;
|
||||
this.configuredProjects = copyListRemovingItem(project, this.configuredProjects);
|
||||
}
|
||||
else {
|
||||
@ -1344,7 +1348,8 @@ namespace ts.server {
|
||||
else {
|
||||
const projectOptions: ProjectOptions = {
|
||||
files: parsedCommandLine.fileNames,
|
||||
compilerOptions: parsedCommandLine.options
|
||||
wildcardDirectories: parsedCommandLine.wildcardDirectories,
|
||||
compilerOptions: parsedCommandLine.options,
|
||||
};
|
||||
return { succeeded: true, projectOptions };
|
||||
}
|
||||
@ -1401,12 +1406,30 @@ namespace ts.server {
|
||||
}
|
||||
project.finishGraph();
|
||||
project.projectFileWatcher = this.host.watchFile(configFilename, _ => this.watchedProjectConfigFileChanged(project));
|
||||
this.log("Add recursive watcher for: " + ts.getDirectoryPath(configFilename));
|
||||
|
||||
const configDirectoryPath = ts.getDirectoryPath(configFilename);
|
||||
|
||||
this.log("Add recursive watcher for: " + configDirectoryPath);
|
||||
project.directoryWatcher = this.host.watchDirectory(
|
||||
ts.getDirectoryPath(configFilename),
|
||||
configDirectoryPath,
|
||||
path => this.directoryWatchedForSourceFilesChanged(project, path),
|
||||
/*recursive*/ true
|
||||
);
|
||||
|
||||
project.directoriesWatchedForWildcards = reduceProperties(projectOptions.wildcardDirectories, (watchers, flag, directory) => {
|
||||
if (comparePaths(configDirectoryPath, directory, ".", !this.host.useCaseSensitiveFileNames) !== Comparison.EqualTo) {
|
||||
const recursive = (flag & WatchDirectoryFlags.Recursive) !== 0;
|
||||
this.log(`Add ${ recursive ? "recursive " : ""}watcher for: ${directory}`);
|
||||
watchers[directory] = this.host.watchDirectory(
|
||||
directory,
|
||||
path => this.directoryWatchedForSourceFilesChanged(project, path),
|
||||
recursive
|
||||
);
|
||||
}
|
||||
|
||||
return watchers;
|
||||
}, <Map<FileWatcher>>{});
|
||||
|
||||
return { success: true, project: project, errors };
|
||||
}
|
||||
}
|
||||
|
||||
@ -10,7 +10,7 @@ namespace ts.JsTyping {
|
||||
directoryExists: (path: string) => boolean;
|
||||
fileExists: (fileName: string) => boolean;
|
||||
readFile: (path: string, encoding?: string) => string;
|
||||
readDirectory: (path: string, extension?: string, exclude?: string[], depth?: number) => string[];
|
||||
readDirectory: (rootDir: string, extensions: string[], excludes: string[], includes: string[], depth?: number) => string[];
|
||||
};
|
||||
|
||||
interface PackageJson {
|
||||
@ -187,7 +187,7 @@ namespace ts.JsTyping {
|
||||
}
|
||||
|
||||
const typingNames: string[] = [];
|
||||
const fileNames = host.readDirectory(nodeModulesPath, "*.json", /*exclude*/ undefined, /*depth*/ 2);
|
||||
const fileNames = host.readDirectory(nodeModulesPath, ["*.json"], /*excludes*/ undefined, /*includes*/ undefined, /*depth*/ 2);
|
||||
for (const fileName of fileNames) {
|
||||
const normalizedFileName = normalizePath(fileName);
|
||||
if (getBaseFileName(normalizedFileName) !== "package.json") {
|
||||
|
||||
@ -80,8 +80,9 @@ namespace ts {
|
||||
* @param exclude A JSON encoded string[] containing the paths to exclude
|
||||
* when enumerating the directory.
|
||||
*/
|
||||
readDirectory(rootDir: string, extension: string, exclude?: string, depth?: number): string;
|
||||
|
||||
readDirectory(rootDir: string, extension: string, basePaths?: string, excludeEx?: string, includeFileEx?: string, includeDirEx?: string, depth?: number): string;
|
||||
useCaseSensitiveFileNames?(): boolean;
|
||||
getCurrentDirectory(): string;
|
||||
trace(s: string): void;
|
||||
}
|
||||
|
||||
@ -437,8 +438,10 @@ namespace ts {
|
||||
|
||||
public directoryExists: (directoryName: string) => boolean;
|
||||
public realpath: (path: string) => string;
|
||||
public useCaseSensitiveFileNames: boolean;
|
||||
|
||||
constructor(private shimHost: CoreServicesShimHost) {
|
||||
this.useCaseSensitiveFileNames = this.shimHost.useCaseSensitiveFileNames ? this.shimHost.useCaseSensitiveFileNames() : false;
|
||||
if ("directoryExists" in this.shimHost) {
|
||||
this.directoryExists = directoryName => this.shimHost.directoryExists(directoryName);
|
||||
}
|
||||
@ -447,17 +450,34 @@ namespace ts {
|
||||
}
|
||||
}
|
||||
|
||||
public readDirectory(rootDir: string, extension: string, exclude: string[], depth?: number): string[] {
|
||||
public readDirectory(rootDir: string, extensions: string[], exclude: string[], include: string[], depth?: number): string[] {
|
||||
// Wrap the API changes for 2.0 release. This try/catch
|
||||
// should be removed once TypeScript 2.0 has shipped.
|
||||
let encoded: string;
|
||||
try {
|
||||
encoded = this.shimHost.readDirectory(rootDir, extension, JSON.stringify(exclude), depth);
|
||||
const pattern = getFileMatcherPatterns(rootDir, extensions, exclude, include,
|
||||
this.shimHost.useCaseSensitiveFileNames(), this.shimHost.getCurrentDirectory());
|
||||
return JSON.parse(this.shimHost.readDirectory(
|
||||
rootDir,
|
||||
JSON.stringify(extensions),
|
||||
JSON.stringify(pattern.basePaths),
|
||||
pattern.excludePattern,
|
||||
pattern.includeFilePattern,
|
||||
pattern.includeDirectoryPattern,
|
||||
depth
|
||||
));
|
||||
}
|
||||
catch (e) {
|
||||
encoded = this.shimHost.readDirectory(rootDir, extension, JSON.stringify(exclude));
|
||||
const results: string[] = [];
|
||||
for (const extension of extensions) {
|
||||
for (const file of this.readDirectoryFallback(rootDir, extension, exclude))
|
||||
{
|
||||
if (!contains(results, file)) {
|
||||
results.push(file);
|
||||
}
|
||||
}
|
||||
}
|
||||
return results;
|
||||
}
|
||||
return JSON.parse(encoded);
|
||||
}
|
||||
|
||||
public fileExists(fileName: string): boolean {
|
||||
@ -467,6 +487,10 @@ namespace ts {
|
||||
public readFile(fileName: string): string {
|
||||
return this.shimHost.readFile(fileName);
|
||||
}
|
||||
|
||||
private readDirectoryFallback(rootDir: string, extension: string, exclude: string[]) {
|
||||
return JSON.parse(this.shimHost.readDirectory(rootDir, extension, JSON.stringify(exclude)));
|
||||
}
|
||||
}
|
||||
|
||||
function simpleForwardCall(logger: Logger, actionDescription: string, action: () => any, logPerformance: boolean): any {
|
||||
|
||||
@ -47,7 +47,7 @@ namespace ts {
|
||||
return "";
|
||||
},
|
||||
getDirectories: (path: string) => [],
|
||||
readDirectory: (path: string, extension?: string, exclude?: string[]): string[] => {
|
||||
readDirectory: (path: string, extension?: string[], exclude?: string[], include?: string[]): string[] => {
|
||||
throw new Error("NYI");
|
||||
},
|
||||
exit: (exitCode?: number) => {
|
||||
@ -145,6 +145,7 @@ namespace ts {
|
||||
catch (e) {
|
||||
assert.isTrue(e.message.indexOf(`Could not find file: '${imported.name}'.`) === 0);
|
||||
}
|
||||
|
||||
assert.isTrue(fileExistsIsCalled);
|
||||
}
|
||||
{
|
||||
@ -220,4 +221,4 @@ namespace ts {
|
||||
assert.isTrue(diags.length === 0);
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
956
tests/cases/unittests/matchFiles.ts
Normal file
956
tests/cases/unittests/matchFiles.ts
Normal file
@ -0,0 +1,956 @@
|
||||
/// <reference path="..\..\..\src\harness\external\mocha.d.ts" />
|
||||
/// <reference path="..\..\..\src\harness\harness.ts" />
|
||||
/// <reference path="..\..\..\src\harness\virtualFileSystem.ts" />
|
||||
|
||||
namespace ts {
|
||||
const caseInsensitiveBasePath = "c:/dev/";
|
||||
const caseInsensitiveHost = new Utils.MockParseConfigHost(caseInsensitiveBasePath, /*useCaseSensitiveFileNames*/ false, [
|
||||
"c:/dev/a.ts",
|
||||
"c:/dev/a.d.ts",
|
||||
"c:/dev/a.js",
|
||||
"c:/dev/b.ts",
|
||||
"c:/dev/b.js",
|
||||
"c:/dev/c.d.ts",
|
||||
"c:/dev/z/a.ts",
|
||||
"c:/dev/z/abz.ts",
|
||||
"c:/dev/z/aba.ts",
|
||||
"c:/dev/z/b.ts",
|
||||
"c:/dev/z/bbz.ts",
|
||||
"c:/dev/z/bba.ts",
|
||||
"c:/dev/x/a.ts",
|
||||
"c:/dev/x/aa.ts",
|
||||
"c:/dev/x/b.ts",
|
||||
"c:/dev/x/y/a.ts",
|
||||
"c:/dev/x/y/b.ts",
|
||||
"c:/dev/js/a.js",
|
||||
"c:/dev/js/b.js",
|
||||
"c:/ext/ext.ts"
|
||||
]);
|
||||
|
||||
const caseSensitiveBasePath = "/dev/";
|
||||
const caseSensitiveHost = new Utils.MockParseConfigHost(caseSensitiveBasePath, /*useCaseSensitiveFileNames*/ true, [
|
||||
"/dev/a.ts",
|
||||
"/dev/a.d.ts",
|
||||
"/dev/a.js",
|
||||
"/dev/b.ts",
|
||||
"/dev/b.js",
|
||||
"/dev/A.ts",
|
||||
"/dev/B.ts",
|
||||
"/dev/c.d.ts",
|
||||
"/dev/z/a.ts",
|
||||
"/dev/z/abz.ts",
|
||||
"/dev/z/aba.ts",
|
||||
"/dev/z/b.ts",
|
||||
"/dev/z/bbz.ts",
|
||||
"/dev/z/bba.ts",
|
||||
"/dev/x/a.ts",
|
||||
"/dev/x/b.ts",
|
||||
"/dev/x/y/a.ts",
|
||||
"/dev/x/y/b.ts",
|
||||
"/dev/js/a.js",
|
||||
"/dev/js/b.js",
|
||||
]);
|
||||
|
||||
const caseInsensitiveMixedExtensionHost = new Utils.MockParseConfigHost(caseInsensitiveBasePath, /*useCaseSensitiveFileNames*/ false, [
|
||||
"c:/dev/a.ts",
|
||||
"c:/dev/a.d.ts",
|
||||
"c:/dev/a.js",
|
||||
"c:/dev/b.tsx",
|
||||
"c:/dev/b.d.ts",
|
||||
"c:/dev/b.jsx",
|
||||
"c:/dev/c.tsx",
|
||||
"c:/dev/c.js",
|
||||
"c:/dev/d.js",
|
||||
"c:/dev/e.jsx",
|
||||
"c:/dev/f.other"
|
||||
]);
|
||||
|
||||
const caseInsensitiveCommonFoldersHost = new Utils.MockParseConfigHost(caseInsensitiveBasePath, /*useCaseSensitiveFileNames*/ false, [
|
||||
"c:/dev/a.ts",
|
||||
"c:/dev/a.d.ts",
|
||||
"c:/dev/a.js",
|
||||
"c:/dev/b.ts",
|
||||
"c:/dev/node_modules/a.ts",
|
||||
"c:/dev/bower_components/a.ts",
|
||||
"c:/dev/jspm_packages/a.ts"
|
||||
]);
|
||||
|
||||
describe("matchFiles", () => {
|
||||
describe("with literal file list", () => {
|
||||
it("without exclusions", () => {
|
||||
const json = {
|
||||
files: [
|
||||
"a.ts",
|
||||
"b.ts"
|
||||
]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/a.ts",
|
||||
"c:/dev/b.ts"
|
||||
],
|
||||
wildcardDirectories: {},
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
it("missing files are still present", () => {
|
||||
const json = {
|
||||
files: [
|
||||
"z.ts",
|
||||
"x.ts"
|
||||
]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/z.ts",
|
||||
"c:/dev/x.ts"
|
||||
],
|
||||
wildcardDirectories: {},
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
it("are not removed due to excludes", () => {
|
||||
const json = {
|
||||
files: [
|
||||
"a.ts",
|
||||
"b.ts"
|
||||
],
|
||||
exclude: [
|
||||
"b.ts"
|
||||
]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/a.ts",
|
||||
"c:/dev/b.ts"
|
||||
],
|
||||
wildcardDirectories: {},
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
});
|
||||
|
||||
describe("with literal include list", () => {
|
||||
it("without exclusions", () => {
|
||||
const json = {
|
||||
include: [
|
||||
"a.ts",
|
||||
"b.ts"
|
||||
]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/a.ts",
|
||||
"c:/dev/b.ts"
|
||||
],
|
||||
wildcardDirectories: {},
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
it("with non .ts file extensions are excluded", () => {
|
||||
const json = {
|
||||
include: [
|
||||
"a.js",
|
||||
"b.js"
|
||||
]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {},
|
||||
errors: [],
|
||||
fileNames: [],
|
||||
wildcardDirectories: {},
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
it("with missing files are excluded", () => {
|
||||
const json = {
|
||||
include: [
|
||||
"z.ts",
|
||||
"x.ts"
|
||||
]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {},
|
||||
errors: [],
|
||||
fileNames: [],
|
||||
wildcardDirectories: {},
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
it("with literal excludes", () => {
|
||||
const json = {
|
||||
include: [
|
||||
"a.ts",
|
||||
"b.ts"
|
||||
],
|
||||
exclude: [
|
||||
"b.ts"
|
||||
]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/a.ts"
|
||||
],
|
||||
wildcardDirectories: {},
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
it("with wildcard excludes", () => {
|
||||
const json = {
|
||||
include: [
|
||||
"a.ts",
|
||||
"b.ts",
|
||||
"z/a.ts",
|
||||
"z/abz.ts",
|
||||
"z/aba.ts",
|
||||
"x/b.ts"
|
||||
],
|
||||
exclude: [
|
||||
"*.ts",
|
||||
"z/??z.ts",
|
||||
"*/b.ts"
|
||||
]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/z/a.ts",
|
||||
"c:/dev/z/aba.ts"
|
||||
],
|
||||
wildcardDirectories: {},
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
it("with recursive excludes", () => {
|
||||
const json = {
|
||||
include: [
|
||||
"a.ts",
|
||||
"b.ts",
|
||||
"x/a.ts",
|
||||
"x/b.ts",
|
||||
"x/y/a.ts",
|
||||
"x/y/b.ts"
|
||||
],
|
||||
exclude: [
|
||||
"**/b.ts"
|
||||
]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/a.ts",
|
||||
"c:/dev/x/a.ts",
|
||||
"c:/dev/x/y/a.ts"
|
||||
],
|
||||
wildcardDirectories: {},
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
it("with case sensitive exclude", () => {
|
||||
const json = {
|
||||
include: [
|
||||
"B.ts"
|
||||
],
|
||||
exclude: [
|
||||
"**/b.ts"
|
||||
]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"/dev/B.ts"
|
||||
],
|
||||
wildcardDirectories: {},
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseSensitiveHost, caseSensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
it("with common package folders and no exclusions", () => {
|
||||
const json = {
|
||||
include: [
|
||||
"a.ts",
|
||||
"b.ts",
|
||||
"node_modules/a.ts",
|
||||
"bower_components/a.ts",
|
||||
"jspm_packages/a.ts"
|
||||
]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/a.ts",
|
||||
"c:/dev/b.ts"
|
||||
],
|
||||
wildcardDirectories: {},
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveCommonFoldersHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
it("with common package folders and exclusions", () => {
|
||||
const json = {
|
||||
include: [
|
||||
"a.ts",
|
||||
"b.ts",
|
||||
"node_modules/a.ts",
|
||||
"bower_components/a.ts",
|
||||
"jspm_packages/a.ts"
|
||||
],
|
||||
exclude: [
|
||||
"a.ts",
|
||||
"b.ts"
|
||||
]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/bower_components/a.ts",
|
||||
"c:/dev/jspm_packages/a.ts",
|
||||
"c:/dev/node_modules/a.ts"
|
||||
],
|
||||
wildcardDirectories: {},
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveCommonFoldersHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
it("with common package folders and empty exclude", () => {
|
||||
const json = {
|
||||
include: [
|
||||
"a.ts",
|
||||
"b.ts",
|
||||
"node_modules/a.ts",
|
||||
"bower_components/a.ts",
|
||||
"jspm_packages/a.ts"
|
||||
],
|
||||
exclude: <string[]>[]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/a.ts",
|
||||
"c:/dev/b.ts",
|
||||
"c:/dev/bower_components/a.ts",
|
||||
"c:/dev/jspm_packages/a.ts",
|
||||
"c:/dev/node_modules/a.ts"
|
||||
],
|
||||
wildcardDirectories: {},
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveCommonFoldersHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
});
|
||||
|
||||
describe("with wildcard include list", () => {
|
||||
it("same named declarations are excluded", () => {
|
||||
const json = {
|
||||
include: [
|
||||
"*.ts"
|
||||
]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/a.ts",
|
||||
"c:/dev/b.ts",
|
||||
"c:/dev/c.d.ts"
|
||||
],
|
||||
wildcardDirectories: {
|
||||
"c:/dev": ts.WatchDirectoryFlags.None
|
||||
},
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
it("`*` matches only ts files", () => {
|
||||
const json = {
|
||||
include: [
|
||||
"*"
|
||||
]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/a.ts",
|
||||
"c:/dev/b.ts",
|
||||
"c:/dev/c.d.ts"
|
||||
],
|
||||
wildcardDirectories: {
|
||||
"c:/dev": ts.WatchDirectoryFlags.None
|
||||
},
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
it("`?` matches only a single character", () => {
|
||||
const json = {
|
||||
include: [
|
||||
"x/?.ts"
|
||||
]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/x/a.ts",
|
||||
"c:/dev/x/b.ts"
|
||||
],
|
||||
wildcardDirectories: {
|
||||
"c:/dev/x": ts.WatchDirectoryFlags.None
|
||||
},
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
it("with recursive directory", () => {
|
||||
const json = {
|
||||
include: [
|
||||
"**/a.ts"
|
||||
]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/a.ts",
|
||||
"c:/dev/x/a.ts",
|
||||
"c:/dev/x/y/a.ts",
|
||||
"c:/dev/z/a.ts"
|
||||
],
|
||||
wildcardDirectories: {
|
||||
"c:/dev": ts.WatchDirectoryFlags.Recursive
|
||||
},
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
it("with multiple recursive directories", () => {
|
||||
const json = {
|
||||
include: [
|
||||
"x/y/**/a.ts",
|
||||
"x/**/a.ts",
|
||||
"z/**/a.ts"
|
||||
]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/x/a.ts",
|
||||
"c:/dev/x/y/a.ts",
|
||||
"c:/dev/z/a.ts"
|
||||
],
|
||||
wildcardDirectories: {
|
||||
"c:/dev/x": ts.WatchDirectoryFlags.Recursive,
|
||||
"c:/dev/z": ts.WatchDirectoryFlags.Recursive
|
||||
},
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
it("case sensitive", () => {
|
||||
const json = {
|
||||
include: [
|
||||
"**/A.ts"
|
||||
]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"/dev/A.ts"
|
||||
],
|
||||
wildcardDirectories: {
|
||||
"/dev": ts.WatchDirectoryFlags.Recursive
|
||||
},
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseSensitiveHost, caseSensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
it("with missing files are excluded", () => {
|
||||
const json = {
|
||||
include: [
|
||||
"*/z.ts"
|
||||
]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {},
|
||||
errors: [],
|
||||
fileNames: [],
|
||||
wildcardDirectories: {
|
||||
"c:/dev": ts.WatchDirectoryFlags.Recursive
|
||||
},
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
it("always include literal files", () => {
|
||||
const json = {
|
||||
files: [
|
||||
"a.ts"
|
||||
],
|
||||
include: [
|
||||
"*/z.ts"
|
||||
],
|
||||
exclude: [
|
||||
"**/a.ts"
|
||||
]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/a.ts"
|
||||
],
|
||||
wildcardDirectories: {
|
||||
"c:/dev": ts.WatchDirectoryFlags.Recursive
|
||||
},
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
it("exclude folders", () => {
|
||||
const json = {
|
||||
include: [
|
||||
"**/*"
|
||||
],
|
||||
exclude: [
|
||||
"z",
|
||||
"x"
|
||||
]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/a.ts",
|
||||
"c:/dev/b.ts",
|
||||
"c:/dev/c.d.ts"
|
||||
],
|
||||
wildcardDirectories: {
|
||||
"c:/dev": ts.WatchDirectoryFlags.Recursive
|
||||
}
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
it("with common package folders and no exclusions", () => {
|
||||
const json = {
|
||||
include: [
|
||||
"**/a.ts"
|
||||
]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/a.ts"
|
||||
],
|
||||
wildcardDirectories: {
|
||||
"c:/dev": ts.WatchDirectoryFlags.Recursive
|
||||
},
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveCommonFoldersHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
it("with common package folders and exclusions", () => {
|
||||
const json = {
|
||||
include: [
|
||||
"**/a.ts"
|
||||
],
|
||||
exclude: [
|
||||
"a.ts"
|
||||
]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/bower_components/a.ts",
|
||||
"c:/dev/jspm_packages/a.ts",
|
||||
"c:/dev/node_modules/a.ts"
|
||||
],
|
||||
wildcardDirectories: {
|
||||
"c:/dev": ts.WatchDirectoryFlags.Recursive
|
||||
},
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveCommonFoldersHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
it("with common package folders and empty exclude", () => {
|
||||
const json = {
|
||||
include: [
|
||||
"**/a.ts"
|
||||
],
|
||||
exclude: <string[]>[]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/a.ts",
|
||||
"c:/dev/bower_components/a.ts",
|
||||
"c:/dev/jspm_packages/a.ts",
|
||||
"c:/dev/node_modules/a.ts"
|
||||
],
|
||||
wildcardDirectories: {
|
||||
"c:/dev": ts.WatchDirectoryFlags.Recursive
|
||||
},
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveCommonFoldersHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
it("exclude .js files when allowJs=false", () => {
|
||||
const json = {
|
||||
compilerOptions: {
|
||||
allowJs: false
|
||||
},
|
||||
include: [
|
||||
"js/*"
|
||||
]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {
|
||||
allowJs: false
|
||||
},
|
||||
errors: [],
|
||||
fileNames: [],
|
||||
wildcardDirectories: {
|
||||
"c:/dev/js": ts.WatchDirectoryFlags.None
|
||||
}
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
it("include .js files when allowJs=true", () => {
|
||||
const json = {
|
||||
compilerOptions: {
|
||||
allowJs: true
|
||||
},
|
||||
include: [
|
||||
"js/*"
|
||||
]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {
|
||||
allowJs: true
|
||||
},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/js/a.js",
|
||||
"c:/dev/js/b.js"
|
||||
],
|
||||
wildcardDirectories: {
|
||||
"c:/dev/js": ts.WatchDirectoryFlags.None
|
||||
}
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
it("include paths outside of the project", () => {
|
||||
const json = {
|
||||
include: [
|
||||
"*",
|
||||
"c:/ext/*"
|
||||
]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/a.ts",
|
||||
"c:/dev/b.ts",
|
||||
"c:/dev/c.d.ts",
|
||||
"c:/ext/ext.ts",
|
||||
],
|
||||
wildcardDirectories: {
|
||||
"c:/dev": ts.WatchDirectoryFlags.None,
|
||||
"c:/ext": ts.WatchDirectoryFlags.None
|
||||
}
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
it("with jsx=none, allowJs=false", () => {
|
||||
const json = {
|
||||
compilerOptions: {
|
||||
allowJs: false
|
||||
}
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {
|
||||
allowJs: false
|
||||
},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/a.ts",
|
||||
"c:/dev/b.tsx",
|
||||
"c:/dev/c.tsx",
|
||||
],
|
||||
wildcardDirectories: {
|
||||
"c:/dev": ts.WatchDirectoryFlags.Recursive
|
||||
}
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveMixedExtensionHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
it("with jsx=preserve, allowJs=false", () => {
|
||||
const json = {
|
||||
compilerOptions: {
|
||||
jsx: "preserve",
|
||||
allowJs: false
|
||||
}
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {
|
||||
jsx: ts.JsxEmit.Preserve,
|
||||
allowJs: false
|
||||
},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/a.ts",
|
||||
"c:/dev/b.tsx",
|
||||
"c:/dev/c.tsx",
|
||||
],
|
||||
wildcardDirectories: {
|
||||
"c:/dev": ts.WatchDirectoryFlags.Recursive
|
||||
}
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveMixedExtensionHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
it("with jsx=none, allowJs=true", () => {
|
||||
const json = {
|
||||
compilerOptions: {
|
||||
allowJs: true
|
||||
}
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {
|
||||
allowJs: true
|
||||
},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/a.ts",
|
||||
"c:/dev/b.tsx",
|
||||
"c:/dev/c.tsx",
|
||||
"c:/dev/d.js",
|
||||
"c:/dev/e.jsx",
|
||||
],
|
||||
wildcardDirectories: {
|
||||
"c:/dev": ts.WatchDirectoryFlags.Recursive
|
||||
}
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveMixedExtensionHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
it("with jsx=preserve, allowJs=true", () => {
|
||||
const json = {
|
||||
compilerOptions: {
|
||||
jsx: "preserve",
|
||||
allowJs: true
|
||||
}
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {
|
||||
jsx: ts.JsxEmit.Preserve,
|
||||
allowJs: true
|
||||
},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/a.ts",
|
||||
"c:/dev/b.tsx",
|
||||
"c:/dev/c.tsx",
|
||||
"c:/dev/d.js",
|
||||
"c:/dev/e.jsx",
|
||||
],
|
||||
wildcardDirectories: {
|
||||
"c:/dev": ts.WatchDirectoryFlags.Recursive
|
||||
}
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveMixedExtensionHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
describe("with trailing recursive directory", () => {
|
||||
it("in includes", () => {
|
||||
const json = {
|
||||
include: [
|
||||
"**"
|
||||
]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {},
|
||||
errors: [
|
||||
ts.createCompilerDiagnostic(ts.Diagnostics.File_specification_cannot_end_in_a_recursive_directory_wildcard_Asterisk_Asterisk_Colon_0, "**")
|
||||
],
|
||||
fileNames: [],
|
||||
wildcardDirectories: {}
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
it("in excludes", () => {
|
||||
const json = {
|
||||
include: [
|
||||
"**/*"
|
||||
],
|
||||
exclude: [
|
||||
"**"
|
||||
]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {},
|
||||
errors: [],
|
||||
fileNames: [],
|
||||
wildcardDirectories: {}
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
});
|
||||
describe("with multiple recursive directory patterns", () => {
|
||||
it("in includes", () => {
|
||||
const json = {
|
||||
include: [
|
||||
"**/x/**/*"
|
||||
]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {},
|
||||
errors: [
|
||||
ts.createCompilerDiagnostic(ts.Diagnostics.File_specification_cannot_contain_multiple_recursive_directory_wildcards_Asterisk_Asterisk_Colon_0, "**/x/**/*")
|
||||
],
|
||||
fileNames: [],
|
||||
wildcardDirectories: {}
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
it("in excludes", () => {
|
||||
const json = {
|
||||
include: [
|
||||
"**/a.ts"
|
||||
],
|
||||
exclude: [
|
||||
"**/x/**"
|
||||
]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {},
|
||||
errors: [
|
||||
ts.createCompilerDiagnostic(ts.Diagnostics.File_specification_cannot_contain_multiple_recursive_directory_wildcards_Asterisk_Asterisk_Colon_0, "**/x/**")
|
||||
],
|
||||
fileNames: [
|
||||
"c:/dev/a.ts",
|
||||
"c:/dev/x/a.ts",
|
||||
"c:/dev/x/y/a.ts",
|
||||
"c:/dev/z/a.ts"
|
||||
],
|
||||
wildcardDirectories: {
|
||||
"c:/dev": ts.WatchDirectoryFlags.Recursive
|
||||
}
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
|
||||
assert.deepEqual(actual.fileNames, expected.fileNames);
|
||||
assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories);
|
||||
assert.deepEqual(actual.errors, expected.errors);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
@ -16,36 +16,16 @@ namespace ts {
|
||||
|
||||
function assertParseErrorWithExcludesKeyword(jsonText: string) {
|
||||
const parsed = ts.parseConfigFileTextToJson("/apath/tsconfig.json", jsonText);
|
||||
const parsedCommand = ts.parseJsonConfigFileContent(parsed, ts.sys, "tests/cases/unittests");
|
||||
assert.isTrue(undefined !== parsedCommand.errors);
|
||||
const parsedCommand = ts.parseJsonConfigFileContent(parsed.config, ts.sys, "tests/cases/unittests");
|
||||
assert.isTrue(parsedCommand.errors && parsedCommand.errors.length === 1 &&
|
||||
parsedCommand.errors[0].code === ts.Diagnostics.Unknown_option_excludes_Did_you_mean_exclude.code);
|
||||
}
|
||||
|
||||
function assertParseFileList(jsonText: string, configFileName: string, basePath: string, allFileList: string[], expectedFileList: string[]) {
|
||||
const json = JSON.parse(jsonText);
|
||||
const host: ParseConfigHost = { readDirectory: mockReadDirectory };
|
||||
const host: ParseConfigHost = new Utils.MockParseConfigHost(basePath, true, allFileList);
|
||||
const parsed = ts.parseJsonConfigFileContent(json, host, basePath, /*existingOptions*/ undefined, configFileName);
|
||||
assert.isTrue(arrayIsEqualTo(parsed.fileNames.sort(), expectedFileList.sort()));
|
||||
|
||||
function mockReadDirectory(rootDir: string, extension: string, exclude: string[]): string[] {
|
||||
const result: string[] = [];
|
||||
const fullExcludeDirectories = ts.map(exclude, directory => combinePaths(rootDir, directory));
|
||||
for (const file of allFileList) {
|
||||
let shouldExclude = false;
|
||||
for (const fullExcludeDirectorie of fullExcludeDirectories) {
|
||||
if (file.indexOf(fullExcludeDirectorie) >= 0) {
|
||||
shouldExclude = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (shouldExclude) {
|
||||
continue;
|
||||
}
|
||||
if (fileExtensionIs(file, extension)) {
|
||||
result.push(file);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
it("returns empty config for file with only whitespaces", () => {
|
||||
@ -121,19 +101,19 @@ namespace ts {
|
||||
assertParseResult(
|
||||
`{
|
||||
"compilerOptions": {
|
||||
"lib": "es5"
|
||||
"lib": ["es5"]
|
||||
}
|
||||
}`, {
|
||||
config: { compilerOptions: { lib: "es5" } }
|
||||
config: { compilerOptions: { lib: ["es5"] } }
|
||||
});
|
||||
|
||||
assertParseResult(
|
||||
`{
|
||||
"compilerOptions": {
|
||||
"lib": "es5,es6"
|
||||
"lib": ["es5", "es6"]
|
||||
}
|
||||
}`, {
|
||||
config: { compilerOptions: { lib: "es5,es6" } }
|
||||
config: { compilerOptions: { lib: ["es5", "es6"] } }
|
||||
});
|
||||
});
|
||||
|
||||
@ -141,7 +121,7 @@ namespace ts {
|
||||
assertParseErrorWithExcludesKeyword(
|
||||
`{
|
||||
"compilerOptions": {
|
||||
"lib": "es5"
|
||||
"lib": ["es5"]
|
||||
},
|
||||
"excludes": [
|
||||
"foge.ts"
|
||||
|
||||
@ -92,23 +92,6 @@ namespace ts {
|
||||
}
|
||||
}
|
||||
|
||||
function readDirectory(folder: FSEntry, ext: string, excludes: Path[], result: string[]): void {
|
||||
if (!folder || !isFolder(folder) || contains(excludes, folder.path)) {
|
||||
return;
|
||||
}
|
||||
for (const entry of folder.entries) {
|
||||
if (contains(excludes, entry.path)) {
|
||||
continue;
|
||||
}
|
||||
if (isFolder(entry)) {
|
||||
readDirectory(entry, ext, excludes, result);
|
||||
}
|
||||
else if (fileExtensionIs(entry.path, ext)) {
|
||||
result.push(entry.fullPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function checkNumberOfConfiguredProjects(projectService: server.ProjectService, expected: number) {
|
||||
assert.equal(projectService.configuredProjects.length, expected, `expected ${expected} configured project(s)`);
|
||||
}
|
||||
@ -201,10 +184,26 @@ namespace ts {
|
||||
}
|
||||
}
|
||||
|
||||
readDirectory(path: string, ext: string, excludes: string[]): string[] {
|
||||
const result: string[] = [];
|
||||
readDirectory(this.fs.get(this.toPath(path)), ext, map(excludes, e => toPath(e, path, this.getCanonicalFileName)), result);
|
||||
return result;
|
||||
readDirectory(path: string, extensions?: string[], exclude?: string[], include?: string[]): string[] {
|
||||
const that = this;
|
||||
return ts.matchFiles(path, extensions, exclude, include, this.useCaseSensitiveFileNames, this.getCurrentDirectory(), (dir) => {
|
||||
const result: FileSystemEntries = {
|
||||
directories: [],
|
||||
files : []
|
||||
};
|
||||
const dirEntry = that.fs.get(that.toPath(dir));
|
||||
if (isFolder(dirEntry)) {
|
||||
dirEntry.entries.forEach((entry) => {
|
||||
if (isFolder(entry)) {
|
||||
result.directories.push(entry.fullPath);
|
||||
}
|
||||
else if (isFile(entry)) {
|
||||
result.files.push(entry.fullPath);
|
||||
}
|
||||
});
|
||||
}
|
||||
return result;
|
||||
});
|
||||
}
|
||||
|
||||
watchDirectory(directoryName: string, callback: DirectoryWatcherCallback, recursive: boolean): DirectoryWatcher {
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user