-rw-r--r--   src/resolver/package_json.zig    99
-rw-r--r--   src/resolver/resolver.zig       191
-rw-r--r--   test/bun.js/resolve.test.js      82
-rw-r--r--   test/bun.js/serve.test.ts        40
4 files changed, 307 insertions, 105 deletions
diff --git a/src/resolver/package_json.zig b/src/resolver/package_json.zig
index 69dc2e8dc..e33c5ac60 100644
--- a/src/resolver/package_json.zig
+++ b/src/resolver/package_json.zig
@@ -113,6 +113,7 @@ pub const PackageJSON = struct {
browser_map: BrowserMap,
exports: ?ExportsMap = null,
+ imports: ?ExportsMap = null,
pub inline fn isAppPackage(this: *const PackageJSON) bool {
return this.hash == 0xDEADBEEF;
@@ -709,6 +710,12 @@ pub const PackageJSON = struct {
}
}
+ if (json.asProperty("imports")) |imports_prop| {
+ if (ExportsMap.parse(r.allocator, &json_source, r.log, imports_prop.expr)) |imports_map| {
+ package_json.imports = imports_map;
+ }
+ }
+
// used by `bun run`
if (include_scripts) {
read_scripts: {
@@ -1000,27 +1007,32 @@ pub const ESModule = struct {
Exact,
Inexact, // This means we may need to try CommonJS-style extension suffixes
- // Module specifier is an invalid URL, package name or package subpath specifier.
+ /// Module specifier is an invalid URL, package name or package subpath specifier.
InvalidModuleSpecifier,
- // package.json configuration is invalid or contains an invalid configuration.
+ /// package.json configuration is invalid or contains an invalid configuration.
InvalidPackageConfiguration,
- // Package exports or imports define a target module for the package that is an invalid type or string target.
+ /// Package exports or imports define a target module for the package that is an invalid type or string target.
InvalidPackageTarget,
- // Package exports do not define or permit a target subpath in the package for the given module.
+ /// Package exports do not define or permit a target subpath in the package for the given module.
PackagePathNotExported,
- // The package or module requested does not exist.
+ /// The package or module requested does not exist.
ModuleNotFound,
- // The resolved path corresponds to a directory, which is not a supported target for module imports.
+ /// The resolved path corresponds to a directory, which is not a supported target for module imports.
UnsupportedDirectoryImport,
- // When a package path is explicitly set to null, that means it's not exported.
+ /// When a package path is explicitly set to null, that means it's not exported.
PackagePathDisabled,
+ /// The internal "#" import specifier was not found.
+ PackageImportNotDefined,
+
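+ /// The "imports" target was a bare package specifier; resolution must re-enter package (node_modules) resolution.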
+ PackageResolve,
+
pub inline fn isUndefined(this: Status) bool {
return switch (this) {
.Undefined, .UndefinedNoConditionsMatch => true,
@@ -1075,8 +1087,40 @@ pub const ESModule = struct {
threadlocal var resolved_path_buf_percent: [bun.MAX_PATH_BYTES]u8 = undefined;
pub fn resolve(r: *const ESModule, package_url: string, subpath: string, exports: ExportsMap.Entry) Resolution {
- var result = r.resolveExports(package_url, subpath, exports);
+ return finalize(
+ r.resolveExports(package_url, subpath, exports),
+ );
+ }
+ pub fn resolveImports(r: *const ESModule, specifier: string, imports: ExportsMap.Entry) Resolution {
+ if (imports.data != .map) {
+ return .{
+ .status = .InvalidPackageConfiguration,
+ .debug = .{
+ .token = logger.Range.None,
+ },
+ };
+ }
+
+ const result = r.resolveImportsExports(
+ specifier,
+ imports,
+ true,
+ "/",
+ );
+
+ switch (result.status) {
+ .Undefined, .Null => {
+ return .{ .status = .PackageImportNotDefined, .debug = .{ .token = result.debug.token } };
+ },
+ else => {
+ return finalize(result);
+ },
+ }
+ }
+
+ pub fn finalize(result_: Resolution) Resolution {
+ var result = result_;
if (result.status != .Exact and result.status != .Inexact) {
return result;
}
@@ -1149,13 +1193,13 @@ pub const ESModule = struct {
}
if (main_export.data != .@"null") {
- const result = r.resolveTarget(package_url, main_export, "", false);
+ const result = r.resolveTarget(package_url, main_export, "", false, false);
if (result.status != .Null and result.status != .Undefined) {
return result;
}
}
} else if (exports.data == .map and exports.keysStartWithDot()) {
- const result = r.resolveImportsExports(subpath, exports, package_url);
+ const result = r.resolveImportsExports(subpath, exports, false, package_url);
if (result.status != .Null and result.status != .Undefined) {
return result;
}
@@ -1176,6 +1220,7 @@ pub const ESModule = struct {
r: *const ESModule,
match_key: string,
match_obj: ExportsMap.Entry,
+ is_imports: bool,
package_url: string,
) Resolution {
if (r.debug_logs) |logs| {
@@ -1188,7 +1233,7 @@ pub const ESModule = struct {
log.addNoteFmt("Found \"{s}\"", .{match_key}) catch unreachable;
}
- return r.resolveTarget(package_url, target, "", false);
+ return r.resolveTarget(package_url, target, "", is_imports, false);
}
}
@@ -1206,7 +1251,7 @@ pub const ESModule = struct {
log.addNoteFmt("The key \"{s}\" matched with \"{s}\" left over", .{ expansion.key, subpath }) catch unreachable;
}
- return r.resolveTarget(package_url, target, subpath, true);
+ return r.resolveTarget(package_url, target, subpath, is_imports, true);
}
}
@@ -1217,7 +1262,7 @@ pub const ESModule = struct {
log.addNoteFmt("The key \"{s}\" matched with \"{s}\" left over", .{ expansion.key, subpath }) catch unreachable;
}
- var result = r.resolveTarget(package_url, target, subpath, false);
+ var result = r.resolveTarget(package_url, target, subpath, is_imports, false);
result.status = if (result.status == .Exact)
// Return the object { resolved, exact: false }.
.Inexact
@@ -1250,6 +1295,7 @@ pub const ESModule = struct {
package_url: string,
target: ExportsMap.Entry,
subpath: string,
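+ // "internal" is set when resolving targets from the "imports" map ("#" specifiers) rather than "exports".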
+ internal: bool,
comptime pattern: bool,
) Resolution {
switch (target.data) {
@@ -1276,11 +1322,34 @@ pub const ESModule = struct {
}
}
+ // If target does not start with "./", then...
if (!strings.startsWith(str, "./")) {
if (r.debug_logs) |log| {
log.addNoteFmt("The target \"{s}\" is invalid because it doesn't start with a \"./\"", .{str}) catch unreachable;
}
+ if (internal and !strings.hasPrefixComptime(str, "../") and !strings.hasPrefix(str, "/")) {
+ if (comptime pattern) {
+ // Return the URL resolution of resolvedTarget with every instance of "*" replaced with subpath.
+ const len = std.mem.replacementSize(u8, str, "*", subpath);
+ _ = std.mem.replace(u8, str, "*", subpath, &resolve_target_buf2);
+ const result = resolve_target_buf2[0..len];
+ if (r.debug_logs) |log| {
+ log.addNoteFmt("Subsituted \"{s}\" for \"*\" in \".{s}\" to get \".{s}\" ", .{ subpath, str, result }) catch unreachable;
+ }
+
+ return Resolution{ .path = result, .status = .PackageResolve, .debug = .{ .token = target.first_token } };
+ } else {
+ var parts2 = [_]string{ str, subpath };
+ const result = resolve_path.joinStringBuf(&resolve_target_buf2, parts2, .auto);
+ if (r.debug_logs) |log| {
+ log.addNoteFmt("Resolved \".{s}\" to \".{s}\"", .{ str, result }) catch unreachable;
+ }
+
+ return Resolution{ .path = result, .status = .PackageResolve, .debug = .{ .token = target.first_token } };
+ }
+ }
+
return Resolution{ .path = str, .status = .InvalidPackageTarget, .debug = .{ .token = target.first_token } };
}
@@ -1340,7 +1409,7 @@ pub const ESModule = struct {
log.addNoteFmt("The key \"{s}\" matched", .{key}) catch unreachable;
}
- var result = r.resolveTarget(package_url, slice.items(.value)[i], subpath, pattern);
+ var result = r.resolveTarget(package_url, slice.items(.value)[i], subpath, internal, pattern);
if (result.status.isUndefined()) {
did_find_map_entry = true;
last_map_entry_i = i;
@@ -1431,7 +1500,7 @@ pub const ESModule = struct {
for (array) |targetValue| {
// Let resolved be the result, continuing the loop on any Invalid Package Target error.
- const result = r.resolveTarget(package_url, targetValue, subpath, pattern);
+ const result = r.resolveTarget(package_url, targetValue, subpath, internal, pattern);
if (result.status == .InvalidPackageTarget or result.status == .Null) {
last_debug = result.debug;
last_exception = result.status;
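
The `internal` branch added to `resolveTarget` gives "imports" targets one extra capability over "exports": a target that does not start with "./" (and is not "../" or "/") is treated as a bare package specifier and reported with the new `PackageResolve` status, with "*" substituted for pattern keys. A rough TypeScript sketch of just that branch, assuming the non-pattern case simply joins the target and the leftover subpath (the helper name is hypothetical, not Bun's code):

// Hypothetical helper approximating the `internal` branch of resolveTarget
// for targets that do not start with "./".
type InternalTargetResult =
  | { status: "PackageResolve"; path: string }
  | { status: "InvalidPackageTarget"; path: string };

function resolveInternalTarget(
  target: string,
  subpath: string,
  pattern: boolean,
): InternalTargetResult {
  // "../" and "/" targets remain invalid even for "imports".
  if (target.startsWith("../") || target.startsWith("/")) {
    return { status: "InvalidPackageTarget", path: target };
  }
  // Pattern keys ("#foo/*") substitute the matched subpath for every "*";
  // otherwise the leftover subpath is appended to the target.
  const path = pattern
    ? target.replaceAll("*", subpath)
    : [target, subpath].filter((s) => s.length > 0).join("/");
  return { status: "PackageResolve", path };
}

// resolveInternalTarget("react", "", false)      -> { status: "PackageResolve", path: "react" }
// resolveInternalTarget("lodash/*", "get", true) -> { status: "PackageResolve", path: "lodash/get" }
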
diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig
index 93cdec82e..6496ce118 100644
--- a/src/resolver/resolver.zig
+++ b/src/resolver/resolver.zig
@@ -1007,7 +1007,7 @@ pub const Resolver = struct {
if (remapped.len == 0) {
// "browser": {"module": false}
// does the module exist in the filesystem?
- if (r.loadNodeModules(import_path, kind, source_dir_info)) |node_module| {
+ if (r.loadNodeModules(import_path, kind, source_dir_info, false)) |node_module| {
var pair = node_module.path_pair;
pair.primary.is_disabled = true;
if (pair.secondary != null) {
@@ -1183,7 +1183,13 @@ pub const Resolver = struct {
threadlocal var esm_subpath_buf: [512]u8 = undefined;
threadlocal var esm_absolute_package_path: [bun.MAX_PATH_BYTES]u8 = undefined;
threadlocal var esm_absolute_package_path_joined: [bun.MAX_PATH_BYTES]u8 = undefined;
- pub fn loadNodeModules(r: *ThisResolver, import_path: string, kind: ast.ImportKind, _dir_info: *DirInfo) ?MatchResult {
+ pub fn loadNodeModules(
+ r: *ThisResolver,
+ import_path: string,
+ kind: ast.ImportKind,
+ _dir_info: *DirInfo,
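+ // When true, "#" specifiers are not matched against "imports"; set when re-resolving an "imports" target that was itself a bare package specifier.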
+ forbid_imports: bool,
+ ) ?MatchResult {
var dir_info = _dir_info;
if (r.debug_logs) |*debug| {
debug.addNoteFmt("Searching for {s} in \"node_modules\" directories starting from \"{s}\"", .{ import_path, dir_info.abs_path }) catch {};
@@ -1221,6 +1227,44 @@ pub const Resolver = struct {
const esm_ = ESModule.Package.parse(import_path, &esm_subpath_buf);
+ if (import_path[0] == '#' and !forbid_imports) {
+ if (esm_ != null) {
+ if (dir_info.package_json) |package_json| {
+ load_from_imports_map: {
+ const imports_map = package_json.imports orelse break :load_from_imports_map;
+
+ if (import_path.len == 1 or strings.hasPrefix(import_path, "#/")) {
+ if (r.debug_logs) |*debug| {
+ debug.addNoteFmt("The path \"{s}\" must not equal \"#\" and must not start with \"#/\"", .{import_path}) catch {};
+ }
+ return null;
+ }
+
+ const esmodule = ESModule{
+ .conditions = switch (kind) {
+ ast.ImportKind.require, ast.ImportKind.require_resolve => r.opts.conditions.require,
+ else => r.opts.conditions.import,
+ },
+ .allocator = r.allocator,
+ .debug_logs = if (r.debug_logs) |*debug| debug else null,
+ };
+
+ const esm_resolution = esmodule.resolveImports(import_path, imports_map.root);
+
+ if (esm_resolution.status == .PackageResolve)
+ return r.loadNodeModules(
+ esm_resolution.path,
+ kind,
+ dir_info,
+ true,
+ );
+
+ return r.handleESMResolution(esm_resolution, dir_info.abs_path, kind, package_json);
+ }
+ }
+ }
+ }
+
// Then check for the package in any enclosing "node_modules" directories
while (true) {
// Skip directories that are themselves called "node_modules", since we
@@ -1259,73 +1303,10 @@ pub const Resolver = struct {
// want problems due to Windows paths, which are very unlike URL
// paths. We also want to avoid any "%" characters in the absolute
// directory path accidentally being interpreted as URL escapes.
- var esm_resolution = esmodule.resolve("/", esm.subpath, exports_map.root);
-
- if ((esm_resolution.status == .Inexact or esm_resolution.status == .Exact) and
- esm_resolution.path.len > 0 and esm_resolution.path[0] == '/')
- {
- const abs_esm_path: string = brk: {
- var parts = [_]string{
- abs_package_path,
- esm_resolution.path[1..],
- };
- break :brk r.fs.absBuf(&parts, &esm_absolute_package_path_joined);
- };
-
- switch (esm_resolution.status) {
- .Exact => {
- const resolved_dir_info = (r.dirInfoCached(std.fs.path.dirname(abs_esm_path).?) catch null) orelse {
- esm_resolution.status = .ModuleNotFound;
- return null;
- };
- const entries = resolved_dir_info.getEntries() orelse {
- esm_resolution.status = .ModuleNotFound;
- return null;
- };
- const entry_query = entries.get(std.fs.path.basename(abs_esm_path)) orelse {
- esm_resolution.status = .ModuleNotFound;
- return null;
- };
-
- if (entry_query.entry.kind(&r.fs.fs) == .dir) {
- esm_resolution.status = .UnsupportedDirectoryImport;
- return null;
- }
-
- const absolute_out_path = brk: {
- if (entry_query.entry.abs_path.isEmpty()) {
- entry_query.entry.abs_path =
- PathString.init(r.fs.dirname_store.append(@TypeOf(abs_esm_path), abs_esm_path) catch unreachable);
- }
- break :brk entry_query.entry.abs_path.slice();
- };
-
- return MatchResult{
- .path_pair = PathPair{
- .primary = Path.initWithNamespace(absolute_out_path, "file"),
- },
- .dirname_fd = entries.fd,
- .file_fd = entry_query.entry.cache.fd,
- .dir_info = resolved_dir_info,
- .diff_case = entry_query.diff_case,
- .is_node_module = true,
- .package_json = resolved_dir_info.package_json orelse package_json,
- };
- },
- .Inexact => {
- // If this was resolved against an expansion key ending in a "/"
- // instead of a "*", we need to try CommonJS-style implicit
- // extension and/or directory detection.
- if (r.loadAsFileOrDirectory(abs_esm_path, kind)) |*res| {
- res.is_node_module = true;
- res.package_json = res.package_json orelse package_json;
- return res.*;
- }
- esm_resolution.status = .ModuleNotFound;
- return null;
- },
- else => unreachable,
- }
+ const esm_resolution = esmodule.resolve("/", esm.subpath, exports_map.root);
+
+ if (r.handleESMResolution(esm_resolution, abs_package_path, kind, package_json)) |res| {
+ return res;
}
}
}
@@ -1347,9 +1328,79 @@ pub const Resolver = struct {
return null;
}
+ fn handleESMResolution(r: *ThisResolver, esm_resolution_: ESModule.Resolution, abs_package_path: string, kind: ast.ImportKind, package_json: *PackageJSON) ?MatchResult {
+ var esm_resolution = esm_resolution_;
+ if (!((esm_resolution.status == .Inexact or esm_resolution.status == .Exact) and
+ esm_resolution.path.len > 0 and esm_resolution.path[0] == '/'))
+ return null;
+
+ const abs_esm_path: string = brk: {
+ var parts = [_]string{
+ abs_package_path,
+ esm_resolution.path[1..],
+ };
+ break :brk r.fs.absBuf(&parts, &esm_absolute_package_path_joined);
+ };
+
+ switch (esm_resolution.status) {
+ .Exact => {
+ const resolved_dir_info = (r.dirInfoCached(std.fs.path.dirname(abs_esm_path).?) catch null) orelse {
+ esm_resolution.status = .ModuleNotFound;
+ return null;
+ };
+ const entries = resolved_dir_info.getEntries() orelse {
+ esm_resolution.status = .ModuleNotFound;
+ return null;
+ };
+ const entry_query = entries.get(std.fs.path.basename(abs_esm_path)) orelse {
+ esm_resolution.status = .ModuleNotFound;
+ return null;
+ };
+
+ if (entry_query.entry.kind(&r.fs.fs) == .dir) {
+ esm_resolution.status = .UnsupportedDirectoryImport;
+ return null;
+ }
+
+ const absolute_out_path = brk: {
+ if (entry_query.entry.abs_path.isEmpty()) {
+ entry_query.entry.abs_path =
+ PathString.init(r.fs.dirname_store.append(@TypeOf(abs_esm_path), abs_esm_path) catch unreachable);
+ }
+ break :brk entry_query.entry.abs_path.slice();
+ };
+
+ return MatchResult{
+ .path_pair = PathPair{
+ .primary = Path.initWithNamespace(absolute_out_path, "file"),
+ },
+ .dirname_fd = entries.fd,
+ .file_fd = entry_query.entry.cache.fd,
+ .dir_info = resolved_dir_info,
+ .diff_case = entry_query.diff_case,
+ .is_node_module = true,
+ .package_json = resolved_dir_info.package_json orelse package_json,
+ };
+ },
+ .Inexact => {
+ // If this was resolved against an expansion key ending in a "/"
+ // instead of a "*", we need to try CommonJS-style implicit
+ // extension and/or directory detection.
+ if (r.loadAsFileOrDirectory(abs_esm_path, kind)) |*res| {
+ res.is_node_module = true;
+ res.package_json = res.package_json orelse package_json;
+ return res.*;
+ }
+ esm_resolution.status = .ModuleNotFound;
+ return null;
+ },
+ else => unreachable,
+ }
+ }
+
pub fn resolveWithoutRemapping(r: *ThisResolver, source_dir_info: *DirInfo, import_path: string, kind: ast.ImportKind) ?MatchResult {
if (isPackagePath(import_path)) {
- return r.loadNodeModules(import_path, kind, source_dir_info);
+ return r.loadNodeModules(import_path, kind, source_dir_info, false);
} else {
const paths = [_]string{ source_dir_info.abs_path, import_path };
var resolved = r.fs.absBuf(&paths, &resolve_without_remapping_buf);
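
Taken together, the resolver.zig changes give "#" specifiers this flow: find the nearest package.json with an "imports" map, reject "#" and "#/..." outright, resolve against the map, then either hand the result to handleESMResolution or, on PackageResolve, feed the mapped bare specifier back into loadNodeModules with forbid_imports set so the second pass cannot loop back into the imports map. A compact TypeScript sketch of that control flow (simplified to exact-key matches; every helper except the status names is hypothetical):

// Simplified model of the new "#" branch in loadNodeModules; not Bun's code.
type ImportsMap = Record<string, string>;
type Match = { path: string } | null;

function resolveImportsMap(specifier: string, imports: ImportsMap) {
  const target = imports[specifier];
  if (target === undefined) return { status: "PackageImportNotDefined" as const };
  return target.startsWith("./")
    ? { status: "Exact" as const, path: target }
    : { status: "PackageResolve" as const, path: target };
}

function loadNodeModules(
  importPath: string,
  imports: ImportsMap | undefined,
  forbidImports = false,
): Match {
  if (importPath.startsWith("#") && !forbidImports && imports) {
    // "#" alone and "#/..." are never valid import specifiers.
    if (importPath === "#" || importPath.startsWith("#/")) return null;

    const res = resolveImportsMap(importPath, imports);
    if (res.status === "PackageResolve") {
      // The target named another package ("react"): re-enter node_modules
      // resolution, but never consult "imports" again on this pass.
      return loadNodeModules(res.path, imports, true);
    }
    if (res.status === "Exact") return { path: res.path };
    return null; // PackageImportNotDefined surfaces as a resolution error upstream.
  }
  // ... the normal walk up through enclosing node_modules directories (omitted).
  return null;
}
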
diff --git a/test/bun.js/resolve.test.js b/test/bun.js/resolve.test.js
index d081823e6..ea7b6cf2d 100644
--- a/test/bun.js/resolve.test.js
+++ b/test/bun.js/resolve.test.js
@@ -2,6 +2,48 @@ import { it, expect } from "bun:test";
import { mkdirSync, writeFileSync } from "fs";
import { join } from "path";
+it("#imports", async () => {
+ await writePackageJSONImportsFixture();
+
+ const baz = await import.meta.resolve(
+ "#foo",
+ join(await import.meta.resolve("package-json-imports/baz"), "../")
+ );
+ expect(baz.endsWith("foo/private-foo.js")).toBe(true);
+
+ const subpath = await import.meta.resolve(
+ "#foo/bar",
+ join(await import.meta.resolve("package-json-imports/baz"), "../")
+ );
+ expect(subpath.endsWith("foo/private-foo.js")).toBe(true);
+
+ const react = await import.meta.resolve(
+ "#internal-react",
+ join(await import.meta.resolve("package-json-imports/baz"), "../")
+ );
+ expect(react.endsWith("/react/index.js")).toBe(true);
+
+ // Check that "#foo" does not resolve from outside the package.
+ try {
+ await import.meta.resolve("#foo");
+ throw new Error("Test failed");
+ } catch (exception) {
+ expect(exception instanceof ResolveError).toBe(true);
+ expect(exception.referrer).toBe(import.meta.path);
+ expect(exception.name).toBe("ResolveError");
+ }
+
+ // Check that package-json-imports/#foo doesn't work
+ try {
+ await import.meta.resolve("package-json-imports/#foo");
+ throw new Error("Test failed");
+ } catch (exception) {
+ expect(exception instanceof ResolveError).toBe(true);
+ expect(exception.referrer).toBe(import.meta.path);
+ expect(exception.name).toBe("ResolveError");
+ }
+});
+
it("import.meta.resolve", async () => {
expect(await import.meta.resolve("./resolve.test.js")).toBe(import.meta.path);
@@ -150,3 +192,43 @@ function writePackageJSONExportsFixture() {
)
);
}
+
+function writePackageJSONImportsFixture() {
+ try {
+ mkdirSync(
+ join(import.meta.dir, "./node_modules/package-json-imports/foo"),
+ {
+ recursive: true,
+ }
+ );
+ } catch (exception) {}
+ writeFileSync(
+ join(import.meta.dir, "./node_modules/package-json-imports/foo/bar.js"),
+ "export const bar = 1;"
+ );
+ writeFileSync(
+ join(
+ import.meta.dir,
+ "./node_modules/package-json-imports/foo/private-foo.js"
+ ),
+ "export {bar} from 'package-json-imports/#foo';"
+ );
+ writeFileSync(
+ join(import.meta.dir, "./node_modules/package-json-imports/package.json"),
+ JSON.stringify(
+ {
+ name: "package-json-imports",
+ exports: {
+ "./baz": "./foo/bar.js",
+ },
+ imports: {
+ "#foo": "./foo/private-foo.js",
+ "#foo/bar": "./foo/private-foo.js",
+ "#internal-react": "react",
+ },
+ },
+ null,
+ 2
+ )
+ );
+}
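
Given the fixture above, a module inside package-json-imports (such as foo/private-foo.js) can use the private specifiers roughly like this (illustrative only), while the same "#foo" specifier from outside the package, as in the test file itself, has no "imports" map in scope and must fail with a ResolveError:

// Inside node_modules/package-json-imports/*:
import { bar } from "#foo";               // -> ./foo/private-foo.js
import * as react from "#internal-react"; // -> node_modules/react (bare package target)
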
diff --git a/test/bun.js/serve.test.ts b/test/bun.js/serve.test.ts
index ba505feba..95428c028 100644
--- a/test/bun.js/serve.test.ts
+++ b/test/bun.js/serve.test.ts
@@ -39,7 +39,7 @@ describe("streaming", () => {
},
});
- const response = await fetch(`http://localhost:${server.port}`);
+ const response = await fetch(`http://127.0.0.1:${server.port}`);
if (response.status > 0) {
expect(response.status).toBe(500);
expect(await response.text()).toBe("fail");
@@ -77,7 +77,7 @@ describe("streaming", () => {
},
});
- const response = await fetch(`http://localhost:${server.port}`);
+ const response = await fetch(`http://127.0.0.1:${server.port}`);
// connection terminated
if (response.status > 0) {
expect(response.status).toBe(200);
@@ -110,7 +110,7 @@ describe("streaming", () => {
},
});
- const response = await fetch(`http://localhost:${server.port}`);
+ const response = await fetch(`http://127.0.0.1:${server.port}`);
const text = await response.text();
expect(text.length).toBe(textToExpect.length);
expect(text).toBe(textToExpect);
@@ -134,7 +134,7 @@ describe("streaming", () => {
);
},
});
- const response = await fetch(`http://localhost:${server.port}`);
+ const response = await fetch(`http://127.0.0.1:${server.port}`);
expect(await response.text()).toBe(textToExpect);
server.stop();
});
@@ -157,7 +157,7 @@ describe("streaming", () => {
},
});
- const response = await fetch(`http://localhost:${server.port}`);
+ const response = await fetch(`http://127.0.0.1:${server.port}`);
expect(response.status).toBe(500);
} catch (e) {
if (!e || !(e instanceof TestPass)) {
@@ -191,7 +191,7 @@ describe("streaming", () => {
},
});
- const response = await fetch(`http://localhost:${server.port}`);
+ const response = await fetch(`http://127.0.0.1:${server.port}`);
expect(response.status).toBe(500);
expect(await response.text()).toBe("Fail");
expect(pass).toBe(true);
@@ -222,7 +222,7 @@ describe("streaming", () => {
);
},
});
- const response = await fetch(`http://localhost:${server.port}`);
+ const response = await fetch(`http://127.0.0.1:${server.port}`);
expect(await response.text()).toBe(textToExpect);
server.stop();
});
@@ -244,7 +244,7 @@ describe("streaming", () => {
);
},
});
- const response = await fetch(`http://localhost:${server.port}`);
+ const response = await fetch(`http://127.0.0.1:${server.port}`);
const text = await response.text();
expect(text).toBe(textToExpect);
server.stop();
@@ -270,7 +270,7 @@ describe("streaming", () => {
);
},
});
- const response = await fetch(`http://localhost:${server.port}`);
+ const response = await fetch(`http://127.0.0.1:${server.port}`);
expect(await response.text()).toBe(textToExpect);
server.stop();
});
@@ -295,7 +295,7 @@ describe("streaming", () => {
);
},
});
- const response = await fetch(`http://localhost:${server.port}`);
+ const response = await fetch(`http://127.0.0.1:${server.port}`);
expect(await response.text()).toBe(textToExpect);
server.stop();
});
@@ -324,7 +324,7 @@ describe("streaming", () => {
);
},
});
- const response = await fetch(`http://localhost:${server.port}`);
+ const response = await fetch(`http://127.0.0.1:${server.port}`);
expect(await response.text()).toBe(textToExpect);
server.stop();
});
@@ -337,7 +337,7 @@ it("should work for a hello world", async () => {
return new Response(`Hello, world!`);
},
});
- const response = await fetch(`http://localhost:${server.port}`);
+ const response = await fetch(`http://127.0.0.1:${server.port}`);
expect(await response.text()).toBe("Hello, world!");
server.stop();
});
@@ -352,7 +352,7 @@ it("should work for a blob", async () => {
return new Response(new Blob([textToExpect]));
},
});
- const response = await fetch(`http://localhost:${server.port}`);
+ const response = await fetch(`http://127.0.0.1:${server.port}`);
expect(await response.text()).toBe(textToExpect);
server.stop();
});
@@ -367,7 +367,7 @@ it("should work for a blob stream", async () => {
return new Response(new Blob([textToExpect]).stream());
},
});
- const response = await fetch(`http://localhost:${server.port}`);
+ const response = await fetch(`http://127.0.0.1:${server.port}`);
expect(await response.text()).toBe(textToExpect);
server.stop();
});
@@ -382,7 +382,7 @@ it("should work for a file", async () => {
return new Response(file(fixture));
},
});
- const response = await fetch(`http://localhost:${server.port}`);
+ const response = await fetch(`http://127.0.0.1:${server.port}`);
expect(await response.text()).toBe(textToExpect);
server.stop();
});
@@ -397,7 +397,7 @@ it("should work for a file stream", async () => {
return new Response(file(fixture).stream());
},
});
- const response = await fetch(`http://localhost:${server.port}`);
+ const response = await fetch(`http://127.0.0.1:${server.port}`);
expect(await response.text()).toBe(textToExpect);
server.stop();
});
@@ -416,7 +416,7 @@ it("fetch should work with headers", async () => {
});
},
});
- const response = await fetch(`http://localhost:${server.port}`, {
+ const response = await fetch(`http://127.0.0.1:${server.port}`, {
headers: {
"X-Foo": "bar",
},
@@ -444,7 +444,7 @@ it(`should work for a file ${count} times serial`, async () => {
// it's hard to say if this only happens here due to some weird stuff with the test runner
// or if it's "real" issue
for (let i = 0; i < count; i++) {
- const response = await fetch(`http://localhost:${server.port}`);
+ const response = await fetch(`http://127.0.0.1:${server.port}`);
expect(await response.text()).toBe(textToExpect);
}
@@ -467,7 +467,7 @@ it(`should work for text ${count} times serial`, async () => {
// it's hard to say if this only happens here due to some weird stuff with the test runner
// or if it's "real" issue
for (let i = 0; i < count; i++) {
- const response = await fetch(`http://localhost:${server.port}`);
+ const response = await fetch(`http://127.0.0.1:${server.port}`);
expect(await response.text()).toBe(textToExpect);
}
@@ -488,7 +488,7 @@ it(`should work for ArrayBuffer ${count} times serial`, async () => {
// it's hard to say if this only happens here due to some weird stuff with the test runner
// or if it's "real" issue
for (let i = 0; i < count; i++) {
- const response = await fetch(`http://localhost:${server.port}`);
+ const response = await fetch(`http://127.0.0.1:${server.port}`);
expect(await response.text()).toBe(textToExpect);
}