test: cover expanded tree metadata flows

This commit is contained in:
narugo1992
2026-04-21 16:06:20 +08:00
parent f37b515e5c
commit 5d6be2a5fc
6 changed files with 1447 additions and 231 deletions

2
.gitignore vendored
View File

@@ -79,7 +79,7 @@ htmlcov/
.cache
nosetests.xml
coverage.xml
coverage-ui/
coverage-ui*/
*.cover
*.py.cover
.hypothesis/

View File

@@ -14,7 +14,8 @@ const mocks = vi.hoisted(() => ({
},
repoApi: {
getInfo: vi.fn(),
listTree: vi.fn(),
listTreeAll: vi.fn(),
getPathsInfo: vi.fn(),
listCommits: vi.fn(),
},
likesApi: {
@@ -22,9 +23,6 @@ const mocks = vi.hoisted(() => ({
like: vi.fn(),
unlike: vi.fn(),
},
axios: {
get: vi.fn(),
},
}));
vi.mock("vue-router/auto", () => ({
@@ -36,19 +34,23 @@ vi.mock("@/utils/api", () => ({
likesAPI: mocks.likesApi,
}));
vi.mock("axios", () => ({
get: mocks.axios.get,
default: {
get: mocks.axios.get,
},
}));
import RepoViewer from "@/components/repo/RepoViewer.vue";
// Builds a promise whose settlement is controlled from the outside.
// Returns { promise, resolve, reject } so a test can decide exactly
// when (and how) the promise settles.
function deferred() {
  let settleOk;
  let settleErr;
  const promise = new Promise((ok, err) => {
    settleOk = ok;
    settleErr = err;
  });
  return { promise, resolve: settleOk, reject: settleErr };
}
describe("RepoViewer path handling", () => {
beforeEach(() => {
vi.clearAllMocks();
setActivePinia(createPinia());
vi.spyOn(console, "error").mockImplementation(() => {});
mocks.repoApi.getInfo.mockResolvedValue({
data: {
@@ -70,11 +72,7 @@ describe("RepoViewer path handling", () => {
);
});
function mountViewer(props, treeEntries) {
mocks.repoApi.listTree.mockResolvedValue({
data: treeEntries,
});
function mountViewer(props) {
return mount(RepoViewer, {
props: {
repoType: "dataset",
@@ -100,28 +98,67 @@ describe("RepoViewer path handling", () => {
});
}
it("does not duplicate directory paths when the tree API returns repo-root paths", async () => {
const wrapper = mountViewer(
it("loads repo-root tree entries, merges expanded path info, and links commits", async () => {
mocks.repoApi.listTreeAll.mockResolvedValue([
{
currentPath: "catalog",
type: "directory",
path: "catalog/section-01",
size: 0,
lastModified: "2026-04-21T13:53:12.000000Z",
},
[
]);
mocks.repoApi.getPathsInfo.mockResolvedValue({
data: [
{
type: "directory",
path: "catalog/section-01",
size: 10,
lastModified: "2026-04-21T13:53:12.000000Z",
lastCommit: {
id: "commit-1",
title: "Add section summary",
date: "2026-04-21T13:53:12.000000Z",
},
},
],
);
});
const wrapper = mountViewer({ currentPath: "catalog" });
await flushPromises();
await flushPromises();
expect(mocks.repoApi.listTreeAll).toHaveBeenCalledWith(
"dataset",
"open-media-lab",
"hierarchy-crawl-fixtures",
"main",
"/catalog",
{ recursive: false },
);
expect(mocks.repoApi.getPathsInfo).toHaveBeenCalledWith(
"dataset",
"open-media-lab",
"hierarchy-crawl-fixtures",
"main",
["catalog/section-01"],
true,
);
const row = wrapper
.findAll('[class*="cursor-pointer"]')
.find((node) => node.text().includes("section-01"));
expect(row).toBeTruthy();
expect(wrapper.text()).toContain("Add section summary");
const commitLink = wrapper
.findAll('a[data-router-link="true"]')
.find(
(node) =>
node.attributes("href") ===
"/datasets/open-media-lab/hierarchy-crawl-fixtures/commit/commit-1",
);
expect(commitLink).toBeTruthy();
await row.trigger("click");
expect(mocks.router.push).toHaveBeenCalledWith(
@@ -129,33 +166,174 @@ describe("RepoViewer path handling", () => {
);
});
it("does not duplicate file paths when the tree API returns repo-root paths", async () => {
const wrapper = mountViewer(
it("keeps repo-root file navigation working when expanded path info fails", async () => {
mocks.repoApi.listTreeAll.mockResolvedValue([
{
name: "table-scan-fixtures",
currentPath: "metadata",
type: "file",
path: "metadata/features.json",
size: 42,
lastModified: "2026-04-21T13:53:39.000000Z",
},
[
{
type: "file",
path: "metadata/features.json",
size: 42,
lastModified: "2026-04-21T13:53:39.000000Z",
},
],
);
]);
mocks.repoApi.getPathsInfo.mockRejectedValue(new Error("expand failed"));
const wrapper = mountViewer({
name: "table-scan-fixtures",
currentPath: "metadata",
});
await flushPromises();
await flushPromises();
const row = wrapper
.findAll('[class*="cursor-pointer"]')
.find((node) => node.text().includes("features.json"));
expect(row).toBeTruthy();
await row.trigger("click");
expect(mocks.router.push).toHaveBeenCalledWith(
"/datasets/open-media-lab/table-scan-fixtures/blob/main/metadata/features.json",
);
});
// Race-condition regression test: a slow tree response for the PREVIOUS
// path must not overwrite the entries already loaded for the CURRENT path.
it("ignores stale tree responses after the current path changes", async () => {
// Two externally-settled promises let the test control response ordering.
const firstTree = deferred();
const secondTree = deferred();
mocks.repoApi.listTreeAll.mockImplementation(
(type, namespace, name, branch, path) => {
if (path === "/catalog") {
return firstTree.promise;
}
if (path === "/catalog-next") {
return secondTree.promise;
}
return Promise.resolve([]);
},
);
mocks.repoApi.getPathsInfo.mockResolvedValue({
data: [{ type: "file", path: "catalog-next/new.txt", size: 1 }],
});
const wrapper = mountViewer({ currentPath: "catalog" });
await flushPromises();
// Navigate away while the first tree request is still in flight.
await wrapper.setProps({ currentPath: "catalog-next" });
// The newer request resolves first...
secondTree.resolve([
{
type: "file",
path: "catalog-next/new.txt",
size: 1,
lastModified: "2026-04-21T13:53:39.000000Z",
},
]);
await flushPromises();
await flushPromises();
// ...then the stale request for the old path resolves late.
firstTree.resolve([
{
type: "file",
path: "catalog/old.txt",
size: 1,
lastModified: "2026-04-21T13:53:39.000000Z",
},
]);
await flushPromises();
await flushPromises();
// Only the current path's entries should be rendered.
expect(wrapper.text()).toContain("new.txt");
expect(wrapper.text()).not.toContain("old.txt");
// Expanded path info must be fetched exactly once, for the winning
// (current-path) tree response only — never for the stale one.
expect(mocks.repoApi.getPathsInfo).toHaveBeenCalledTimes(1);
expect(mocks.repoApi.getPathsInfo).toHaveBeenCalledWith(
"dataset",
"open-media-lab",
"hierarchy-crawl-fixtures",
"main",
["catalog-next/new.txt"],
true,
);
});
// Companion race test to the stale-tree case above, but for the second
// request stage: a late paths-info response belonging to an OLD path must
// not merge its metadata (size / last commit) into the CURRENT tree rows.
it("ignores stale expanded path info responses after a newer request wins", async () => {
// Only the first paths-info call is held open; tree responses resolve eagerly.
const firstPathsInfo = deferred();
mocks.repoApi.listTreeAll.mockImplementation(
(type, namespace, name, branch, path) => {
if (path === "/catalog") {
return Promise.resolve([
{
type: "file",
path: "catalog/old.txt",
size: 1,
lastModified: "2026-04-21T13:53:39.000000Z",
},
]);
}
if (path === "/catalog-next") {
return Promise.resolve([
{
type: "file",
path: "catalog-next/new.txt",
size: 1,
lastModified: "2026-04-21T13:53:39.000000Z",
},
]);
}
return Promise.resolve([]);
},
);
mocks.repoApi.getPathsInfo.mockImplementation(
(type, namespace, name, branch, paths) => {
// The old path's expansion is deferred; the new path's resolves at once.
if (paths[0] === "catalog/old.txt") {
return firstPathsInfo.promise;
}
return Promise.resolve({
data: [
{
type: "file",
path: "catalog-next/new.txt",
size: 3,
lastCommit: {
id: "commit-2",
title: "Ship new tree row",
date: "2026-04-21T13:53:39.000000Z",
},
},
],
});
},
);
const wrapper = mountViewer({ currentPath: "catalog" });
await flushPromises();
await flushPromises();
// Navigate to the new path; its paths-info response lands immediately.
await wrapper.setProps({ currentPath: "catalog-next" });
await flushPromises();
await flushPromises();
// Now the stale paths-info response for the old path arrives late.
firstPathsInfo.resolve({
data: [
{
type: "file",
path: "catalog/old.txt",
size: 99,
lastCommit: {
id: "commit-1",
title: "Old tree row",
date: "2026-04-21T13:53:39.000000Z",
},
},
],
});
await flushPromises();
await flushPromises();
// The current path's row and commit title win; the stale commit title
// from the superseded request must not appear anywhere.
expect(wrapper.text()).toContain("new.txt");
expect(wrapper.text()).toContain("Ship new tree row");
expect(wrapper.text()).not.toContain("Old tree row");
});
});

View File

@@ -291,6 +291,66 @@ describe("frontend API client", () => {
});
});
// API-client test: listTreeAll must follow RFC 5988 `Link: ...; rel="next"`
// pagination headers and concatenate pages, and getPathsInfo must POST a
// form-encoded body with repeated `paths` fields plus `expand`.
it("follows paginated tree Link headers and submits expanded paths-info forms", async () => {
const { apiClient, repoAPI } = await loadModules();
// First GET returns page 1 plus a Link header pointing at page 2;
// second GET returns the final page with no further Link header.
const getSpy = vi
.spyOn(apiClient, "get")
.mockResolvedValueOnce({
data: [{ path: "docs" }],
headers: {
link: '<https://hub.local/api/models/alice/demo/tree/main/docs?cursor=page-2>; rel="next"',
},
})
.mockResolvedValueOnce({
data: [{ path: "docs/guide.md" }],
headers: {},
});
const postSpy = vi.spyOn(apiClient, "post").mockResolvedValue({ data: [] });
const allEntries = await repoAPI.listTreeAll(
"model",
"alice",
"demo",
"main",
"/docs",
{ recursive: false },
);
await repoAPI.getPathsInfo(
"model",
"alice",
"demo",
"main",
["docs", "docs/guide.md"],
true,
);
// Pages are merged in order into a single entry list.
expect(allEntries).toEqual([{ path: "docs" }, { path: "docs/guide.md" }]);
expect(getSpy).toHaveBeenNthCalledWith(
1,
"/api/models/alice/demo/tree/main/docs",
{ params: { recursive: false } },
);
// The follow-up request uses the absolute URL from the Link header as-is.
expect(getSpy).toHaveBeenNthCalledWith(
2,
"https://hub.local/api/models/alice/demo/tree/main/docs?cursor=page-2",
);
expect(postSpy).toHaveBeenCalledTimes(1);
expect(postSpy.mock.calls[0][0]).toBe(
"/api/models/alice/demo/paths-info/main",
);
// Body is URLSearchParams: repeated `paths` keys, slash percent-encoded.
expect(postSpy.mock.calls[0][1]).toBeInstanceOf(URLSearchParams);
expect(postSpy.mock.calls[0][1].toString()).toBe(
"paths=docs&paths=docs%2Fguide.md&expand=true",
);
expect(postSpy.mock.calls[0][2]).toEqual({
headers: {
"Content-Type": "application/x-www-form-urlencoded",
},
});
});
it("builds NDJSON commits for ignored, regular, LFS, and editor flows", async () => {
const originalFileReader = globalThis.FileReader;
globalThis.FileReader = class {

View File

@@ -123,6 +123,111 @@ async def test_with_repo_fallback_returns_original_response_on_fallback_miss(mon
assert result is original
@pytest.mark.asyncio
async def test_with_repo_fallback_forwards_tree_and_paths_info_expand_parameters(monkeypatch):
# Verifies that when a decorated handler 404s, the repo-fallback decorator
# forwards ALL request parameters (recursive/expand/limit/cursor for tree,
# expand for paths-info) plus the merged external tokens to the fallback ops.
forwarded_tree_calls = []
forwarded_paths_info_calls = []
monkeypatch.setattr(
fallback_decorators,
"get_merged_external_tokens",
lambda user, header_tokens: {"https://hf.local": "token"},
)
# Fakes record the exact (args, kwargs) the decorator forwards.
async def fake_try_fallback_tree(*args, **kwargs):
forwarded_tree_calls.append((args, kwargs))
return {"tree": True}
async def fake_try_fallback_paths_info(*args, **kwargs):
forwarded_paths_info_calls.append((args, kwargs))
return [{"path": "README.md"}]
monkeypatch.setattr(fallback_decorators, "try_fallback_tree", fake_try_fallback_tree)
monkeypatch.setattr(
fallback_decorators,
"try_fallback_paths_info",
fake_try_fallback_paths_info,
)
# Both handlers raise 404 so the decorator's fallback path always triggers.
@fallback_decorators.with_repo_fallback("tree")
async def tree_handler(
namespace: str,
name: str,
revision: str,
path: str = "",
recursive: bool = False,
expand: bool = False,
limit: int | None = None,
cursor: str | None = None,
request=None,
user=None,
):
raise HTTPException(status_code=404, detail="missing")
@fallback_decorators.with_repo_fallback("paths_info")
async def paths_info_handler(
repo_type=None,
namespace: str = "",
repo_name: str = "",
revision: str = "",
paths=None,
expand: bool = False,
request=None,
user=None,
):
raise HTTPException(status_code=404, detail="missing")
tree_request = _request("/api/models/owner/demo/tree/main/docs")
tree_result = await tree_handler(
namespace="owner",
name="demo",
revision="main",
path="docs",
recursive=True,
expand=True,
limit=25,
cursor="page-1",
request=tree_request,
user="owner-user",
)
# The fallback result is returned verbatim...
assert tree_result == {"tree": True}
# ...and every pagination/expansion parameter was forwarded as a kwarg.
assert forwarded_tree_calls == [
(
("model", "owner", "demo", "main", "docs"),
{
"recursive": True,
"expand": True,
"limit": 25,
"cursor": "page-1",
"user_tokens": {"https://hf.local": "token"},
},
)
]
paths_info_request = _request("/api/models/owner/demo/paths-info/main")
# repo_type only needs a `.value` attribute here, so a namespace stub suffices.
repo_type = SimpleNamespace(value="model")
paths_info_result = await paths_info_handler(
repo_type=repo_type,
namespace="owner",
repo_name="demo",
revision="main",
paths=["README.md", "docs"],
expand=True,
request=paths_info_request,
user="owner-user",
)
assert paths_info_result == [{"path": "README.md"}]
assert forwarded_paths_info_calls == [
(
("model", "owner", "demo", "main", ["README.md", "docs"]),
{
"expand": True,
"user_tokens": {"https://hf.local": "token"},
},
)
]
@pytest.mark.asyncio
async def test_with_list_aggregation_merges_local_and_external_results(monkeypatch):
monkeypatch.setattr(

View File

@@ -268,7 +268,17 @@ async def test_try_fallback_info_tree_and_paths_info_cover_success_paths(monkeyp
"https://source.local",
"GET",
"/api/models/owner/demo/tree/main/folder/file.txt",
_json_response(200, [{"path": "folder/file.txt"}]),
httpx.Response(
200,
json=[{"path": "folder/file.txt"}],
headers={
"content-type": "application/json",
"link": '</api/models/owner/demo/tree/main/folder/file.txt?cursor=page-2>; rel="next"',
},
request=httpx.Request(
"GET", "https://source.local/api/models/owner/demo/tree/main/folder/file.txt"
),
),
)
FakeFallbackClient.queue(
"https://source.local",
@@ -278,23 +288,42 @@ async def test_try_fallback_info_tree_and_paths_info_cover_success_paths(monkeyp
)
info = await fallback_ops.try_fallback_info("model", "owner", "demo")
tree = await fallback_ops.try_fallback_tree("model", "owner", "demo", "main", "/folder/file.txt")
tree = await fallback_ops.try_fallback_tree(
"model",
"owner",
"demo",
"main",
"/folder/file.txt",
recursive=True,
expand=True,
limit=25,
cursor="page-1",
)
paths_info = await fallback_ops.try_fallback_paths_info(
"model",
"owner",
"demo",
"main",
["folder/file.txt"],
expand=True,
)
assert info["_source"] == "Source"
assert info["_source_url"] == "https://source.local"
assert tree == [{"path": "folder/file.txt"}]
assert tree.status_code == 200
assert tree.body == b'[{"path":"folder/file.txt"}]'
assert tree.headers["link"] == '</api/models/owner/demo/tree/main/folder/file.txt?cursor=page-2>; rel="next"'
assert paths_info == [{"path": "folder/file.txt", "type": "file"}]
assert cache.set_calls[0][0][:3] == ("model", "owner", "demo")
assert FakeFallbackClient.calls[1][3]["params"] == {
"recursive": True,
"expand": True,
"limit": 25,
"cursor": "page-1",
}
assert FakeFallbackClient.calls[-1][3]["data"] == {
"paths": ["folder/file.txt"],
"expand": False,
"expand": True,
}

File diff suppressed because it is too large Load Diff