forked from TrueCloudLab/rclone
onedrive: work around duplicated directory listing entries
When paging big directories, onedrive sometimes duplicates the last item on one page as the first item of the next page. This patch detects that and skips the duplicated item with an error message. See: https://forum.rclone.org/t/unexpected-duplicates-on-onedrive-with-0s-in-filename/23164
This commit is contained in:
parent
d0f8b4f479
commit
6071db565c
1 changed file with 8 additions and 0 deletions
|
@ -897,6 +897,7 @@ func (f *Fs) listAll(ctx context.Context, dirID string, directoriesOnly bool, fi
|
|||
// Top parameter asks for bigger pages of data
|
||||
// https://dev.onedrive.com/odata/optional-query-parameters.htm
|
||||
opts := f.newOptsCall(dirID, "GET", "/children?$top=1000")
|
||||
lastID := "\x00"
|
||||
OUTER:
|
||||
for {
|
||||
var result api.ListChildrenResponse
|
||||
|
@ -911,6 +912,10 @@ OUTER:
|
|||
if len(result.Value) == 0 {
|
||||
break
|
||||
}
|
||||
if result.Value[0].ID == lastID {
|
||||
fs.Errorf(f, "Skipping duplicate entry %q in directory %q", lastID, dirID)
|
||||
result.Value = result.Value[1:]
|
||||
}
|
||||
for i := range result.Value {
|
||||
item := &result.Value[i]
|
||||
isFolder := item.GetFolder() != nil
|
||||
|
@ -937,6 +942,9 @@ OUTER:
|
|||
}
|
||||
opts.Path = ""
|
||||
opts.RootURL = result.NextLink
|
||||
if len(result.Value) > 0 {
|
||||
lastID = result.Value[len(result.Value)-1].ID
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
|
Loading…
Add table
Reference in a new issue