[#139] Allow registry to export gRPC objects
All checks were successful
DCO action / DCO (pull_request) Successful in 1m0s
Tests and linters / Tests (1.22) (pull_request) Successful in 2m26s
Tests and linters / Tests with -race (pull_request) Successful in 2m42s
Tests and linters / Tests (1.21) (pull_request) Successful in 2m57s
Tests and linters / Lint (pull_request) Successful in 3m15s
All checks were successful
DCO action / DCO (pull_request) Successful in 1m0s
Tests and linters / Tests (1.22) (pull_request) Successful in 2m26s
Tests and linters / Tests with -race (pull_request) Successful in 2m42s
Tests and linters / Tests (1.21) (pull_request) Successful in 2m57s
Tests and linters / Lint (pull_request) Successful in 3m15s
Signed-off-by: Ekaterina Lebedeva <ekaterina.lebedeva@yadro.com>
This commit is contained in:
parent
4aaa50c8ed
commit
3dd559a7b1
1 changed file with 45 additions and 6 deletions
|
@ -37,6 +37,7 @@ func (o *ObjExporter) ExportJSONPreGen(fileName string) error {
|
||||||
}
|
}
|
||||||
|
|
||||||
bucketMap := make(map[string]struct{})
|
bucketMap := make(map[string]struct{})
|
||||||
|
containerMap := make(map[string]struct{})
|
||||||
|
|
||||||
count, err := o.selector.Count()
|
count, err := o.selector.Count()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
@ -50,7 +51,7 @@ func (o *ObjExporter) ExportJSONPreGen(fileName string) error {
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
|
||||||
if _, err = f.WriteString(fmt.Sprintf(`%s{"bucket":"%s","object":"%s"}`, comma, info.S3Bucket, info.S3Key)); err != nil {
|
if err = writeObjectInfo(comma, info, f); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -58,15 +59,54 @@ func (o *ObjExporter) ExportJSONPreGen(fileName string) error {
|
||||||
comma = ","
|
comma = ","
|
||||||
}
|
}
|
||||||
|
|
||||||
bucketMap[info.S3Bucket] = struct{}{}
|
if info.S3Bucket != "" {
|
||||||
|
bucketMap[info.S3Bucket] = struct{}{}
|
||||||
|
}
|
||||||
|
if info.CID != "" {
|
||||||
|
containerMap[info.CID] = struct{}{}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if _, err = f.WriteString(`],"buckets":[`); err != nil {
|
if _, err = f.WriteString(`]`); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(bucketMap) > 0 {
|
||||||
|
if err = writeContainerInfo("buckets", bucketMap, f); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if len(containerMap) > 0 {
|
||||||
|
if err = writeContainerInfo("containers", containerMap, f); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, err = f.WriteString(`}`); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func writeObjectInfo(comma string, info *ObjectInfo, f *os.File) (err error) {
|
||||||
|
var res string
|
||||||
|
if info.S3Bucket != "" || info.S3Key != "" {
|
||||||
|
res = fmt.Sprintf(`%s{"bucket":"%s","object":"%s"}`, comma, info.S3Bucket, info.S3Key)
|
||||||
|
} else {
|
||||||
|
res = fmt.Sprintf(`%s{"cid":"%s","oid":"%s"}`, comma, info.CID, info.OID)
|
||||||
|
}
|
||||||
|
_, err = f.WriteString(res)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
func writeContainerInfo(attrName string, bucketMap map[string]struct{}, f *os.File) (err error) {
|
||||||
|
if _, err = f.WriteString(fmt.Sprintf(`,"%s":[`, attrName)); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
i := 0
|
i := 0
|
||||||
comma = ""
|
comma := ""
|
||||||
for bucket := range bucketMap {
|
for bucket := range bucketMap {
|
||||||
if _, err = f.WriteString(fmt.Sprintf(`%s"%s"`, comma, bucket)); err != nil {
|
if _, err = f.WriteString(fmt.Sprintf(`%s"%s"`, comma, bucket)); err != nil {
|
||||||
return err
|
return err
|
||||||
|
@ -76,7 +116,6 @@ func (o *ObjExporter) ExportJSONPreGen(fileName string) error {
|
||||||
}
|
}
|
||||||
i++
|
i++
|
||||||
}
|
}
|
||||||
|
_, err = f.WriteString(`]`)
|
||||||
_, err = f.WriteString(`]}`)
|
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
Loading…
Reference in a new issue