You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.
 
 
 

211 lines
6.8 KiB

  1. // Copyright 2017, Google LLC
  2. //
  3. // Licensed under the Apache License, Version 2.0 (the "License");
  4. // you may not use this file except in compliance with the License.
  5. // You may obtain a copy of the License at
  6. //
  7. // http://www.apache.org/licenses/LICENSE-2.0
  8. //
  9. // Unless required by applicable law or agreed to in writing, software
  10. // distributed under the License is distributed on an "AS IS" BASIS,
  11. // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. // See the License for the specific language governing permissions and
  13. // limitations under the License.
  14. package vision
  15. import (
  16. "context"
  17. "fmt"
  18. "reflect"
  19. "testing"
  20. "github.com/golang/protobuf/proto"
  21. pb "google.golang.org/genproto/googleapis/cloud/vision/v1"
  22. "google.golang.org/genproto/googleapis/rpc/status"
  23. "google.golang.org/grpc"
  24. "google.golang.org/grpc/codes"
  25. )
// batchResponse is the canned BatchAnnotateImagesResponse served by the mock
// annotator in these tests. Its single AnnotateImageResponse carries a
// distinct marker value for every annotation kind, so each shortcut method's
// test case can verify that the method extracted exactly its own field.
var batchResponse = &pb.BatchAnnotateImagesResponse{
	Responses: []*pb.AnnotateImageResponse{{
		FaceAnnotations: []*pb.FaceAnnotation{
			{RollAngle: 1}, {RollAngle: 2}},
		LandmarkAnnotations:       []*pb.EntityAnnotation{{Mid: "landmark"}},
		LogoAnnotations:           []*pb.EntityAnnotation{{Mid: "logo"}},
		LabelAnnotations:          []*pb.EntityAnnotation{{Mid: "label"}},
		TextAnnotations:           []*pb.EntityAnnotation{{Mid: "text"}},
		FullTextAnnotation:        &pb.TextAnnotation{Text: "full"},
		SafeSearchAnnotation:      &pb.SafeSearchAnnotation{Spoof: pb.Likelihood_POSSIBLE},
		ImagePropertiesAnnotation: &pb.ImageProperties{DominantColors: &pb.DominantColorsAnnotation{}},
		CropHintsAnnotation:       &pb.CropHintsAnnotation{CropHints: []*pb.CropHint{{Confidence: 0.5}}},
		WebDetection:              &pb.WebDetection{WebEntities: []*pb.WebDetection_WebEntity{{EntityId: "web"}}},
	}},
}
  41. // Verify that all the "shortcut" methods use the underlying
  42. // BatchAnnotateImages RPC correctly.
  43. func TestClientMethods(t *testing.T) {
  44. ctx := context.Background()
  45. c, err := NewImageAnnotatorClient(ctx, clientOpt)
  46. if err != nil {
  47. t.Fatal(err)
  48. }
  49. mockImageAnnotator.resps = []proto.Message{batchResponse}
  50. img := &pb.Image{Source: &pb.ImageSource{ImageUri: "http://foo.jpg"}}
  51. ictx := &pb.ImageContext{LanguageHints: []string{"en", "fr"}}
  52. req := &pb.AnnotateImageRequest{
  53. Image: img,
  54. ImageContext: ictx,
  55. Features: []*pb.Feature{
  56. {Type: pb.Feature_LABEL_DETECTION, MaxResults: 3},
  57. {Type: pb.Feature_FACE_DETECTION, MaxResults: 4},
  58. },
  59. }
  60. for i, test := range []struct {
  61. call func() (interface{}, error)
  62. wantFeatures []*pb.Feature
  63. wantRes interface{}
  64. }{
  65. {
  66. func() (interface{}, error) { return c.AnnotateImage(ctx, req) },
  67. req.Features, batchResponse.Responses[0],
  68. },
  69. {
  70. func() (interface{}, error) { return c.DetectFaces(ctx, img, ictx, 2) },
  71. []*pb.Feature{{Type: pb.Feature_FACE_DETECTION, MaxResults: 2}},
  72. batchResponse.Responses[0].FaceAnnotations,
  73. },
  74. {
  75. func() (interface{}, error) { return c.DetectLandmarks(ctx, img, ictx, 2) },
  76. []*pb.Feature{{Type: pb.Feature_LANDMARK_DETECTION, MaxResults: 2}},
  77. batchResponse.Responses[0].LandmarkAnnotations,
  78. },
  79. {
  80. func() (interface{}, error) { return c.DetectLogos(ctx, img, ictx, 2) },
  81. []*pb.Feature{{Type: pb.Feature_LOGO_DETECTION, MaxResults: 2}},
  82. batchResponse.Responses[0].LogoAnnotations,
  83. },
  84. {
  85. func() (interface{}, error) { return c.DetectLabels(ctx, img, ictx, 2) },
  86. []*pb.Feature{{Type: pb.Feature_LABEL_DETECTION, MaxResults: 2}},
  87. batchResponse.Responses[0].LabelAnnotations,
  88. },
  89. {
  90. func() (interface{}, error) { return c.DetectTexts(ctx, img, ictx, 2) },
  91. []*pb.Feature{{Type: pb.Feature_TEXT_DETECTION, MaxResults: 2}},
  92. batchResponse.Responses[0].TextAnnotations,
  93. },
  94. {
  95. func() (interface{}, error) { return c.DetectDocumentText(ctx, img, ictx) },
  96. []*pb.Feature{{Type: pb.Feature_DOCUMENT_TEXT_DETECTION, MaxResults: 0}},
  97. batchResponse.Responses[0].FullTextAnnotation,
  98. },
  99. {
  100. func() (interface{}, error) { return c.DetectSafeSearch(ctx, img, ictx) },
  101. []*pb.Feature{{Type: pb.Feature_SAFE_SEARCH_DETECTION, MaxResults: 0}},
  102. batchResponse.Responses[0].SafeSearchAnnotation,
  103. },
  104. {
  105. func() (interface{}, error) { return c.DetectImageProperties(ctx, img, ictx) },
  106. []*pb.Feature{{Type: pb.Feature_IMAGE_PROPERTIES, MaxResults: 0}},
  107. batchResponse.Responses[0].ImagePropertiesAnnotation,
  108. },
  109. {
  110. func() (interface{}, error) { return c.DetectWeb(ctx, img, ictx) },
  111. []*pb.Feature{{Type: pb.Feature_WEB_DETECTION, MaxResults: 0}},
  112. batchResponse.Responses[0].WebDetection,
  113. },
  114. {
  115. func() (interface{}, error) { return c.CropHints(ctx, img, ictx) },
  116. []*pb.Feature{{Type: pb.Feature_CROP_HINTS, MaxResults: 0}},
  117. batchResponse.Responses[0].CropHintsAnnotation,
  118. },
  119. {
  120. func() (interface{}, error) { return c.LocalizeObjects(ctx, img, ictx) },
  121. []*pb.Feature{{Type: pb.Feature_OBJECT_LOCALIZATION, MaxResults: 0}},
  122. batchResponse.Responses[0].LocalizedObjectAnnotations,
  123. },
  124. {
  125. func() (interface{}, error) { return c.ProductSearch(ctx, img, ictx) },
  126. []*pb.Feature{{Type: pb.Feature_PRODUCT_SEARCH, MaxResults: 0}},
  127. batchResponse.Responses[0].ProductSearchResults,
  128. },
  129. } {
  130. mockImageAnnotator.reqs = nil
  131. res, err := test.call()
  132. if err != nil {
  133. t.Fatal(err)
  134. }
  135. got := mockImageAnnotator.reqs[0]
  136. want := &pb.BatchAnnotateImagesRequest{
  137. Requests: []*pb.AnnotateImageRequest{{
  138. Image: img,
  139. ImageContext: ictx,
  140. Features: test.wantFeatures,
  141. }},
  142. }
  143. if !testEqual(got, want) {
  144. t.Errorf("#%d:\ngot %v\nwant %v", i, got, want)
  145. }
  146. if got, want := res, test.wantRes; !testEqual(got, want) {
  147. t.Errorf("#%d:\ngot %v\nwant %v", i, got, want)
  148. }
  149. }
  150. }
  151. func testEqual(a, b interface{}) bool {
  152. if a == nil && b == nil {
  153. return true
  154. }
  155. if a == nil || b == nil {
  156. return false
  157. }
  158. t := reflect.TypeOf(a)
  159. if t != reflect.TypeOf(b) {
  160. return false
  161. }
  162. if am, ok := a.(proto.Message); ok {
  163. return proto.Equal(am, b.(proto.Message))
  164. }
  165. if t.Kind() != reflect.Slice {
  166. panic(fmt.Sprintf("testEqual can only handle proto.Message and slices, got %s", t))
  167. }
  168. va := reflect.ValueOf(a)
  169. vb := reflect.ValueOf(b)
  170. if va.Len() != vb.Len() {
  171. return false
  172. }
  173. for i := 0; i < va.Len(); i++ {
  174. if !testEqual(va.Index(i).Interface(), vb.Index(i).Interface()) {
  175. return false
  176. }
  177. }
  178. return true
  179. }
  180. func TestAnnotateOneError(t *testing.T) {
  181. ctx := context.Background()
  182. c, err := NewImageAnnotatorClient(ctx, clientOpt)
  183. if err != nil {
  184. t.Fatal(err)
  185. }
  186. mockImageAnnotator.resps = []proto.Message{
  187. &pb.BatchAnnotateImagesResponse{
  188. Responses: []*pb.AnnotateImageResponse{{
  189. Error: &status.Status{Code: int32(codes.NotFound), Message: "not found"},
  190. }},
  191. },
  192. }
  193. _, err = c.annotateOne(ctx,
  194. &pb.Image{Source: &pb.ImageSource{ImageUri: "http://foo.jpg"}},
  195. nil, pb.Feature_LOGO_DETECTION, 1, nil)
  196. if c := grpc.Code(err); c != codes.NotFound {
  197. t.Errorf("got %v, want NotFound", c)
  198. }
  199. }