Question edit history

7

Typo fix

2019/01/03 01:08

Posted

sunsetblue

Score 14

test CHANGED
File without changes
test CHANGED
@@ -6,7 +6,7 @@
 
 I referred to the GitHub page and the Qiita page below, but the text recognized by OCR is not shown in labelResults;
 
- it is shown in the Xcode log field.
+ it is shown in the Xcode console.
 
 When I run OCR, the "labelResults" text view shows everything from "request.httpBody = data" onward in the code below.
 

6

Attached an image

2019/01/03 01:08

Posted

sunsetblue

Score 14

test CHANGED
File without changes
test CHANGED
@@ -457,3 +457,5 @@
 }
 
 ```
+
+ ![Image description](476ff4fec1fe5103d93fcecd7b304581.png)
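For reference on the symptom described in the revisions above (the recognised text showing up in Xcode rather than in the text view): the string that ends up in `labelResults` is whatever gets assigned to it on the main thread. The sketch below is illustrative only and is not part of the question; it reuses the `ViewController` and `labelResults` names from the question's code, and the helper name `showRecognizedText` is invented here.

```Swift
import UIKit

extension ViewController {
    // Illustrative helper (not in the original code): route a recognised string
    // into the labelResults text view on the main thread.
    func showRecognizedText(_ text: String) {
        DispatchQueue.main.async {
            self.labelResults.isHidden = false
            self.labelResults.text = text.isEmpty ? "No text found" : text
        }
    }
}
```

Inside `analyzeResults(_:)`, calling something like `showRecognizedText(labelAnnotations[0]["description"].stringValue)` instead of only `print(json)` would send the same string to the UI.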

5

Typo fix

2019/01/03 00:18

Posted

sunsetblue

Score 14

test CHANGED
File without changes
test CHANGED
@@ -20,470 +20,440 @@
 
 
 
-
- ### The problem / error message
-
- I am building an iOS app that runs OCR on the text in an image.
-
- I referred to the GitHub page and the Qiita page below, but the text recognized by OCR is not shown in labelResults;
- it is shown in the log field in Xcode.
- When I run OCR, the "labelResults" text view shows everything from "request.httpBody = data" onward in the code below.
- Could you tell me how to handle this?
- Error message
+ ```Swift
+
+ import UIKit
+ import SwiftyJSON
+
+ class ViewController: UIViewController, UIImagePickerControllerDelegate, UINavigationControllerDelegate {
+ let imagePicker = UIImagePickerController()
+ let session = URLSession.shared
+
+ @IBOutlet weak var imageView: UIImageView!
+ @IBOutlet weak var spinner: UIActivityIndicatorView!
+ @IBOutlet weak var labelResults: UITextView!
+ @IBOutlet weak var faceResults: UITextView!
+
+ var googleAPIKey = "YOUR_API_KEY"
+ var googleURL: URL {
+ return URL(string: "https://vision.googleapis.com/v1/images:annotate?key=\(googleAPIKey)")!
+ }
+
+ @IBAction func loadImageButtonTapped(_ sender: UIButton) {
+ imagePicker.allowsEditing = false
+ imagePicker.sourceType = .photoLibrary
+
+ present(imagePicker, animated: true, completion: nil)
+ }
+
+ override func viewDidLoad() {
+ super.viewDidLoad()
+ // Do any additional setup after loading the view, typically from a nib.
+ imagePicker.delegate = self
+ labelResults.isHidden = true
+ faceResults.isHidden = true
+ spinner.hidesWhenStopped = true
+ }
+
+ override func didReceiveMemoryWarning() {
+ super.didReceiveMemoryWarning()
+ // Dispose of any resources that can be recreated.
+ }
+ }
+
+ /// Image processing
+ extension ViewController {
+
+ func analyzeResults(_ dataToParse: Data) {
+
+ // Update UI on the main thread
+ DispatchQueue.main.async(execute: {
+
+ // Use SwiftyJSON to parse results
+ let json = JSON(data: dataToParse)
+ let errorObj: JSON = json["error"]
+
+ self.spinner.stopAnimating()
+ self.imageView.isHidden = true
+ self.labelResults.isHidden = false
+ self.faceResults.isHidden = false
+ self.faceResults.text = ""
+
+ // Check for errors
+ if (errorObj.dictionaryValue != [:]) {
+ self.labelResults.text = "Error code \(errorObj["code"]): \(errorObj["message"])"
+ } else {
+ // Parse the response
+ print(json)
+ let responses: JSON = json["responses"][0]
+
+ // Get label annotations
+ let labelAnnotations: JSON = responses["textAnnotations"]
+ let numLabels: Int = labelAnnotations.count
+ var labels: Array<String> = []
+ if numLabels > 0 {
+
+ var labelResultsText:String = "Labels found: "
+ for index in 0..<1 {
+ let label = labelAnnotations[index]["description"].stringValue
+ labels.append(label)
+ }
+ for label in labels {
+ // if it's not the last item add a comma
+ if labels[labels.count - 1] != label {
+ labelResultsText += "\(label), "
+ } else {
+ labelResultsText += "\(label)"
+ }
+ }
+ self.labelResults.text = labelResultsText
+ }
+ }
+ })
+
+ }
+
+ func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String : Any]) {
+ if let pickedImage = info[UIImagePickerControllerOriginalImage] as? UIImage {
+ imageView.contentMode = .scaleAspectFit
+ imageView.isHidden = true // You could optionally display the image here by setting imageView.image = pickedImage
+ spinner.startAnimating()
+ faceResults.isHidden = true
+ labelResults.isHidden = true
+
+ // Base64 encode the image and create the request
+ let binaryImageData = base64EncodeImage(pickedImage)
+ createRequest(with: binaryImageData)
+ }
+
+ dismiss(animated: true, completion: nil)
+ }
+
+ func imagePickerControllerDidCancel(_ picker: UIImagePickerController) {
+ dismiss(animated: true, completion: nil)
+ }
+
+ func resizeImage(_ imageSize: CGSize, image: UIImage) -> Data {
+ UIGraphicsBeginImageContext(imageSize)
+ image.draw(in: CGRect(x: 0, y: 0, width: imageSize.width, height: imageSize.height))
+ let newImage = UIGraphicsGetImageFromCurrentImageContext()
+ let resizedImage = UIImagePNGRepresentation(newImage!)
+ UIGraphicsEndImageContext()
+ return resizedImage!
+ }
+ }
+
+ /// Networking
+ extension ViewController {
+ func base64EncodeImage(_ image: UIImage) -> String {
+ var imagedata = UIImagePNGRepresentation(image)
+
+ // Resize the image if it exceeds the 2MB API limit
+ if (imagedata?.count > 2097152) {
+ let oldSize: CGSize = image.size
+ let newSize: CGSize = CGSize(width: 800, height: oldSize.height / oldSize.width * 800)
+ imagedata = resizeImage(newSize, image: image)
+ }
+
+ return imagedata!.base64EncodedString(options: .endLineWithCarriageReturn)
+ }
+
+ func createRequest(with imageBase64: String) {
+ // Create our request URL
+
+ var request = URLRequest(url: googleURL)
+ request.httpMethod = "POST"
+ request.addValue("application/json", forHTTPHeaderField: "Content-Type")
+ request.addValue(Bundle.main.bundleIdentifier ?? "", forHTTPHeaderField: "X-Ios-Bundle-Identifier")
+
+ // Build our API request
+ let jsonRequest = [
+ "requests": [
+ "image": [
+ "content": imageBase64
+ ],
+ "features": [
+ [
+ "type": "TEXT_DETECTION",
+ "maxResults": 1
+ ],
+ ]
+ ]
+ ]
+ let jsonObject = JSON(jsonDictionary: jsonRequest)
+
+ // Serialize the JSON
+ guard let data = try? jsonObject.rawData() else {
+ return
+ }
+
+ request.httpBody = data
+
+ // Run the request on a background thread
+ DispatchQueue.global().async { self.runRequestOnBackgroundThread(request) }
+ }
+
+ func runRequestOnBackgroundThread(_ request: URLRequest) {
+ // run the request
+
+ let task: URLSessionDataTask = session.dataTask(with: request) { (data, response, error) in
+ guard let data = data, error == nil else {
+ print(error?.localizedDescription ?? "")
+ return
+ }
+
+ self.analyzeResults(data)
+ }
+
+ task.resume()
+ }
+ }
+
+ // FIXME: comparison operators with optionals were removed from the Swift Standard Library.
+ // Consider refactoring the code to use the non-optional operators.
+ fileprivate func < <T : Comparable>(lhs: T?, rhs: T?) -> Bool {
+ switch (lhs, rhs) {
+ case let (l?, r?):
+ return l < r
+ case (nil, _?):
+ return true
+ default:
+ return false
+ }
+ }
+
+ // FIXME: comparison operators with optionals were removed from the Swift Standard Library.
+ // Consider refactoring the code to use the non-optional operators.
+ fileprivate func > <T : Comparable>(lhs: T?, rhs: T?) -> Bool {
+ switch (lhs, rhs) {
+ case let (l?, r?):
+ return l > r
+ default:
+ return rhs < lhs
+ }
+ }
 
 ```
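As a side note on the two `fileprivate` operator shims at the end of the code above: they exist only so that the optional comparison `imagedata?.count > 2097152` compiles, which is exactly what the FIXME comments warn about. Below is a minimal sketch of the refactor those comments suggest; it reuses `UIImagePNGRepresentation` and the `resizeImage(_:image:)` helper from the code above and is meant as an illustrative drop-in for the method inside the same extension, not as the author's implementation.

```Swift
// Sketch: unwrap the optional byte count first, then compare non-optionals,
// so the custom fileprivate < and > operators are no longer needed.
func base64EncodeImage(_ image: UIImage) -> String {
    var imagedata = UIImagePNGRepresentation(image)

    // Resize the image if it exceeds the 2 MB API limit.
    if let byteCount = imagedata?.count, byteCount > 2_097_152 {
        let oldSize = image.size
        let newSize = CGSize(width: 800, height: oldSize.height / oldSize.width * 800)
        imagedata = resizeImage(newSize, image: image)
    }

    return imagedata?.base64EncodedString(options: .endLineWithCarriageReturn) ?? ""
}
```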

4

Typo fix

2019/01/02 12:43

Posted

sunsetblue

Score 14

test CHANGED
@@ -1 +1 @@
- teHow to display text recognized by OCR with the Google Vision API in a text view in Swift
+ How to display text recognized by OCR with the Google Vision API in a text view in Swift
test CHANGED
File without changes

3

Rewrote to follow the template

2019/01/02 05:49

Posted

sunsetblue

Score 14

test CHANGED
@@ -1 +1 @@
- How to display text recognized by OCR with the Google Vision API in a text view in Swift
+ teHow to display text recognized by OCR with the Google Vision API in a text view in Swift
test CHANGED
@@ -1,3 +1,5 @@
+ ### How to display text recognized by OCR with the Google Vision API in a text view in Swift
+
 I am building an iOS app that runs OCR on the text in an image.
 
 
@@ -18,7 +20,39 @@
 
 
 
+
+ ### The problem / error message
+
+ I am building an iOS app that runs OCR on the text in an image.
+
+ I referred to the GitHub page and the Qiita page below, but the text recognized by OCR is not shown in labelResults;
+ it is shown in the log field in Xcode.
+ When I run OCR, the "labelResults" text view shows everything from "request.httpBody = data" onward in the code below.
+ Could you tell me how to handle this?
+ Error message
+ ```
+
- ```ここに言語を入力
+ ```Swift
+
 
 import UIKit
 
@@ -26,6 +60,8 @@
 
 
 
+
+
 class ViewController: UIViewController, UIImagePickerControllerDelegate, UINavigationControllerDelegate {
 
 let imagePicker = UIImagePickerController()
@@ -142,11 +178,7 @@
 
 if (errorObj.dictionaryValue != [:]) {
 
- self.labelResults.text = "Error code \(errorObj["code"]): ```ここに言語を入力
+ self.labelResults.text = "Error code \(errorObj["code"]): \(errorObj["message"])"
-
- \
-
- ```\(errorObj["message"])"
 
 } else {
 
@@ -158,71 +190,7 @@
 
 
 
- /*
+
- // Get face annotations
- let faceAnnotations: JSON = responses["faceAnnotations"]
- if faceAnnotations != nil {
- let emotions: Array<String> = ["joy", "sorrow", "surprise", "anger"]
-
- let numPeopleDetected:Int = faceAnnotations.count
-
- self.faceResults.text = "People detected: \(numPeopleDetected)\n\nEmotions detected:\n"
-
- var emotionTotals: [String: Double] = ["sorrow": 0, "joy": 0, "surprise": 0, "anger": 0]
- var emotionLikelihoods: [String: Double] = ["VERY_LIKELY": 0.9, "LIKELY": 0.75, "POSSIBLE": 0.5, "UNLIKELY":0.25, "VERY_UNLIKELY": 0.0]
-
- for index in 0..<numPeopleDetected {
- let personData:JSON = faceAnnotations[index]
-
- // Sum all the detected emotions
- for emotion in emotions {
- let lookup = emotion + "Likelihood"
- let result:String = personData[lookup].stringValue
- emotionTotals[emotion]! += emotionLikelihoods[result]!
- }
- }
- // Get emotion likelihood as a % and display in UI
- for (emotion, total) in emotionTotals {
- let likelihood:Double = total / Double(numPeopleDetected)
- let percent: Int = Int(round(likelihood * 100))
- self.faceResults.text! += "\(emotion): \(percent)%\n"
- }
- } else {
- self.faceResults.text = "No faces found"
- }*/
-
 
 // Get label annotations
 
@@ -266,12 +234,6 @@
 
 }
 
- //} else {
- // self.labelResults.text = labelResultsText //"No labels found"
- //}
 
 }
 })
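One detail about the `textAnnotations` parsing that appears as context in the hunks above: in a TEXT_DETECTION response, element 0 of `textAnnotations` is the entire recognised text and the following elements are the individual words, which is why the `for index in 0..<1` loop only ever collects the full block. A short illustrative sketch, assuming the same SwiftyJSON `responses` value as in the question's code; the helper name is invented here.

```Swift
import SwiftyJSON

extension ViewController {
    // Illustrative helper (not in the original code): split a TEXT_DETECTION
    // response into the full recognised text and the per-word annotations.
    func extractText(from responses: JSON) -> (fullText: String, words: [String]) {
        let textAnnotations = responses["textAnnotations"]
        // Element 0 holds the whole recognised block of text.
        let fullText = textAnnotations[0]["description"].stringValue
        // The remaining elements are individual words/segments.
        let words = textAnnotations.arrayValue.dropFirst().map { $0["description"].stringValue }
        return (fullText, words)
    }
}
```

In `analyzeResults(_:)`, `self.labelResults.text = extractText(from: responses).fullText` would then display the whole recognised text.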

2

Inserted code

2019/01/02 05:48

Posted

sunsetblue

Score 14

test CHANGED
File without changes
test CHANGED
@@ -18,7 +18,9 @@
 
 
 
+ ```ここに言語を入力
+
- ```import UIKit
+ import UIKit
 
 import SwiftyJSON
 
@@ -522,6 +524,4 @@
 
 }
 
- Code
-
 ```

1

Inserted code

2019/01/02 05:41

Posted

sunsetblue

Score 14

test CHANGED
File without changes
test CHANGED
@@ -18,9 +18,7 @@
 
 
 
-
-
- import UIKit
+ ```import UIKit
 
 import SwiftyJSON
 
@@ -142,7 +140,11 @@
 
 if (errorObj.dictionaryValue != [:]) {
 
- self.labelResults.text = "Error code \(errorObj["code"]): \(errorObj["message"])"
+ self.labelResults.text = "Error code \(errorObj["code"]): ```ここに言語を入力
+
+ \
+
+ ```\(errorObj["message"])"
 
 } else {
 
@@ -519,3 +521,7 @@
 }
 
 }
+
+ Code
+
+ ```