@@ -11,6 +11,7 @@ import {
 } from "nativescript-plugin-firebase/mlkit/imagelabeling";
 import { MLKitLandmarkRecognitionCloudResult } from "nativescript-plugin-firebase/mlkit/landmarkrecognition";
 import { MLKitNaturalLanguageIdentificationResult } from "nativescript-plugin-firebase/mlkit/naturallanguageidentification";
+import { MLKitSmartReplyConversationMessage } from "nativescript-plugin-firebase/mlkit/smartreply";
 import { MLKitRecognizeTextResult } from "nativescript-plugin-firebase/mlkit/textrecognition";
 import * as fileSystemModule from "tns-core-modules/file-system";
 import { ImageAsset } from "tns-core-modules/image-asset";
@@ -38,7 +39,8 @@ export class MLKitComponent {
3839 "Image labeling (cloud)" ,
3940 "Custom model" ,
4041 "Landmark recognition (cloud)" ,
41- "Language identification (on device)"
42+ "Language identification" ,
43+ "Smart Reply"
4244 ] ;
4345
4446 private mlkitOnDeviceFeatures : Array < string > = [
@@ -191,8 +193,10 @@ export class MLKitComponent {
           this.recognizeLandmarkCloud(imageSource);
         } else if (pickedItem === "Custom model") {
           this.customModel(imageSource);
-        } else if (pickedItem === "Language identification (on device)") {
+        } else if (pickedItem === "Language identification") {
           this.languageIdentification(imageSource);
+        } else if (pickedItem === "Smart Reply") {
+          this.smartReply(imageSource);
         }
       });
   }
@@ -246,21 +250,41 @@ export class MLKitComponent {
     // First recognize text, then get its language
     firebase.mlkit.textrecognition.recognizeTextOnDevice({
       image: imageSource
-    }).then(
-        (result: MLKitRecognizeTextResult) => {
-          firebase.mlkit.naturallanguageidentification.identifyNaturalLanguage({
-            text: result.text
-          }).then(
-              (languageIdResult: MLKitNaturalLanguageIdentificationResult) => {
-                alert({
-                  title: `Result`,
-                  message: `Language code: ${languageIdResult.languageCode}`,
-                  okButtonText: "OK"
-                });
-              })
-              .catch(errorMessage => console.log("ML Kit error: " + errorMessage));
-        })
-        .catch(errorMessage => console.log("ML Kit error: " + errorMessage));
+    }).then((result: MLKitRecognizeTextResult) => {
+      firebase.mlkit.naturallanguageidentification.identifyNaturalLanguage({
+        text: result.text
+      }).then((languageIdResult: MLKitNaturalLanguageIdentificationResult) => {
+        alert({
+          title: `Result`,
+          message: `Language code: ${languageIdResult.languageCode}`,
+          okButtonText: "OK"
+        });
+      }).catch(errorMessage => console.log("ML Kit error: " + errorMessage));
+    }).catch(errorMessage => console.log("ML Kit error: " + errorMessage));
+  }
+
+  // it would be easier to hardcode the conversation, but this fits better with the other image-based examples
+  private smartReply(imageSource: ImageSource): void {
+    firebase.mlkit.textrecognition.recognizeTextOnDevice({
+      image: imageSource
+    }).then((result: MLKitRecognizeTextResult) => {
+      const messages: Array<MLKitSmartReplyConversationMessage> = [];
+      result.blocks.forEach(block => messages.push({
+        text: block.text,
+        userId: "abc",
+        localUser: false,
+        timestamp: new Date().getTime()
+      }));
+      firebase.mlkit.smartreply.suggestReplies({
+        messages
+      }).then((result: Array<string>) => {
+        alert({
+          title: `Suggestions`,
+          message: JSON.stringify(result),
+          okButtonText: "OK"
+        });
+      }).catch(errorMessage => console.log("ML Kit error: " + errorMessage));
+    }).catch(errorMessage => console.log("ML Kit error: " + errorMessage));
   }
 
   private customModel(imageSource: ImageSource): void {
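Note on the new smart-reply API: as the comment in smartReply says, the conversation could just as well be hardcoded instead of extracted from an image. Below is a minimal standalone sketch of that variant, reusing only the suggestReplies call and the MLKitSmartReplyConversationMessage shape shown in the diff above; the conversation text and the "abc" user id are made-up placeholders, and the top-level firebase import is assumed to follow the plugin's usual form.

import * as firebase from "nativescript-plugin-firebase";
import { MLKitSmartReplyConversationMessage } from "nativescript-plugin-firebase/mlkit/smartreply";

// A hardcoded conversation; localUser: false marks messages as coming from
// the remote party, i.e. the messages we want reply suggestions for.
const messages: Array<MLKitSmartReplyConversationMessage> = [{
  text: "Hey, are you free for lunch tomorrow?", // placeholder sample text
  userId: "abc", // placeholder user id
  localUser: false,
  timestamp: new Date().getTime()
}];

firebase.mlkit.smartreply.suggestReplies({ messages })
    .then((suggestions: Array<string>) => console.log("Suggestions: " + JSON.stringify(suggestions)))
    .catch(errorMessage => console.log("ML Kit error: " + errorMessage));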