{ "name": "parades", "version": "0.0.1", "main": "index.js", "dependencies": { "actions-on-google": "^1.1.1" } }
// Fulfillment for the "parades" action, using the actions-on-google v1 client library.
const ApiAiApp = require('actions-on-google').ApiAiApp;

// Handler for the inquiry.parades intent: speak the answer and keep the conversation open.
function parade(app) {
  app.ask(`Chinese New Year Parade in Chinatown from 6pm to 9pm.`);
}

exports.parades = function (request, response) {
  const app = new ApiAiApp({request: request, response: response});
  const actionMap = new Map();
  actionMap.set('inquiry.parades', parade);
  app.handleRequest(actionMap);
};
The fulfillment relies on four pieces of the client library: ApiAiApp, which wraps the incoming API.AI (Dialogflow) webhook request and response; ask(), which speaks a response and keeps the conversation open for the user's next utterance; tell(), which speaks a final response and ends the conversation; and handleRequest(), which dispatches the matched intent to its handler via the action map. A sketch contrasting ask() and tell() follows.
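To make the ask()/tell() distinction concrete, here is a minimal sketch that extends the fulfillment above with a hypothetical closing intent; the inquiry.goodbye intent name and the goodbye handler are assumptions for illustration, not part of the sample.

const ApiAiApp = require('actions-on-google').ApiAiApp;

// ask(): respond and wait for the user's next utterance.
function parade(app) {
  app.ask(`Chinese New Year Parade in Chinatown from 6pm to 9pm.`);
}

// tell(): respond and end the conversation (hypothetical intent).
function goodbye(app) {
  app.tell('Enjoy the parade!');
}

exports.parades = function (request, response) {
  const app = new ApiAiApp({request: request, response: response});
  const actionMap = new Map();
  actionMap.set('inquiry.parades', parade);
  actionMap.set('inquiry.goodbye', goodbye); // assumed intent name
  app.handleRequest(actionMap);
};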
SpeechRecognitionService.m
#define API_KEY @"YOUR_API_KEY"
NSDictionary *paramsDictionary =
    @{@"requests": @[
        @{@"image": @{@"content": binaryImageData},
          @"features": @[
            @{@"type": @"LANDMARK_DETECTION", @"maxResults": @1}
          ]}
    ]};
#import <AVFoundation/AVFoundation.h>

AVSpeechUtterance *utterance = [[AVSpeechUtterance alloc] initWithString:message];
AVSpeechSynthesizer *synthesizer = [[AVSpeechSynthesizer alloc] init];
[synthesizer speakUtterance:utterance];
recognitionConfig.languageCode = @"en-US";
recognitionConfig.languageCode = @"zh-Hans";
#import <AVFoundation/AVFoundation.h>

AVSpeechUtterance *utterance = [[AVSpeechUtterance alloc] initWithString:message];
// voiceWithLanguage: needs a code that matches an installed voice (e.g. zh-CN);
// @"zh-Hans" has no matching voice and returns nil.
utterance.voice = [AVSpeechSynthesisVoice voiceWithLanguage:@"zh-CN"];
AVSpeechSynthesizer *synthesizer = [[AVSpeechSynthesizer alloc] init];
[synthesizer speakUtterance:utterance];