[[FIX]] Instantiation of clients on every load (#12)
jmendiara authored Dec 15, 2016
1 parent b3f4caf commit f437c94
Showing 7 changed files with 138 additions and 117 deletions.
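
The change shared by every middleware below is the same: modules that used to build their clients (Bing Speech, the Slack incoming webhook, object storage, the Event Hub sender) as a side effect of being imported now export a factory that checks its required environment variables first and instantiates the clients once, only when the factory is called from src/bot.ts. A minimal sketch of the pattern, with a hypothetical SomeClient and SOME_KEY standing in for the real clients and env vars touched in this commit:

import * as BotBuilder from 'botbuilder';
import * as logger from 'logops';

// Hypothetical stand-in for BingSpeechClient, IncomingWebhook, etc.
class SomeClient {
  constructor(private key: string) {}
}

// Before: `const client = new SomeClient(process.env.SOME_KEY);` ran at import
// time, whether or not SOME_KEY was configured.

// After: a factory defers instantiation and degrades to a pass-through
// middleware when the configuration is missing.
export default function factory(): BotBuilder.IMiddlewareMap {
  const key = process.env.SOME_KEY;

  if (!key) {
    logger.warn('Middleware is disabled. SOME_KEY env var needed');
    return {
      // Registering a no-op botbuilder hook avoids botbuilder's console.warn
      botbuilder: (session: BotBuilder.Session, next: Function) => next()
    };
  }

  const client = new SomeClient(key); // created once, when the bot wires the middleware in

  return {
    botbuilder: (session: BotBuilder.Session, next: Function) => {
      // ... use `client` on each incoming message ...
      next();
    }
  } as BotBuilder.IMiddlewareMap;
}

src/bot.ts then calls each factory once at startup (Audio(), EventHub(), Slack() in the hunk below), so clients are no longer created as an import-time side effect.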
2 changes: 1 addition & 1 deletion package.json
@@ -41,7 +41,7 @@
"dependencies": {
"@slack/client": "^3.6.0",
"@telefonica/object-storage": "^2.2.0",
"alfalfa": "^2.1.0",
"alfalfa": "^2.2.0",
"bingspeech-api-client": "^2.0.0",
"botbuilder": "3.4.4",
"express": "^4.14.0",
6 changes: 3 additions & 3 deletions src/bot.ts
@@ -55,14 +55,14 @@ export class Bot extends BotBuilder.UniversalBot {
logger.debug('Bot supported languages', supportedLanguages);

let middlewares = [
Audio,
Audio(),
DirectLinePrompts,
Logger,
Normalizer,
LanguageDetector(supportedLanguages),
Admin,
EventHub,
Slack
EventHub(),
Slack()
];
this.use(...middlewares);

4 changes: 2 additions & 2 deletions src/middlewares/audio.spec.ts
@@ -28,7 +28,7 @@ describe('Audio Middleware', () => {
.reply(200, new Buffer('fake-audio-binary-contents'));

let session = fakeBotSession();
let middleware: BotBuilder.ISessionMiddleware = audio.default.botbuilder as BotBuilder.ISessionMiddleware;
let middleware: BotBuilder.ISessionMiddleware = audio.default().botbuilder as BotBuilder.ISessionMiddleware;

middleware(session, () => {
expect(session.message.text).to.eq('This is a text');
@@ -45,7 +45,7 @@ describe('Audio Middleware', () => {
.reply(200);

let session = fakeBotSession();
let middleware: BotBuilder.ISessionMiddleware = audio.default.botbuilder as BotBuilder.ISessionMiddleware;
let middleware: BotBuilder.ISessionMiddleware = audio.default().botbuilder as BotBuilder.ISessionMiddleware;

middleware(session, () => {
expect(session.message.text).to.eq(''); // not replaced
160 changes: 83 additions & 77 deletions src/middlewares/audio.ts
@@ -23,94 +23,100 @@ import * as request from 'request';
import Therror from 'therror';

import { ObjectStorage } from '@telefonica/object-storage';
import { BingSpeechClient, VoiceRecognitionResponse } from 'bingspeech-api-client';

const streamifier = require('streamifier');
const storage = new ObjectStorage();

import { BingSpeechClient, VoiceRecognitionResponse } from 'bingspeech-api-client';
export default function factory(): BotBuilder.IMiddlewareMap {
if (!process.env.MICROSOFT_BING_SPEECH_KEY || !process.env.S3_ENDPOINT) {
logger.warn('Audio Middleware is disabled. MICROSOFT_BING_SPEECH_KEY and S3_ENDPOINT env vars needed');
return {
// To avoid botbuilder console.warn trace!! WTF
botbuilder: (session: BotBuilder.Session, next: Function) => next()
};
}

if (!process.env.MICROSOFT_BING_SPEECH_KEY) {
logger.warn('No MICROSOFT_BING_SPEECH_KEY');
}
const storage = new ObjectStorage();

const bingSpeechClient = new BingSpeechClient(process.env.MICROSOFT_BING_SPEECH_KEY);
const bingSpeechClient = new BingSpeechClient(process.env.MICROSOFT_BING_SPEECH_KEY);

const SUPPORTED_CONTENT_TYPES = ['audio/vnd.wave', 'audio/wav', 'audio/wave', 'audio/x-wav'];
export default {
const SUPPORTED_CONTENT_TYPES = ['audio/vnd.wave', 'audio/wav', 'audio/wave', 'audio/x-wav'];
return {
botbuilder: (session: BotBuilder.Session, next: Function) => {
let hasAttachment = session.message.type === 'message' &&
session.message &&
session.message.attachments &&
session.message.attachments.length > 0;

if (!hasAttachment) {
return next();
}

let attachment = session.message.attachments[0]; // XXX support multiple attachments

let isAudio = attachment.contentType.startsWith('audio/');
if (!isAudio) {
return next();
}

let isValidAudioAttachment = SUPPORTED_CONTENT_TYPES.indexOf(attachment.contentType) >= 0;

if (!isValidAudioAttachment) {
logger.warn(`Audio format not supported ${attachment.contentType}`);
session.send('Sorry, I do not understand your audio message');
return next(new Therror(`Audio format not supported ${attachment.contentType}`));
}

let contentUrl = attachment.contentUrl;
let voiceRecognitionResult: VoiceRecognitionResponse;

downloadRemoteResource(contentUrl)
.then(buffer => bingSpeechClient.recognize(buffer))
.then(voiceResult => {
logger.info({bingspeech: voiceResult}, 'Bing Speech transcoding succeeded');
voiceRecognitionResult = voiceResult;
return evaluateVoiceResponse(voiceResult);
})
.then(valid => {
if (valid) {
session.message.text = voiceRecognitionResult.header.lexical;
}
})
.then(() => next())
.catch(err => {
logger.warn(err, 'Audio middleware: Bing Speech transcoding failed');
next(err);
});
let hasAttachment = session.message.type === 'message' &&
session.message &&
session.message.attachments &&
session.message.attachments.length > 0;

if (!hasAttachment) {
return next();
}

let attachment = session.message.attachments[0]; // XXX support multiple attachments

let isAudio = attachment.contentType.startsWith('audio/');
if (!isAudio) {
return next();
}

let isValidAudioAttachment = SUPPORTED_CONTENT_TYPES.indexOf(attachment.contentType) >= 0;

if (!isValidAudioAttachment) {
logger.warn(`Audio format not supported ${attachment.contentType}`);
session.send('Sorry, I do not understand your audio message');
return next(new Therror(`Audio format not supported ${attachment.contentType}`));
}

let contentUrl = attachment.contentUrl;
let voiceRecognitionResult: VoiceRecognitionResponse;

downloadRemoteResource(contentUrl)
.then(buffer => bingSpeechClient.recognize(buffer))
.then(voiceResult => {
logger.info({bingspeech: voiceResult}, 'Bing Speech transcoding succeeded');
voiceRecognitionResult = voiceResult;
return evaluateVoiceResponse(voiceResult);
})
.then(valid => {
if (valid) {
session.message.text = voiceRecognitionResult.header.lexical;
}
})
.then(() => next())
.catch(err => {
logger.warn(err, 'Audio middleware: Bing Speech transcoding failed');
next(err);
});
},
send: (event: BotBuilder.IMessage, next: Function) => {
let audioOutputEnabled = process.env.ENABLE_AUDIO_OUTPUT === 'true';

//
// TODO determine whether the client sent an audio attachment (input) and supports audio (output).
// it is not so easy because we don't have the Session here.
//

if (!audioOutputEnabled || !event.text) {
return next();
}

bingSpeechClient.synthesize(event.text)
.then(response => storage.upload(streamifier.createReadStream(response.wave)))
.then(url => {
event.attachments = event.attachments || [];
event.attachments.push({
contentType: 'audio/wave',
contentUrl: url
});
})
.then(() => next())
.catch(err => {
logger.warn(err, 'Audio middleware: voice synthesis failed');
next(err);
let audioOutputEnabled = process.env.ENABLE_AUDIO_OUTPUT === 'true';

//
// TODO determine whether the client sent an audio attachment (input) and supports audio (output).
// it is not so easy because we don't have the Session here.
//

if (!audioOutputEnabled || !event.text) {
return next();
}

bingSpeechClient.synthesize(event.text)
.then(response => storage.upload(streamifier.createReadStream(response.wave)))
.then(url => {
event.attachments = event.attachments || [];
event.attachments.push({
contentType: 'audio/wave',
contentUrl: url
});
})
.then(() => next())
.catch(err => {
logger.warn(err, 'Audio middleware: voice synthesis failed');
next(err);
});
}
} as BotBuilder.IMiddlewareMap;
} as BotBuilder.IMiddlewareMap;
}

/**
* TODO this won't scale. Avoid the need of loading a resource in memory.
20 changes: 14 additions & 6 deletions src/middlewares/eventhub.ts
@@ -23,14 +23,22 @@ import * as crypto from 'crypto';
/**
* Sends the incoming message received by the bot to Azure Event Hub
*/
export default {
export default function factory(): BotBuilder.IMiddlewareMap {
if (!process.env.EVENTHUB_NAMESPACE) {
logger.warn('Eventhub Middleware is disabled. EVENTHUB_NAMESPACE env var needed');
return {
// To avoid botbuilder console.warn trace!! WTF
botbuilder: (session: BotBuilder.Session, next: Function) => next()
};
}

return {
botbuilder: (session: BotBuilder.Session, next: Function) => {
if (process.env.EVENTHUB_NAMESPACE) {
sendEventHub(session.message); // best-effort, no callback
}
next();
sendEventHub(session.message); // best-effort, no callback
next();
}
} as BotBuilder.IMiddlewareMap;
} as BotBuilder.IMiddlewareMap;
}

function sendEventHub(payload: any) {
// Event Hubs parameters
12 changes: 8 additions & 4 deletions src/middlewares/language-detector.ts
@@ -23,7 +23,7 @@ export default function factory(supportedLanguages: string[]): BotBuilder.IMiddl
botbuilder: (session: BotBuilder.Session, next: Function) => {
resolveLocale(session, supportedLanguages)
.then((locale) => setSessionLocale(session, locale))
.then(() => next())
.then((locale) => next())
.catch((err) => next(err));
}
} as BotBuilder.IMiddlewareMap;
@@ -60,7 +60,7 @@ function detectClientLocale(message: BotBuilder.IMessage): string {
return null;
}

function setSessionLocale(session: BotBuilder.Session, locale: string): Promise<void> {
function setSessionLocale(session: BotBuilder.Session, locale: string): Promise<string> {
return new Promise((resolve, reject) => {
session.preferredLocale(locale, (err) => {
if (err) {
@@ -71,8 +71,12 @@ function setSessionLocale(session: BotBuilder.Session, locale: string): Promise<
// Save the locale as part of userData because a fallback value might be needed in future messages
session.userData.preferredLocale = locale;

logger.info({preferredLocale: session.preferredLocale(), textLocale: session.message.textLocale}, 'Language detector');
return resolve();
logger.info({
preferredLocale: session.preferredLocale(),
textLocale: session.message.textLocale
}, 'Language detector');

resolve(locale);
});
});
}
51 changes: 27 additions & 24 deletions src/middlewares/slack.ts
@@ -17,32 +17,35 @@

import * as BotBuilder from 'botbuilder';
import * as logger from 'logops';
const IncomingWebhooks = require('@slack/client').IncomingWebhook;

/**
* Notify Slack about not recognized utterances
*/
export default {
send: (event: BotBuilder.IEvent, next: Function) => {
if (process.env.SLACK_WEBHOOK_URL) {
if (event.sourceEvent.intent === 'None') {
notifySlack(event.sourceEvent.text, process.env.SLACK_WEBHOOK_URL);
}
}
export default function factory(): BotBuilder.IMiddlewareMap {
if (!process.env.SLACK_WEBHOOK_URL) {
logger.warn('Slack Middleware is disabled. SLACK_WEBHOOK_URL env var needed');
return {
// To avoid botbuilder console.warn trace!! WTF
botbuilder: (session: BotBuilder.Session, next: Function) => next()
};
}

next();
}
} as BotBuilder.IMiddlewareMap;
let webhook = new IncomingWebhooks(process.env.SLACK_WEBHOOK_URL);

/**
* Fire and forget slack notification.
*/
function notifySlack(text: string, webhookUrl: string) {
var IncomingWebhooks = require('@slack/client').IncomingWebhook;
return {
/**
* Notify Slack about not recognized utterances
*/
send: (event: BotBuilder.IEvent, next: Function) => {
if (event.sourceEvent.intent === 'None') {
// Fire and forget slack notification.
webhook.send({
text: `Not able to classify: ${event.sourceEvent.text}`,
channel: process.env.SLACK_CHANNEL || 'bot-classify',
username: process.env.BOT_NAME || 'bot'
});
}

let webhook = new IncomingWebhooks(webhookUrl);
webhook.send({
text: `Not able to classify: ${text}`,
channel: process.env.SLACK_CHANNEL || 'bot-classify',
username: process.env.BOT_NAME || 'bot'
});
next();
}
} as BotBuilder.IMiddlewareMap;
}
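
Not part of this commit, but the new guard path is straightforward to pin down in a spec written in the same style as audio.spec.ts above. A minimal sketch, assuming mocha/chai as in the existing tests; the import paths for the slack factory and for the existing fakeBotSession helper are guesses:

import { expect } from 'chai';
import * as BotBuilder from 'botbuilder';

import slack from './slack';                          // assumed relative path to the factory
import { fakeBotSession } from '../../test/support';  // hypothetical location of the existing helper

describe('Slack Middleware factory', () => {
  it('returns a pass-through middleware when SLACK_WEBHOOK_URL is unset', (done) => {
    delete process.env.SLACK_WEBHOOK_URL;

    let middleware = slack().botbuilder as BotBuilder.ISessionMiddleware;
    expect(middleware).to.be.a('function');

    // The disabled stub must only call next(), never the Slack webhook.
    middleware(fakeBotSession(), done);
  });
});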
