Dive into secure and efficient coding practices with our curated list of top examples showcasing 'node-nlp' in JavaScript. Our machine learning engine scans each line of code, cross-referencing millions of open source libraries to ensure your implementation is not just functional, but also robust and secure. Elevate your Node.js applications by mastering intent training, entity extraction, API calls, and asynchronous operations with confidence and precision.

// Controller action (Sails-style handler); the file requires at the top:
//   const fs = require('fs')
//   const { NlpManager } = require('node-nlp')
postHelp: (req, res) => {
  const { message } = req.allParams()
  const manager = new NlpManager({ languages: ['en'] })
  let timeout = 3000
  if (fs.existsSync('./.tmp/model.nlp')) {
    // Reuse the previously trained and saved model
    manager.load('./.tmp/model.nlp')
  } else {
    timeout = 0
    // Adds the utterances and intents for the NLP
    manager.addDocument('en', 'Say about you', 'agent.acquaintance')
    manager.addDocument('en', 'Why are you here', 'agent.acquaintance')
    manager.addDocument('en', 'What is your personality', 'agent.acquaintance')
    manager.addDocument('en', 'Describe yourself', 'agent.acquaintance')
    manager.addDocument('en', 'Tell me about yourself', 'agent.acquaintance')
    manager.addDocument('en', 'Tell me about you', 'agent.acquaintance')
    manager.addDocument('en', 'What are you', 'agent.acquaintance')
    manager.addDocument('en', 'Who are you', 'agent.acquaintance')
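
The handler above is cut off before the model is trained and the reply is sent. Below is a minimal, self-contained sketch of the same load-or-train pattern; the Express-style route, the fallback answer text and the './.tmp/model.nlp' path are illustrative assumptions, while addDocument, addAnswer, train, save, load and process are the documented node-nlp calls.

// Sketch: reuse a cached model when it exists, otherwise train and save one,
// then answer the incoming message.
const fs = require('fs')
const { NlpManager } = require('node-nlp')

const MODEL_PATH = './.tmp/model.nlp' // assumed location, matching the snippet above

const getManager = async () => {
  const manager = new NlpManager({ languages: ['en'] })
  if (fs.existsSync(MODEL_PATH)) {
    manager.load(MODEL_PATH)
    return manager
  }
  manager.addDocument('en', 'Who are you', 'agent.acquaintance')
  manager.addDocument('en', 'Tell me about yourself', 'agent.acquaintance')
  manager.addAnswer('en', 'agent.acquaintance', 'I am a virtual assistant.')
  await manager.train()
  manager.save(MODEL_PATH)
  return manager
}

// Illustrative Express-style usage:
// app.post('/help', async (req, res) => {
//   const manager = await getManager()
//   const { answer } = await manager.process('en', req.body.message)
//   res.json({ answer: answer || 'Sorry, I did not get that.' })
// })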

// Conversation handler; ConversationContext comes from 'node-nlp', while
// extractClassification and processIntentType are project helpers defined elsewhere.
const conversationHandler = async ({ content }, LanguageProcessor) => {
  const context = new ConversationContext()
  const { data: sentence } = JSON.parse(content)
  logger.info(`Conversation control: received ${content}`)
  // Local NLP analysis
  logger.info(`NLP Analysis locally for ${content}`)
  const localProcessedSentence = await LanguageProcessor.process(
    'en',
    sentence,
    context
  )
  const { answer, classifications } = localProcessedSentence
  const suggestedClassification = extractClassification(classifications)
  // Trigger intents only when a confident classification was found
  if (suggestedClassification && suggestedClassification !== 'None') {
    processIntentType(suggestedClassification, sentence)
    if (answer) {
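
The handler relies on an extractClassification helper that is not shown. Here is one possible sketch of such a helper, assuming the { intent, score } objects that recent node-nlp versions return in result.classifications; the 0.5 threshold is an arbitrary illustration.

// Pick the highest-scoring intent, falling back to 'None' when confidence is too low.
const extractClassification = (classifications = [], threshold = 0.5) => {
  if (!classifications.length) return 'None'
  const [best] = [...classifications].sort((a, b) => b.score - a.score)
  return best.score >= threshold ? best.intent : 'None'
}

// Example:
// const result = await LanguageProcessor.process('en', 'who are you')
// const intent = extractClassification(result.classifications)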

// NER step (log, string, expressionsFilePath and obj come from the surrounding module)
return new Promise(async (resolve, reject) => {
  log.title('NER')
  log.info('Searching for entities...')
  // Need to instantiate on the fly to flush entities
  this.nerManager = new NerManager()
  const { entities, classification } = obj
  // Remove end-punctuation and add an end-whitespace
  const query = `${string.removeEndPunctuation(obj.query)} `
  const expressionsObj = JSON.parse(fs.readFileSync(expressionsFilePath, 'utf8'))
  const { module, action } = classification
  const promises = []
  // Verify the action has entities
  if (typeof expressionsObj[module][action].entities !== 'undefined') {
    const actionEntities = expressionsObj[module][action].entities
    /**
     * Browse action entities
     * Dynamic injection of the action entities depending on the entity type
     */
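
The snippet above instantiates NerManager from node-nlp to look up entities. Here is a small stand-alone sketch based on the NerManager API documented for node-nlp v3; the entity names, option texts and the 0.8 threshold are only examples.

const { NerManager } = require('node-nlp')

// Register a named entity with a few synonym texts, then search an utterance for it.
const nerManager = new NerManager({ threshold: 0.8 })
nerManager.addNamedEntityText('city', 'paris', ['en'], ['Paris', 'City of Light'])
nerManager.addNamedEntityText('city', 'lyon', ['en'], ['Lyon'])

nerManager.findEntities('Book me a train to the City of Light', 'en')
  .then(entities => {
    // Each match reports which entity and option it resolved to
    entities.forEach(e => console.log(e.entity, e.option))
  })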

/* eslint-disable no-unused-expressions */
import { BayesClassifier } from 'natural'
import { NlpManager, NluManager } from 'node-nlp'
import logger from 'hoopa-logger'
import baseKnowledge from '../../brain/knowledge'

// node-nlp expects the languages option as an array
const LanguageProcessor = new NlpManager({ languages: ['en'] })
const UnderstandingProcessor = new NluManager({ languages: ['en'] })

const NLP = {
  Bayes: new BayesClassifier(),
  LanguageProcessor,
  UnderstandingProcessor,
  trainModel: (newKnowledge = null, bayes, manager) => {
    logger.info('Cognitive analysis (NLP & NLU), basic training...')
    const trainingContext = baseKnowledge.natural
    if (newKnowledge) {
      // Merge the freshly learned tokens into the base knowledge
      newKnowledge.forEach(token => trainingContext.push(token))
    }
    // Register every utterance/intent pair with both the NLU and NLP managers
    trainingContext.forEach(token => {
      UnderstandingProcessor.addDocument('en', token.input, token.class)
      manager.addDocument('en', token.input, token.class)
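
Besides node-nlp, the NLP object above keeps a BayesClassifier from the 'natural' package as an extra classifier. A minimal sketch of that classifier on its own, using natural's documented addDocument, train, classify and getClassifications calls; the utterances and labels are illustrative.

const { BayesClassifier } = require('natural')

// The same { input, class } pairs can feed the Bayes classifier as a lightweight fallback.
const bayes = new BayesClassifier()
bayes.addDocument('who are you', 'agent.acquaintance')
bayes.addDocument('tell me about yourself', 'agent.acquaintance')
bayes.addDocument('turn on the lights', 'home.lights.on')
bayes.train()

console.log(bayes.classify('tell me who you are'))        // -> 'agent.acquaintance'
console.log(bayes.getClassifications('lights on please')) // ranked labels with scores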

// CLI training script (fs, path, NlpManager and the project's log, string and langs
// helpers are imported at the top of the file)
export default () => new Promise(async (resolve, reject) => {
  const { argv } = process
  const packagesDir = 'packages'
  const expressionsClassifier = 'server/src/data/expressions/classifier.json'
  // Usage: "npm run train [expressions[:lang]]"; defaults to English expressions
  let type = argv[2] ? argv[2].toLowerCase() : 'expressions'
  let lang = ''

  if (type.indexOf(':') !== -1) {
    [type, lang] = type.split(':')
  } else {
    lang = langs[process.env.LEON_LANG].short.toLowerCase().substr(0, 2)
  }

  try {
    if (type === 'expressions') {
      let manager = new NlpManager({ languages: ['en'] })
      if (lang !== 'en') {
        manager = new NlpManager({ languages: [lang] })
      }

      const packages = fs.readdirSync(packagesDir)
        .filter(entity => fs.statSync(path.join(packagesDir, entity)).isDirectory())
      let expressionsObj = {}

      for (let i = 0; i < packages.length; i += 1) {
        log.info(`Training "${string.ucfirst(packages[i])}" package modules expressions...`)
        expressionsObj = JSON.parse(fs.readFileSync(`${packagesDir}/${packages[i]}/data/expressions/${lang}.json`, 'utf8'))
        const modules = Object.keys(expressionsObj)

        for (let j = 0; j < modules.length; j += 1) {
          const module = modules[j]
          const actions = Object.keys(expressionsObj[module])
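
The loop above walks packages, then modules, then actions before feeding utterances to NlpManager. Below is a condensed sketch of that inner step; the expressions JSON shape and the "module:action" intent naming are assumptions inferred from the snippet, while addDocument, train and export are standard node-nlp calls.

const fs = require('fs')
const { NlpManager } = require('node-nlp')

// Assumed expressions file shape:
// { "moduleName": { "actionName": { "expressions": ["utterance one", "utterance two"] } } }
const trainExpressions = async (expressionsObj, lang, classifierPath) => {
  const manager = new NlpManager({ languages: [lang] })
  Object.keys(expressionsObj).forEach((module) => {
    Object.keys(expressionsObj[module]).forEach((action) => {
      const { expressions = [] } = expressionsObj[module][action]
      expressions.forEach((utterance) => {
        // Encode the intent as "module:action" so it can be split back after classification
        manager.addDocument(lang, utterance, `${module}:${action}`)
      })
    })
  })
  await manager.train()
  fs.writeFileSync(classifierPath, manager.export(), 'utf8')
}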

// Loads a previously exported classifier (classifierFile, log, this.brain and
// this.classifier belong to the surrounding NLU class)
return new Promise((resolve, reject) => {
  if (!fs.existsSync(classifierFile)) {
    log.title('NLU')
    reject({ type: 'warning', obj: new Error('The expressions classifier does not exist, please run: npm run train expressions') })
  } else {
    log.title('NLU')

    try {
      // Re-hydrate the trained model from its exported JSON
      const data = fs.readFileSync(classifierFile, 'utf8')
      const nlpManager = new NlpManager()
      nlpManager.import(data)
      this.classifier = nlpManager

      log.success('Classifier loaded')
      resolve()
    } catch (err) {
      this.brain.talk(`${this.brain.wernicke('random_errors')}! ${this.brain.wernicke('errors', 'nlu', { '%error%': err.message })}.`)
      this.brain.socket.emit('is-typing', false)
      reject({ type: 'error', obj: err })
    }
  }
})
}
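
The loader above re-hydrates a classifier that was exported to JSON earlier. Here is a compact round-trip sketch using NlpManager's documented export and import methods; the './classifier.json' path and the sample intent are illustrative.

const fs = require('fs')
const { NlpManager } = require('node-nlp')

// Train once and persist the model as JSON...
const trainAndExport = async (file = './classifier.json') => {
  const manager = new NlpManager({ languages: ['en'] })
  manager.addDocument('en', 'goodbye for now', 'greetings.bye')
  manager.addAnswer('en', 'greetings.bye', 'See you soon!')
  await manager.train()
  fs.writeFileSync(file, manager.export(), 'utf8')
}

// ...then reload it later without retraining.
const importClassifier = (file = './classifier.json') => {
  const manager = new NlpManager()
  manager.import(fs.readFileSync(file, 'utf8'))
  return manager
}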