An example of integrating a Kafka message queue with Koa + Egg.js
Egg.js: an enterprise-grade Node.js framework built on Koa 2.
Kafka: a high-throughput, distributed publish/subscribe messaging system.
This article walks through a logging system example that integrates Egg + Kafka + MySQL.
System requirement: record logs, with Kafka providing the message queue in between.
Flow diagram (the original diagram is not reproduced here); roughly: HTTP request → controller → service.send → Kafka topic → consumer → service.insert → MySQL.
Both the producer and the consumer are provided by the logging system itself.
λ.1 Environment setup
① Kafka
Download Kafka from the official site and unpack it.
Start ZooKeeper:
bin/zookeeper-server-start.sh config/zookeeper.properties
Start the Kafka server.
Here we set num.partitions=5 in config/server.properties, i.e. topics are created with 5 partitions.
bin/kafka-server-start.sh config/server.properties
② Egg + MySQL
Scaffold an Egg project with the official scaffolding, then additionally install kafka-node and egg-mysql.
MySQL username: root, password: 123456.
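Before the code below will run, the Egg plugins have to be enabled in config/plugin.js. The article does not show that file, so the following is only a sketch: egg-mysql is required for app.mysql, and the ctx.model / app.model usage further down implies a Sequelize plugin such as egg-sequelize (an assumption, since the article never names it); kafka-node is a plain dependency and needs no plugin entry.
'use strict';
// config/plugin.js (sketch): enable the database plugins used by the code below.
exports.mysql = {
  enable: true,
  package: 'egg-mysql',
};
exports.sequelize = {
  enable: true,
  package: 'egg-sequelize',
};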
λ.2 Integration
1. Create app.js in the project root; this file runs every time the application starts.
'use strict';
const kafka = require('kafka-node');

module.exports = app => {
  app.beforeStart(async () => {
    const ctx = app.createAnonymousContext();
    const Producer = kafka.Producer;
    const client = new kafka.KafkaClient({ kafkaHost: app.config.kafkaHost });
    const producer = new Producer(client, app.config.producerConfig);

    producer.on('error', function(err) {
      console.error('ERROR: [Producer] ' + err);
    });
    // Expose the producer on the app object so controllers/services can use it.
    app.producer = producer;

    // Manual offset management: autoCommit is off, we commit after a successful insert.
    const consumer = new kafka.Consumer(client, app.config.consumerTopics, {
      autoCommit: false,
    });
    consumer.on('message', async function(message) {
      try {
        // Write the consumed log message to the database via the service layer.
        await ctx.service.log.insert(JSON.parse(message.value));
        consumer.commit(true, (err, data) => {
          console.error('commit:', err, data);
        });
      } catch (error) {
        console.error('ERROR: [GetMessage] ', message, error);
      }
    });
    consumer.on('error', function(err) {
      console.error('ERROR: [Consumer] ' + err);
    });
  });
};
The code above creates both the producer and the consumer.
The producer is attached to the global app object right after creation; messages are only produced later, when requests come in. Here we just set up the instance.
When the consumer receives a message, it calls the service layer's insert method to write the record into the database.
For the full list of options, see the official kafka-node API docs; the producer and consumer configuration used here is shown further below.
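For reference, the message object that kafka-node hands to the 'message' handler looks roughly like this (illustrative values; the exact fields can vary by kafka-node version):
// Shape of `message` inside consumer.on('message', ...):
const exampleMessage = {
  topic: 'logAction_p5',
  value: '{"type":"ERROR","level":1,"operator":"Knove"}', // a raw string, hence the JSON.parse above
  offset: 42,
  partition: 3,
  key: null,
};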
2. controller · log.js
Here we grab the producer from the app object and pass it down to the service layer. (app.Response, used below, is a custom response helper of this project; it is not shown in the article.)
'use strict';
const Controller = require('egg').Controller;

class LogController extends Controller {
  /**
   * @description Push log messages through Kafka
   * @host /log/notice
   * @method POST
   * @param {Log} log the log payload
   */
  async notice() {
    const producer = this.ctx.app.producer;
    const Response = new this.ctx.app.Response();
    const requestBody = this.ctx.request.body;
    const backInfo = await this.ctx.service.log.send(producer, requestBody);
    this.ctx.body = Response.success(backInfo);
  }
}

module.exports = LogController;
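The article does not show the route definition. A minimal app/router.js exposing this controller would look something like the following sketch, which simply follows standard Egg conventions:
'use strict';
module.exports = app => {
  const { router, controller } = app;
  // POST /log/notice -> LogController.notice
  router.post('/log/notice', controller.log.notice);
};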
3. service · log.js
The send method calls producer.send, which is where the message is actually produced.
The insert method writes a record into the database.
'use strict';
const Service = require('egg').Service;
const uuidv1 = require('uuid/v1');

class LogService extends Service {
  // Produce a log message onto the configured Kafka topic.
  async send(producer, params) {
    const payloads = [
      {
        topic: this.ctx.app.config.topic,
        messages: JSON.stringify(params),
      },
    ];
    producer.send(payloads, function(err, data) {
      console.log('send : ', data);
    });
    return 'success';
  }

  // Insert a consumed log message into the database.
  async insert(message) {
    try {
      const logDB = this.ctx.app.mysql.get('log');
      const ip = this.ctx.ip;
      // Build a Sequelize instance only to normalize the field values,
      // then insert the plain dataValues through egg-mysql.
      const Logs = this.ctx.model.Log.build({
        id: uuidv1(),
        type: message.type || '',
        level: message.level || 0,
        operator: message.operator || '',
        content: message.content || '',
        ip,
        user_agent: message.user_agent || '',
        error_stack: message.error_stack || '',
        url: message.url || '',
        request: message.request || '',
        response: message.response || '',
        created_at: new Date(),
        updated_at: new Date(),
      });
      const result = await logDB.insert('logs', Logs.dataValues);
      if (result.affectedRows === 1) {
        console.log(`SUCCESS: [Insert ${message.type}]`);
      } else {
        console.error('ERROR: [Insert DB] ', result);
      }
    } catch (error) {
      console.error('ERROR: [Insert] ', message, error);
    }
  }
}

module.exports = LogService;
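Note that send() above returns 'success' before the producer callback fires, so delivery failures are only logged, never reported to the caller. If you want the HTTP response to reflect delivery, one option is to wrap producer.send in a Promise. A sketch of an alternative LogService.send (not part of the original code):
// Alternative LogService.send that waits for the broker acknowledgement (sketch):
async send(producer, params) {
  const payloads = [{ topic: this.ctx.app.config.topic, messages: JSON.stringify(params) }];
  return new Promise((resolve, reject) => {
    producer.send(payloads, (err, data) => {
      // `data` maps topic -> { partition: offset }, e.g. { logAction_p5: { '3': 128 } }
      if (err) return reject(err);
      resolve(data);
    });
  });
}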
4. config · config.default.js
The configuration values used by the code above live here; note that the topic uses 5 partitions.
'use strict';

module.exports = appInfo => {
  const config = (exports = {});
  const topic = 'logAction_p5';

  // add your config here
  config.middleware = [];

  config.security = {
    csrf: {
      enable: false,
    },
  };

  // mysql database configuration
  config.mysql = {
    clients: {
      basic: {
        host: 'localhost',
        port: '3306',
        user: 'root',
        password: '123456',
        database: 'merchants_basic',
      },
      log: {
        host: 'localhost',
        port: '3306',
        user: 'root',
        password: '123456',
        database: 'merchants_log',
      },
    },
    default: {},
    app: true,
    agent: false,
  };

  // sequelize config
  config.sequelize = {
    dialect: 'mysql',
    database: 'merchants_log',
    host: 'localhost',
    port: '3306',
    username: 'root',
    password: '123456',
    dialectOptions: {
      requestTimeout: 999999,
    },
    pool: {
      acquire: 999999,
    },
  };

  // kafka config
  config.kafkaHost = 'localhost:9092';
  config.topic = topic;
  config.producerConfig = {
    // Partitioner type (default = 0, random = 1, cyclic = 2, keyed = 3, custom = 4), default 0
    partitionerType: 1,
  };
  config.consumerTopics = [
    { topic, partition: 0 },
    { topic, partition: 1 },
    { topic, partition: 2 },
    { topic, partition: 3 },
    { topic, partition: 4 },
  ];

  return config;
};
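The config above uses the random partitioner (partitionerType: 1), so messages are spread across the 5 partitions arbitrarily. If related log messages should land on the same partition, kafka-node also supports keyed partitioning: set partitionerType: 3 and add a key to each payload, along these lines (a sketch, not part of the original setup):
// With partitionerType: 3 in producerConfig, payloads can carry a key:
const payloads = [
  {
    topic: 'logAction_p5',
    key: 'ERROR', // messages with the same key go to the same partition
    messages: JSON.stringify({ type: 'ERROR', level: 1 }),
  },
];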
5. Model
model · log.js
This uses Sequelize.
'use strict';

module.exports = app => {
  const { STRING, INTEGER, DATE, TEXT } = app.Sequelize;
  const Log = app.model.define('log', {
    /**
     * UUID
     */
    id: { type: STRING(36), primaryKey: true },
    /**
     * Log type
     */
    type: STRING(100),
    /**
     * Priority level (the higher the number, the higher the priority)
     */
    level: INTEGER,
    /**
     * Operator
     */
    operator: STRING(50),
    /**
     * Log content
     */
    content: TEXT,
    /**
     * IP
     */
    ip: STRING(36),
    /**
     * User agent of the current request
     */
    user_agent: STRING(150),
    /**
     * Error stack
     */
    error_stack: TEXT,
    /**
     * URL
     */
    url: STRING(255),
    /**
     * Request object
     */
    request: TEXT,
    /**
     * Response object
     */
    response: TEXT,
    /**
     * Created at
     */
    created_at: DATE,
    /**
     * Updated at
     */
    updated_at: DATE,
  });
  return Log;
};
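One thing to watch: plain Sequelize defaults will also manage camelCase createdAt/updatedAt columns and pluralize the model name when resolving the table (egg-sequelize may override some of these defaults). Since this project fills created_at/updated_at by hand and writes to the logs table through egg-mysql, you may want to pass options to define, roughly like this (a hedged sketch, not in the original):
// Optional third argument to app.model.define (sketch):
const Log = app.model.define('log', { /* attributes as above */ }, {
  tableName: 'logs', // match the table created by the SQL below
  timestamps: false, // created_at / updated_at are set manually in the service
});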
6. Test script (Python):
import requests
from threading import Thread

def loop():
    t = 1000
    while t:
        url = "http://localhost:7001/log/notice"
        payload = "{\n\t\"type\": \"ERROR\",\n\t\"level\": 1,\n\t\"content\": \"URL send ERROR\",\n\t\"operator\": \"Knove\"\n}"
        headers = {
            'Content-Type': "application/json",
            'Cache-Control': "no-cache"
        }
        response = requests.request("POST", url, data=payload, headers=headers)
        print(response.text)
        t -= 1

if __name__ == '__main__':
    # Fire requests from 10 threads, 1000 requests each.
    for i in range(10):
        t = Thread(target=loop)
        t.start()
7. Table creation SQL:
SET NAMES utf8mb4;
SET FOREIGN_KEY_CHECKS = 0;

-- ----------------------------
-- Table structure for logs
-- ----------------------------
DROP TABLE IF EXISTS `logs`;
CREATE TABLE `logs` (
  `id` varchar(36) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL,
  `type` varchar(100) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL COMMENT 'Log type',
  `level` int(11) NULL DEFAULT NULL COMMENT 'Priority level (higher number = higher priority)',
  `operator` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL DEFAULT NULL COMMENT 'Operator',
  `content` text CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL COMMENT 'Log content',
  `ip` varchar(36) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL DEFAULT NULL COMMENT 'IP',
  `user_agent` varchar(150) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL DEFAULT NULL COMMENT 'User agent of the current request',
  `error_stack` text CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL COMMENT 'Error stack',
  `url` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL DEFAULT NULL COMMENT 'Current URL',
  `request` text CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL COMMENT 'Request object',
  `response` text CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL COMMENT 'Response object',
  `created_at` datetime(0) NULL DEFAULT NULL COMMENT 'Created at',
  `updated_at` datetime(0) NULL DEFAULT NULL COMMENT 'Updated at',
  PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB CHARACTER SET = utf8mb4 COLLATE = utf8mb4_bin ROW_FORMAT = Dynamic;

SET FOREIGN_KEY_CHECKS = 1;
λ.3 Closing notes
There is very little material on this topic online; the setup above came out of digging through the various docs to find a workable implementation.
That is all for this article; hopefully it is of some help.