首页 > 解决方案 > 使用 Tensorflow.js 加载 Mobilenet 模型并在 express 中间件中进行分类

问题描述

我已经训练了一个 MobileNet 神经网络模型,现在我想通过 express 中间件把它暴露出来以进行图像分类。如果不使用 express,直接加载模型并调用分类方法,一切正常。

但是当我在 express 中调用分类方法时出现错误。

我的代码:

global.fetch = require('node-fetch')
const multer = require('multer');
const express = require('express');
const bodyParser = require('body-parser');
const fs = require('fs');
const jpeg = require('jpeg-js');
const tf = require('@tensorflow/tfjs');
require('@tensorflow/tfjs-node');
const mobilenet = require('@tensorflow-models/mobilenet');
var Type = require('type-of-is');
//-----------------------------------------------------------------------------------------------------
// Loads a MobileNet(1, 1) model from a local file path.
// Returns a Promise that resolves with the ready-to-use model instance,
// so callers must `await` it (or chain `.then`).
async function LoadModelAsync(path) {
    const net = new mobilenet.MobileNet(1, 1);
    net.path = 'file://' + path;
    await net.load();
    return net;
}
/**
 * Flattens decoded RGBA pixel data into an Int32Array keeping only the
 * first `numChannels` channels of each pixel (numChannels = 3 drops the
 * alpha channel that jpeg-js always emits).
 *
 * @param {{data: ArrayLike<number>, width: number, height: number}} image
 * @param {number} numChannels - channels to keep per pixel (e.g. 3 for RGB)
 * @returns {Int32Array} length = width * height * numChannels
 */
function imageByteArray(image, numChannels) {
    const { data, width, height } = image;
    const pixelCount = width * height;
    const flat = new Int32Array(pixelCount * numChannels);
    for (let px = 0; px < pixelCount; px++) {
        for (let ch = 0; ch < numChannels; ch++) {
            // source stride is always 4 (RGBA), destination stride is numChannels
            flat[px * numChannels + ch] = data[px * 4 + ch];
        }
    }
    return flat;
}
// Reads a JPEG file from disk and converts it into an int32 rank-3 tensor
// shaped [height, width, numChannels], the layout MobileNet expects.
function readImage(path, numChannels) {
    const image = jpeg.decode(fs.readFileSync(path), true);
    const shape = [image.height, image.width, numChannels];
    return tf.tensor3d(imageByteArray(image, numChannels), shape, 'int32');
}
/**
 * Runs MobileNet classification on an image tensor and logs the mapped
 * human-readable label for each prediction.
 *
 * Fix: the original read `this.IMAGENET_CLASSES`, but `this` is undefined
 * inside a plain function in module (strict-mode) code, so every call threw
 * a TypeError. The lookup table is a module-level constant and must be
 * referenced directly.
 *
 * @param {Object} mn_model - loaded MobileNet instance (must expose `classify`)
 * @param {Object} image - int32 image tensor to classify
 * @returns {Promise<Array<{className: string, probability: number}>>}
 *          the raw predictions, so callers can reuse them (previously
 *          resolved with undefined — returning them is backward compatible)
 */
async function ClassifyAsync(mn_model, image)
{
    console.time('classification time');
    const predictions = await mn_model.classify(image);
    console.timeEnd('classification time');
    predictions.forEach(element => {
        // NOTE(review): `probability` looks like a 0-1 fraction, so the "%"
        // suffix is misleading unless multiplied by 100 — output format kept
        // as-is to preserve existing log behavior.
        console.log(IMAGENET_CLASSES[element.className] + " : " +
        element.probability + "%");
    });
    return predictions;
 }
 //-----------------------------------------------------------------------------------------------------
// Maps MobileNet's base ImageNet class names (the transfer-learning labels)
// to the domain-specific labels this app actually cares about.
// Fix: the 'great white shark…' key was line-wrapped in the original source,
// which is a syntax error in JavaScript (string literals cannot span raw
// newlines); it is rejoined into a single literal here.
const IMAGENET_CLASSES = {
    'tench, Tinca tinca': 'contrat',
    'goldfish, Carassius auratus': 'signature without contract',
    'great white shark, white shark, man-eater, man-eating shark, Carcharodon carcharias': 'signature with contract',
    'tiger shark, Galeocerdo cuvieri': 'signature'
};
const port = process.env.PORT || 3000;
// Multer disk storage: uploaded files land in ./uploads with a
// timestamp-prefixed name to avoid collisions.
var storage = multer.diskStorage({
     destination: function (req, file, cb) {
         cb(null, 'uploads')
     },
     filename: function (req, file, cb) {
         cb(null, Date.now() + '-' + file.originalname)
     }
})
var upload = multer({ storage: storage })
// BUG (the subject of this question): LoadModelAsync is async, so `model`
// holds a pending Promise here, not the loaded model — which is why
// `model.classify` later fails with "classify is not a function".
var model = LoadModelAsync('CNN/model.json');
console.log(Type(model))
const numChannels = 3;

// NOTE(review): `app` is assigned without const/var/let — an implicit global.
app = express();
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: false }));

//curl -X POST -H 'content-type: multipart/form-data' -F signature=@image.jpg localhost:3000/profile; echo
app.post('/profile', upload.single('signature'), async function (req, res, next) {
    try{
        console.log(req.body);
        console.log(req.file);
        var image = readImage(req.file.path, numChannels);
        // NOTE(review): this promise is not awaited, so a rejection inside it
        // escapes the try/catch and surfaces as an
        // UnhandledPromiseRejectionWarning (as seen in the log below).
        ClassifyAsync(model,image);
    }catch(error){
        console.log(error)
    }
    console.log(Type(model))
    res.status(204).end();
})
var server = app.listen(port, () => {
console.log('Started at port ' + server.address().port)
})

当我启动我的服务器时

node server.js
2018-12-11 10:50:33.251048: I 
tensorflow/core/platform/cpu_feature_guard.cc:141] Your CPU supports instructions that this TensorFlow binary was not compiled to use: SSE4.1 SSE4.2 AVX AVX2 FMA
[Function: Promise]
(node:9454) Warning: N-API is an experimental feature and could change at any time.
Started at port 3000
2018-12-11 10:50:36.788288: W tensorflow/core/framework/allocator.cc:113] 
Allocation of 205520896 exceeds 10% of system memory.

当我使用 CURL 向我的 API 发送请求时,错误日志:

{}
{ 
    fieldname: 'signature',
    originalname: 'sigok.JPG',
    encoding: '7bit',
    mimetype: 'image/jpeg',
    destination: 'uploads',
    filename: '1544521838948-sigok.JPG',
    path: 'uploads/1544521838948-sigok.JPG',
    size: 47403 }
[Function: Promise]
(node:9454) UnhandledPromiseRejectionWarning: TypeError: mn_model.classify is not a function
at ClassifyAsync (/home/leeson/transfer-learning-tensorflowjs/app/server2.js:43:40)
at /home/leeson/transfer-learning-tensorflowjs/app/server2.js:87:9
at Layer.handle [as handle_request] (/home/leeson/transfer-learning-tensorflowjs/app/node_modules/express/lib/router/layer.js:95:5)
at next (/home/leeson/transfer-learning-tensorflowjs/app/node_modules/express/lib/router/route.js:137:13)
at Immediate._onImmediate (/home/leeson/transfer-learning-tensorflowjs/app/node_modules/multer/lib/make-middleware.js:53:37)
at runCallback (timers.js:798:20)
at tryOnImmediate (timers.js:752:5)
at processImmediate [as _immediateCallback] (timers.js:729:5)
(node:9454) UnhandledPromiseRejectionWarning: Unhandled promise rejection. This error originated either by throwing inside of an async function without a catch block, or by rejecting a promise which was not handled with .catch(). 
(rejection id: 1)
(node:9454) [DEP0018] DeprecationWarning: Unhandled promise rejections are deprecated. In the future, promise rejections that are not handled will terminate the Node.js process with a non-zero exit code

谢谢你的帮助

标签: node.js, express, tensorflow.js

解决方案


您实际上是把调用异步函数得到的返回值(一个 Promise,而不是已加载的模型)赋给了顶级变量

// (Quoted from the question) An async function always returns a Promise;
// `return mn` only determines what that Promise resolves with.
async function LoadModelAsync(path){
    const mn = new mobilenet.MobileNet(1, 1);
    mn.path = `file://${path}`;
    await mn.load();
    return mn;
}

var model = LoadModelAsync('CNN/model.json');
         // ^ this needs to have `await` in front of it

由于(当时的 Node.js 中)模块顶层不允许使用 await

你可以直接使用 Promise 链来代替

var model = null;
var server = null;

// Load the model first, then start listening — the .then chain guarantees
// `model` is populated before any request can reach the classify route.
// Fix: the original called `loadModelAsync` (lowercase l), but the function
// is named `LoadModelAsync`, so this snippet would throw a ReferenceError.
LoadModelAsync( 'CNN/model.json' )
    .then( m => model = m )
             // ^ assigns the global model variable
    .then( () => server = app.listen(port, () => {
             // ^ assigns the global server variable and starts the server
       console.log('Started at port ' + server.address().port)
    }) )
    // Without a catch, a failed model load would be an unhandled rejection.
    .catch( err => console.error('Failed to load model:', err) );

推荐阅读