The error I am getting in the backend is: "TypeError: Cannot destructure property 'buffer' of 'req.file' as it is undefined."

Backend code
import express, { Request, Response } from 'express'
import http from 'http'
import compression from 'compression'
import cors from 'cors'
import dotenv from 'dotenv'
import multer from 'multer'
import * as tf from '@tensorflow/tfjs-node'
import * as mobilenet from '@tensorflow-models/mobilenet'
import connectDB from './db/dbConnect'
import router from './routes'

dotenv.config()

const app: express.Application = express()
const PORT: number = parseInt(process.env.PORT || '1010')

app.use(
  cors({
    credentials: true,
  })
)
app.use(express.json())
app.use(express.urlencoded({ extended: true }))
app.use(compression())
app.use('/api', router())

// Set up Multer for file uploads
const storage = multer.memoryStorage()
const upload = multer({ storage: storage })

// Load the MobileNet model
let model: mobilenet.MobileNet | null = null
;(async () => {
  try {
    await tf.ready() // Wait for TensorFlow.js to be ready
    model = await mobilenet.load()
    console.log('Model Loaded')
  } catch (error) {
    console.log(error)
  }
})()

// Route for image upload and prediction
app.post(
  '/classifyImage',
  upload.single('image'),
  async (req: Request, res: Response) => {
    try {
      if (!model) {
        return res.status(500).json({ error: 'Model not loaded' })
      }
      const { buffer } = req.file
      const img = tf.node.decodeImage(buffer)
      const batchedImg: tf.Tensor3D = tf.expandDims(img) as tf.Tensor3D
      const predictions = await model.classify(batchedImg)
      const result = predictions[0]
      res.json({ className: result.className, probability: result.probability })
    } catch (error) {
      console.log(error)
      res.status(500).json({ error: 'Internal server error' })
    }
  }
)

const server = http.createServer(app)

const startServer = async () => {
  try {
    const db = await connectDB(process.env.MONGO_URI)
    if (db) {
      console.log('DB Connected')
    }
    server.listen(PORT, () => {
      console.log('Server is running on http://localhost:' + PORT)
    })
  } catch (error) {
    console.log(error)
  }
}

startServer()
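For context: this TypeError is what JavaScript throws when the value being destructured is undefined, so the line const { buffer } = req.file fails because multer's upload.single('image') never populated req.file, i.e. the request that reached the server did not contain a multipart file part named image. A minimal guard inside the route handler (a sketch to surface the condition, not the underlying fix) could be:

    // Hypothetical guard (illustration only): respond early instead of destructuring undefined
    if (!req.file) {
      return res.status(400).json({ error: 'No file received under the "image" field' })
    }
    const { buffer } = req.file

With a guard like this the route returns 400 instead of crashing, which makes it easier to confirm that no file is actually arriving.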
Frontend code
import React, { ChangeEvent, FC, useRef, useState } from 'react'
import axios from 'axios'

interface ImageUploadProps {}

interface Prediction {
  className: string
  probability: number
}

const ImageUpload: FC<ImageUploadProps> = () => {
  const [uploadedImage, setUploadedImage] = useState<string | null>(null)
  const [predictions, setPredictions] = useState<Prediction[]>([])
  const [loading, setLoading] = useState<boolean>(false)
  const imageRef = useRef<HTMLImageElement>(null)

  const handleImageUpload = (event: ChangeEvent<HTMLInputElement>) => {
    const { files } = event.target
    if (files && files.length > 0) {
      const url = URL.createObjectURL(files[0])
      setUploadedImage(url)
    }
  }

  const onGenerate = async () => {
    setLoading(true)
    try {
      if (imageRef.current) {
        const formData = new FormData()
        formData.append('image', imageRef.current!.src)
        console.log(formData.get('image'))
        const response = await axios.post(
          'http://localhost:7070/classifyImage',
          formData,
          {
            headers: {
              'Content-Type': 'multipart/form-data',
            },
          }
        )
        const { className, probability } = response.data
        setPredictions([{ className, probability }])
      }
    } catch (error) {
      console.log(error)
    } finally {
      setLoading(false)
    }
  }

  return (
    <div className='w-75'>
      <div className='input-group'>
        <input
          name='image'
          type='file'
          className='form-control'
          id='inputGroupFile04'
          aria-describedby='inputGroupFileAddon04'
          aria-label='Upload'
          disabled={loading}
          onChange={handleImageUpload}
        />
        <button
          className='btn btn-primary'
          type='button'
          id='inputGroupFileAddon04'
          onClick={onGenerate}
          disabled={loading || !uploadedImage}
        >
          Generate
        </button>
      </div>
      {uploadedImage && (
        <div className='w-100 mt-3'>
          <h4 className='fw-bold'>Uploaded Image:</h4>
          <img
            className='w-100'
            src={uploadedImage}
            alt='Selected'
            crossOrigin='anonymous'
            ref={imageRef}
          />
        </div>
      )}
      {predictions.length > 0 && (
        <ul>
          {predictions.map(({ className, probability }, index) => (
            <li key={index}>{`${className} (${Math.round(
              probability * 100
            )}%)`}</li>
          ))}
        </ul>
      )}
    </div>
  )
}

export default ImageUpload
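One thing that stands out: formData.append('image', imageRef.current!.src) appends the blob URL as a plain text string, so the multipart body carries a text field rather than a file, which would leave req.file undefined on the server. Below is a minimal sketch of the upload path that keeps the selected File and sends that instead; selectedFile is a new state variable introduced here purely for illustration (it is not in the original component), and the explicit Content-Type header is left out so the browser and axios can add the multipart boundary themselves. It assumes the same imports as the component above.

  // Hypothetical change: store the File from the input and send it as the multipart part
  const [selectedFile, setSelectedFile] = useState<File | null>(null)

  const handleImageUpload = (event: ChangeEvent<HTMLInputElement>) => {
    const { files } = event.target
    if (files && files.length > 0) {
      setSelectedFile(files[0])                       // keep the File for the upload
      setUploadedImage(URL.createObjectURL(files[0])) // keep the blob URL only for the preview
    }
  }

  const onGenerate = async () => {
    if (!selectedFile) return
    setLoading(true)
    try {
      const formData = new FormData()
      formData.append('image', selectedFile) // a File here lets multer populate req.file
      const response = await axios.post('http://localhost:7070/classifyImage', formData)
      const { className, probability } = response.data
      setPredictions([{ className, probability }])
    } catch (error) {
      console.log(error)
    } finally {
      setLoading(false)
    }
  }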