Here is what I want to run on CentOS 7.x servers:
1 - A PureBasic WebSocket server for CentOS 7.x, open-source under the GPL license
2 - From Google Chrome/Chromium, Firefox, Opera, or Safari, connect to the server and send a picture as one frame
3 - The PureBasic WebSocket server on CentOS 7.x takes that picture and uses it for AI / deep-learning / machine-learning feedback
Is there anything similar to my example available to get me started?
Code: Select all
public class WebsocketServer extends Server {
private final static Logger LOG = Logger.getLogger(WebsocketServer.class);
public WebsocketServer(int port) {
SelectChannelConnector connector = new SelectChannelConnector();
connector.setPort(port);
addConnector(connector);
WebSocketHandler wsHandler = new WebSocketHandler() {
public WebSocket doWebSocketConnect(HttpServletRequest request, String protocol) {
return new FaceDetectWebSocket();
}
};
setHandler(wsHandler);
}
/**
* Simple innerclass that is used to handle websocket connections.
*
* @author jos
*/
private static class FaceDetectWebSocket implements WebSocket,
WebSocket.OnBinaryMessage, WebSocket.OnTextMessage {
private Connection connection;
private FaceDetection faceDetection = new FaceDetection();
public FaceDetectWebSocket() {
super();
}
/**
* On open we set the connection locally, and enable
* binary support
*/
public void onOpen(Connection connection) {
this.connection = connection;
this.connection.setMaxBinaryMessageSize(1024 * 512);
}
/**
* Cleanup if needed. Not used for this example
*/
public void onClose(int code, String message) {}
/**
* When we receive a binary message we assume it is an image. We then run this
* image through our face detection algorithm and send back the response.
*/
public void onMessage(byte[] data, int offset, int length) {
ByteArrayOutputStream bOut = new ByteArrayOutputStream();
bOut.write(data, offset, length);
try {
byte[] result = faceDetection.convert(bOut.toByteArray());
this.connection.sendMessage(result, 0, result.length);
} catch (IOException e) {
LOG.error("Error in facedetection, ignoring message:" + e.getMessage());
}
}
}
/**
* Start the server on port 999
*/
public static void main(String[] args) throws Exception {
WebsocketServer server = new WebsocketServer(9999);
server.start();
server.join();
}
}
/**
 * Detects faces in an encoded image using an OpenCV Haar cascade and returns
 * a PNG with yellow rectangles drawn around each detected face.
 *
 * <p>Based on the FaceDetection example from JavaCV. Not thread-safe: the
 * cascade and the per-call native buffers are used without synchronization,
 * so use one instance per connection (as WebsocketServer does).
 */
public class FaceDetection {

    private static final String CASCADE_FILE = "resources/haarcascade_frontalface_alt.xml";

    /** Minimum face size (pixels) passed to the Haar detector. */
    private int minsize = 20;
    /** Minimum neighboring detections required to keep a face (0 = keep all). */
    private int group = 0;
    /** Scale step between image-pyramid levels during detection. */
    private double scale = 1.1;

    // Loading and parsing the cascade XML is expensive; the original code
    // re-loaded it on every frame. Load it once per instance instead.
    private final CvHaarClassifierCascade cascade =
            new CvHaarClassifierCascade(cvLoad(CASCADE_FILE));

    /**
     * Runs face detection on an encoded image.
     *
     * @param imageData an encoded image (any format cvDecodeImage supports,
     *                  e.g. PNG/JPEG)
     * @return the same image re-encoded as PNG with detected faces outlined
     *         in yellow
     * @throws IOException if the bytes cannot be decoded as an image or the
     *                     PNG encoding fails
     */
    public byte[] convert(byte[] imageData) throws IOException {
        // Create an image from the supplied byte array.
        IplImage originalImage = cvDecodeImage(
                cvMat(1, imageData.length, CV_8UC1, new BytePointer(imageData)));
        if (originalImage == null) {
            // cvDecodeImage returns null on undecodable input; fail with a
            // diagnosable error instead of a later NullPointerException.
            throw new IOException("Could not decode image (" + imageData.length + " bytes)");
        }

        IplImage grayImage = null;
        CvMemStorage storage = null;
        try {
            // Convert to grayscale for recognition.
            grayImage = IplImage.create(originalImage.width(), originalImage.height(),
                    IPL_DEPTH_8U, 1);
            cvCvtColor(originalImage, grayImage, CV_BGR2GRAY);

            // Storage is needed to hold intermediate detection results.
            storage = CvMemStorage.create();

            // Detect the faces.
            CvSeq faces = cvHaarDetectObjects(grayImage, cascade, storage, scale, group, minsize);

            // Draw a yellow rectangle around each discovered face.
            for (int i = 0; i < faces.total(); i++) {
                CvRect r = new CvRect(cvGetSeqElem(faces, i));
                cvRectangle(originalImage, cvPoint(r.x(), r.y()),
                        cvPoint(r.x() + r.width(), r.y() + r.height()),
                        CvScalar.YELLOW, 1, CV_AA, 0);
            }

            // Convert the resulting image back to a PNG byte array.
            ByteArrayOutputStream bout = new ByteArrayOutputStream();
            BufferedImage imgb = originalImage.getBufferedImage();
            ImageIO.write(imgb, "png", bout);
            return bout.toByteArray();
        } finally {
            // Release native memory eagerly — the original leaked these per
            // frame until the (unreliable) finalizers ran.
            // NOTE(review): release() is the JavaCV deallocation hook for
            // these wrapper types — confirm against the JavaCV version in use.
            if (storage != null) {
                storage.release();
            }
            if (grayImage != null) {
                grayImage.release();
            }
            originalImage.release();
        }
    }
}