Node JS integration with spark job server

394 Views Asked by At

I'm trying to call spark job server API from node js. The API which is the python egg file does provide the count of nulls from the file. So once I call the API from the node, it is reaching the SJS server and the job starts which triggers res.on('data') event and soon after that, it triggers res.on('end') as well before the job finishes it's execution and returns the result. So due to this, I am not able to get the data once the job completes.

Below is the code snippet, Please let me know what is the mistake done here.

        var postData = {
                'input': {
                    'strings': {
                        'file': 'file path to be passed'
                    }
                }
        };

        var options = {
            hostname: 'localhost',
            port: 8090,
            path: '/jobs?appName=my_ml_job&classPath=my_py_package.NullCheck.nullcheck&context=py-context',
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                'Content-Length': Buffer.byteLength(JSON.stringify(postData))
            }
        };

        var post_req = Http.request(options, function(res) {
            res.setEncoding('utf8');
            res.on('data', function (chunk) {
                console.log('Response: ' + chunk);
            });

            res.on('end', () => {
                return{
                    'STATUS': 'FINISHED'
                }                   
            });
        });

        post_req.on('error', e => {
            rj(e);
        });

        // post the data
        post_req.write(postData);
        post_req.end();
0

There are 0 best solutions below