com.microsoft.projectoxford.emotionsample.RecognizeActivity.java Source code


Introduction

Here is the source code for com.microsoft.projectoxford.emotionsample.RecognizeActivity.java

Source

//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/Cognitive-Emotion-Android
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.emotionsample;

import android.Manifest;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.*;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.IBinder;
import android.os.HandlerThread;
import android.support.annotation.NonNull;
import android.support.v4.app.ActivityCompat;
import android.support.v7.app.AppCompatActivity;
import android.text.Html;
import android.text.method.MovementMethod;
import android.text.method.ScrollingMovementMethod;
import android.util.Log;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.Menu;
import android.view.MenuItem;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.SeekBar;
import android.widget.TextView;
import android.widget.Toast;

import com.google.gson.Gson;
import com.microsoft.projectoxford.emotion.EmotionServiceClient;
import com.microsoft.projectoxford.emotion.EmotionServiceRestClient;
import com.microsoft.projectoxford.emotion.contract.RecognizeResult;
import com.microsoft.projectoxford.emotion.rest.EmotionServiceException;
import com.microsoft.projectoxford.emotionsample.helper.ImageHelper;

import com.microsoft.projectoxford.emotionsample.initialization.MusicObject;
import com.microsoft.projectoxford.emotionsample.initialization.SongListModel;
import com.microsoft.projectoxford.emotionsample.musicPlayer.PlayerService;
import com.microsoft.projectoxford.face.FaceServiceRestClient;
import com.microsoft.projectoxford.face.contract.Face;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class RecognizeActivity extends AppCompatActivity implements SongListModel.notifyMainClassListener { //implements SurfaceHolder.Callback {

    /**
     * Music player
     */
    private PlayerService musicSrv;
    private Intent playIntent;
    private boolean musicBound = false;
    private List<MusicObject> songList = new ArrayList<>();
    private SongListModel mModel;

    private TextView mTitleText;
    private ImageButton mPlayPause;
    private ImageButton mForward;
    private ImageButton mPrevious;
    private SeekBar mSeekbar;

    private Bitmap mPlayImg;
    private Bitmap mPauseImg;

    private String mEmotion = "Happy";
    // The button to select an image
    private Button mButtonSelectImage;

    // The URI of the image selected to detect.
    private Uri mImageUri;

    // The image selected to detect.
    private Bitmap mBitmap;

    private EmotionServiceClient client;

    private TextureView textureView;
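    // Maps display rotation constants to JPEG orientation degrees (used by the commented-out still-capture path).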
    private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
    static {
        ORIENTATIONS.append(Surface.ROTATION_0, 90);
        ORIENTATIONS.append(Surface.ROTATION_90, 0);
        ORIENTATIONS.append(Surface.ROTATION_180, 270);
        ORIENTATIONS.append(Surface.ROTATION_270, 180);
    }

    private String cameraId;
    protected CameraDevice cameraDevice;
    protected CameraCaptureSession cameraCaptureSessions;
    protected CaptureRequest captureRequest;
    protected CaptureRequest.Builder captureRequestBuilder;
    private Size imageDimension;
    private ImageReader imageReader;
    private File file;
    private static final int REQUEST_CAMERA_PERMISSION = 200;
    private boolean mFlashSupported;
    private Handler mBackgroundHandler;
    private HandlerThread mBackgroundThread;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_recognize);

        //Music player code
        mModel = new SongListModel(this, this);
        mTitleText = (TextView) findViewById(R.id.title_text);
        mSeekbar = (SeekBar) findViewById(R.id.seek_bar);
        mPlayPause = (ImageButton) findViewById(R.id.play_pause);
        mForward = (ImageButton) findViewById(R.id.forward);
        mPrevious = (ImageButton) findViewById(R.id.previous);
        mPauseImg = BitmapFactory.decodeResource(getResources(), R.mipmap.ic_pause);
        mPlayImg = BitmapFactory.decodeResource(getResources(), R.mipmap.ic_play);

        disablePlayer();
        mSeekbar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
            @Override
            public void onProgressChanged(SeekBar seekBar, int i, boolean b) {

            }

            @Override
            public void onStartTrackingTouch(SeekBar seekBar) {
                musicSrv.pauseSong();
                musicSrv.setGetTimeResults(false);
            }

            @Override
            public void onStopTrackingTouch(SeekBar seekBar) {
                musicSrv.setSongTime(mSeekbar.getProgress());
                musicSrv.resumeSong();
                musicSrv.setGetTimeResults(true);
                mPlayPause.setImageBitmap(mPauseImg);
            }
        });

        //Other Code
        if (client == null) {
            client = new EmotionServiceRestClient(getString(R.string.subscription_key));
        }

        textureView = (TextureView) findViewById(R.id.texture);
        assert textureView != null;
        textureView.setSurfaceTextureListener(textureListener);

        mButtonSelectImage = (Button) findViewById(R.id.buttonSelectImage);
        mButtonSelectImage.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                disablePlayer();
                musicSrv.pauseSong();
                try {
                    mTitleText.setText(R.string.loading_library);
                    takePicture();
                } catch (Exception e) {
                    Log.e("LOG", "takePicture failed", e);
                }
            }
        });

    }

    TextureView.SurfaceTextureListener textureListener = new TextureView.SurfaceTextureListener() {
        @Override
        public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
            //open your camera here
            openCamera();
        }

        @Override
        public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
            // Transform the captured image size according to the surface width and height
        }

        @Override
        public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
            return false;
        }

        @Override
        public void onSurfaceTextureUpdated(SurfaceTexture surface) {
        }
    };

    private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
        @Override
        public void onOpened(CameraDevice camera) {
            //This is called when the camera is open
            Log.e("LOG", "onOpened");
            cameraDevice = camera;

            /*
            CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
            try {
            Log.e("CAMERA STUFF", manager.getCameraIdList().toString());
            CameraCharacteristics cameraCharacteristics = manager.getCameraCharacteristics(cameraId);
            Log.e("CAMERA STUFF", manager.getCameraCharacteristics(cameraId).toString());
            Log.e("CAMERA STUFF", String.valueOf(cameraCharacteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT));
            }catch(Exception e){
                
            }
            */

            createCameraPreview();
        }

        @Override
        public void onDisconnected(CameraDevice camera) {
            cameraDevice.close();
        }

        @Override
        public void onError(CameraDevice camera, int error) {
            cameraDevice.close();
            cameraDevice = null;
        }
    };

    final CameraCaptureSession.CaptureCallback captureCallbackListener = new CameraCaptureSession.CaptureCallback() {
        @Override
        public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
                TotalCaptureResult result) {
            super.onCaptureCompleted(session, request, result);
            //Toast.makeText(RecognizeActivity.this, "Saved:" + file, Toast.LENGTH_SHORT).show();
            createCameraPreview();
        }
    };

    protected void startBackgroundThread() {
        mBackgroundThread = new HandlerThread("Camera Background");
        mBackgroundThread.start();
        mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
    }

    protected void stopBackgroundThread() {
        mBackgroundThread.quitSafely();
        try {
            mBackgroundThread.join();
            mBackgroundThread = null;
            mBackgroundHandler = null;
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }

    protected void takePicture() {
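        // Instead of performing a full camera2 still capture, grab the current preview
        // frame from the TextureView and run emotion recognition on it; the original
        // still-capture path is left commented out below for reference.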
        if (null == cameraDevice) {
            Log.e("LOG", "cameraDevice is null");
            return;
        }
        CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);

        mBitmap = textureView.getBitmap();
        if (mBitmap != null) {
            doRecognize();
        }
        /*
        try {
            
        CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraDevice.getId());
        Size[] jpegSizes = null;
        if (characteristics != null) {
            jpegSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(ImageFormat.JPEG);
        }
        int width = 640;
        int height = 480;
        if (jpegSizes != null && 0 < jpegSizes.length) {
            width = jpegSizes[0].getWidth();
            height = jpegSizes[0].getHeight();
        }
        ImageReader reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);
        List<Surface> outputSurfaces = new ArrayList<Surface>(2);
        outputSurfaces.add(reader.getSurface());
        outputSurfaces.add(new Surface(textureView.getSurfaceTexture()));
        final CaptureRequest.Builder captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
        captureBuilder.addTarget(reader.getSurface());
        captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
        // Orientation
        int rotation = getWindowManager().getDefaultDisplay().getRotation();
        captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
        final File file = new File(Environment.getExternalStorageDirectory()+"/pic.jpg");
            
            
        //---------//
        mImageUri = Uri.fromFile(file);
        mBitmap = ImageHelper.loadSizeLimitedBitmapFromUri(
                mImageUri, getContentResolver());
            
        mBitmap = textureView.getBitmap();
        if (mBitmap != null) {
            doRecognize();
        }
        //-----------//
            
            
        ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                Image image = null;
                try {
                    image = reader.acquireLatestImage();
                    ByteBuffer buffer = image.getPlanes()[0].getBuffer();
                    byte[] bytes = new byte[buffer.capacity()];
                    buffer.get(bytes);
                    save(bytes);
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                } catch (IOException e) {
                    e.printStackTrace();
                } finally {
                    if (image != null) {
                        image.close();
                    }
                }
            }
            private void save(byte[] bytes) throws IOException {
                OutputStream output = null;
                try {
                    output = new FileOutputStream(file);
                    output.write(bytes);
                } finally {
                    if (null != output) {
                        output.close();
                    }
                }
            }
        };
            
        reader.setOnImageAvailableListener(readerListener, mBackgroundHandler);
        final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback() {
            @Override
            public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
                super.onCaptureCompleted(session, request, result);
                //Toast.makeText(RecognizeActivity.this, "Saved:" + file, Toast.LENGTH_SHORT).show();
                createCameraPreview();
            }
        };
        cameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
            @Override
            public void onConfigured(CameraCaptureSession session) {
                try {
                    session.capture(captureBuilder.build(), captureListener, mBackgroundHandler);
                } catch (CameraAccessException e) {
                    e.printStackTrace();
                }
            }
            @Override
            public void onConfigureFailed(CameraCaptureSession session) {
            }
        }, mBackgroundHandler);
        } catch (CameraAccessException e) {
        e.printStackTrace();
        }*/

    }

    protected void createCameraPreview() {
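        // Send the camera preview to the TextureView's SurfaceTexture through a capture session.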
        try {
            SurfaceTexture texture = textureView.getSurfaceTexture();
            assert texture != null;
            texture.setDefaultBufferSize(imageDimension.getWidth(), imageDimension.getHeight());
            Surface surface = new Surface(texture);
            captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            captureRequestBuilder.addTarget(surface);
            cameraDevice.createCaptureSession(Arrays.asList(surface), new CameraCaptureSession.StateCallback() {
                @Override
                public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
                    //The camera is already closed
                    if (null == cameraDevice) {
                        return;
                    }
                    // When the session is ready, we start displaying the preview.
                    cameraCaptureSessions = cameraCaptureSession;
                    updatePreview();
                }

                @Override
                public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
                    Toast.makeText(RecognizeActivity.this, "Configuration change", Toast.LENGTH_SHORT).show();
                }
            }, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    private void openCamera() {
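        // Open camera "1" (usually the front-facing camera), use its first supported preview
        // size, and ask for the CAMERA/WRITE_EXTERNAL_STORAGE permissions if they are missing.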
        CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
        Log.e("LOG", "is camera open");
        try {
            cameraId = "1";//manager.getCameraIdList()[0];
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
            StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            assert map != null;
            imageDimension = map.getOutputSizes(SurfaceTexture.class)[0];
            // Add permission for camera and let user grant the permission
            if (ActivityCompat.checkSelfPermission(this,
                    Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED
                    || ActivityCompat.checkSelfPermission(this,
                            Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
                ActivityCompat.requestPermissions(RecognizeActivity.this,
                        new String[] { Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE },
                        REQUEST_CAMERA_PERMISSION);
                return;
            }
            manager.openCamera(cameraId, stateCallback, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
        Log.e("LOG", "openCamera X");
    }

    protected void updatePreview() {
        if (null == cameraDevice) {
            Log.e("LOG", "updatePreview error, return");
            return;
        }
        captureRequestBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
        try {
            cameraCaptureSessions.setRepeatingRequest(captureRequestBuilder.build(), null, mBackgroundHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    private void closeCamera() {
        if (null != cameraDevice) {
            cameraDevice.close();
            cameraDevice = null;
        }
        if (null != imageReader) {
            imageReader.close();
            imageReader = null;
        }
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
            @NonNull int[] grantResults) {
        if (requestCode == REQUEST_CAMERA_PERMISSION) {
            if (grantResults.length == 0 || grantResults[0] == PackageManager.PERMISSION_DENIED) {
                // close the app
                Toast.makeText(RecognizeActivity.this,
                        "Sorry, you can't use this app without granting the camera permission", Toast.LENGTH_LONG).show();
                finish();
            }
        }
    }

    @Override
    protected void onResume() {
        super.onResume();
        Log.e("LOG", "onResume");
        startBackgroundThread();
        if (textureView.isAvailable()) {
            openCamera();
        } else {
            textureView.setSurfaceTextureListener(textureListener);
        }
    }

    @Override
    protected void onPause() {
        Log.e("LOG", "onPause");
        closeCamera();
        stopBackgroundThread();
        super.onPause();
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_recognize, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();

        //noinspection SimplifiableIfStatement
        if (id == R.id.action_settings) {
            return true;
        }

        return super.onOptionsItemSelected(item);
    }

    public void doRecognize() {
        mButtonSelectImage.setEnabled(false);

        // Do emotion detection using auto-detected faces.
        try {
            new doRequest(false).execute();
        } catch (Exception e) {
            Log.e("Error Exception: ", e.toString());
        }
    }

    private List<RecognizeResult> processWithAutoFaceDetection()
            throws EmotionServiceException, IOException, JSONException {
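        // Compress the captured bitmap to JPEG, send it to the Emotion API, then pick the
        // highest-scoring emotion of the first detected face and store it in mEmotion.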
        Log.d("emotion", "Start emotion detection with auto-face detection");

        Gson gson = new Gson();

        // Put the image into an input stream for detection.
        ByteArrayOutputStream output = new ByteArrayOutputStream();
        mBitmap.compress(Bitmap.CompressFormat.JPEG, 100, output);
        ByteArrayInputStream inputStream = new ByteArrayInputStream(output.toByteArray());

        long startTime = System.currentTimeMillis();

        // Detect emotion by auto-detecting faces in the image.
        List<RecognizeResult> result = this.client.recognizeImage(inputStream);

        String json = gson.toJson(result);
        Log.e("result", json);
        try {

            JSONArray jArray = new JSONArray(json);
            JSONObject jObj = jArray.getJSONObject(0);
            JSONObject scores = jObj.getJSONObject("scores");

            HashMap<String, Double> emotionScores = new HashMap<>();
            emotionScores.put("Anger", scores.getDouble("anger"));
            emotionScores.put("Contempt", scores.getDouble("contempt"));
            emotionScores.put("Disgust", scores.getDouble("disgust"));
            emotionScores.put("Fear", scores.getDouble("fear"));
            emotionScores.put("Happiness", scores.getDouble("happiness"));
            emotionScores.put("Neutral", scores.getDouble("neutral"));
            emotionScores.put("Sadness", scores.getDouble("sadness"));
            emotionScores.put("Surprise", scores.getDouble("surprise"));
            double maxScore = Collections.max(emotionScores.values());
            for (Map.Entry<String, Double> entry : emotionScores.entrySet()) {
                if (entry.getValue() == maxScore) {
                    Log.e("Max emotion score ", entry.getKey());
                    //String testS = entry.getKey();
                    mEmotion = entry.getKey();
                }
            }

        } catch (Exception e) {
            e.printStackTrace();
        }

        Log.e("emotion",
                String.format("Detection done. Elapsed time: %d ms", (System.currentTimeMillis() - startTime)));
        return result;
    }

    private class doRequest extends AsyncTask<String, String, List<RecognizeResult>> {
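        // Background task that calls the Emotion API off the UI thread; onPostExecute marks the
        // detected face rectangles, re-enables the controls, and queues a playlist for the detected emotion.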
        // Store error message
        private Exception e = null;
        private boolean useFaceRectangles = false;

        public doRequest(boolean useFaceRectangles) {
            this.useFaceRectangles = useFaceRectangles;
        }

        @Override
        protected List<RecognizeResult> doInBackground(String... args) {
            if (!this.useFaceRectangles) {
                try {
                    return processWithAutoFaceDetection();
                } catch (Exception e) {
                    this.e = e; // Store error
                }
            }
            return null;
        }

        @Override
        protected void onPostExecute(List<RecognizeResult> result) {
            super.onPostExecute(result);
            // Display based on error existence

            if (!this.useFaceRectangles) {
                Log.e("Doing: ", "Recognizing emotions with auto-detected face rectangles...");
            } else {
                Log.e("Doing: ", "Recognizing emotions with existing face rectangles from Face API...");
            }
            if (e != null) {
                Log.e("Error Exception: ", e.toString());
                this.e = null;
            } else {
                if (result.size() == 0) {
                    Log.e("L", "No emotion detected :(");
                } else {
                    Integer count = 0;
                    // Convert the bitmap to a mutable bitmap by copying it
                    Bitmap bitmapCopy = mBitmap.copy(Bitmap.Config.ARGB_8888, true);
                    Canvas faceCanvas = new Canvas(bitmapCopy);
                    faceCanvas.drawBitmap(mBitmap, 0, 0, null);
                    Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG);
                    paint.setStyle(Paint.Style.STROKE);
                    paint.setStrokeWidth(5);
                    paint.setColor(Color.RED);
                    for (RecognizeResult r : result) {
                        faceCanvas.drawRect(r.faceRectangle.left, r.faceRectangle.top,
                                r.faceRectangle.left + r.faceRectangle.width,
                                r.faceRectangle.top + r.faceRectangle.height, paint);
                    }

                }
            }

            mButtonSelectImage.setEnabled(true);
            //TODO: Set list and show toast text
            // Start the music playing here
            enablePlayer();

            /*
             * Tempo guidelines for the playlists:
             *   Melancholy / Sadness / Loss / Sorrow / Pain: 50-85 BPM
             *   Thoughtful / Introspective: 90-105 BPM
             *   Happy / Party / Celebration: 110-125 BPM
             *   Excitement / Energy / Danger / Anger: 130 BPM and up
             *
             * Emotion buckets used below:
             *   Neutral                      -> all songs
             *   Sadness / Contempt / Disgust -> "Sadness" list
             *   Happiness / Surprise         -> "Happiness" list
             *   Anger / Fear                 -> "Anger" list
             */
            if (mEmotion.equals("Sadness") || mEmotion.equals("Contempt") || mEmotion.equals("Disgust")) {
                musicSrv.setList(mModel.getCategoryList("Sadness"));
            } else if (mEmotion.equals("Happiness") || mEmotion.equals("Surprise")) {
                musicSrv.setList(mModel.getCategoryList("Happiness"));
            } else if (mEmotion.equals("Anger") || mEmotion.equals("Fear")) {
                musicSrv.setList(mModel.getCategoryList("Anger"));
            } else {
                musicSrv.setList(mModel.getCategoryList(null));
            }
            musicSrv.playSong();
        }

    }

    /**
     * MUSIC PLAYER FUNCTIONS
     */
    //connect to the service
    private ServiceConnection musicConnection = new ServiceConnection() {
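        // Bind to PlayerService and register a listener that pushes playback progress,
        // track info, and track duration back into the seek bar and title text.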

        @Override
        public void onServiceConnected(ComponentName name, IBinder service) {
            PlayerService.MusicBinder binder = (PlayerService.MusicBinder) service;
            //get service
            musicSrv = binder.getService();
            binder.setListener(new PlayerService.MusicPlayerListener() {
                @Override
                public void sendProgress(int progress) {
                    mSeekbar.setProgress(progress);
                }

                @Override
                public void sendPlayerInfo(String title, String artist) {
                    String input_text = "<b>" + mEmotion + "</b>" + ":    " + title;
                    mTitleText.setText(Html.fromHtml(input_text));

                }

                @Override
                public void setMax(int maxTime) {
                    mSeekbar.setMax(maxTime);
                }
            });
            //pass list
            //musicSrv.setList(mModel.getCategoryList(null));
            musicBound = true;
        }

        @Override
        public void onServiceDisconnected(ComponentName name) {
            musicBound = false;
        }
    };

    public void disablePlayer() {
        mSeekbar.setEnabled(false);
        mPlayPause.setEnabled(false);
        mForward.setEnabled(false);
        mPrevious.setEnabled(false);
        mPlayPause.setColorFilter(getResources().getColor(R.color.button_disabled_background));
        mForward.setColorFilter(getResources().getColor(R.color.button_disabled_background));
        mPrevious.setColorFilter(getResources().getColor(R.color.button_disabled_background));

    }

    public void enablePlayer() {
        mSeekbar.setEnabled(true);
        mPlayPause.setEnabled(true);
        mForward.setEnabled(true);
        mPrevious.setEnabled(true);

        mPlayPause.setColorFilter(getResources().getColor(R.color.black));
        mForward.setColorFilter(getResources().getColor(R.color.black));
        mPrevious.setColorFilter(getResources().getColor(R.color.black));

    }

    public void playPauseClick(View v) {
        if (musicSrv.pause_startSong()) {
            mPlayPause.setImageBitmap(mPlayImg);
        } else {
            mPlayPause.setImageBitmap(mPauseImg);
        }
    }

    public void forwardClick(View v) {
        musicSrv.skipSong();
    }

    public void previousClick(View v) {
        musicSrv.prevSong();
    }

    /**
     * Lifecycle functions for player
     */
    @Override
    public void onStart() {
        super.onStart();
        if (playIntent == null) {
            playIntent = new Intent(this, PlayerService.class);
            bindService(playIntent, musicConnection, Context.BIND_AUTO_CREATE);
            startService(playIntent);
        }
    }

    @Override
    protected void onDestroy() {
        stopService(playIntent);
        musicSrv = null;
        super.onDestroy();
    }

    @Override
    public void stopLoading() {
    }

    @Override
    public void setInfoMessage(String msg) {
    }
}