|
14 | 14 | "5. Plot example decoded outputs\n",
|
15 | 15 | "\n",
|
16 | 16 | "See \"Examples_kf_decoder\" for a Kalman filter example. <br>\n",
|
17 |
| - "Because the Kalman filter utilizes different preprocessing, we don't include an example here. to keep this notebook more understandable" |
| 17 | + "Because the Kalman filter utilizes different preprocessing, we don't include an example here. to keep this notebook more understandable\n", |
| 18 | + "\n", |
| 19 | + "We also include a note on memory usage for the neural net decoders at the end of #4" |
18 | 20 | ]
|
19 | 21 | },
|
20 | 22 | {
|
|
29 | 31 | {
|
30 | 32 | "cell_type": "code",
|
31 | 33 | "execution_count": 1,
|
32 |
| - "metadata": { |
33 |
| - "collapsed": false |
34 |
| - }, |
| 34 | + "metadata": {}, |
35 | 35 | "outputs": [
|
36 |
| - { |
37 |
| - "name": "stdout", |
38 |
| - "output_type": "stream", |
39 |
| - "text": [ |
40 |
| - "\n", |
41 |
| - "WARNING: statsmodels is not installed. You will be unable to use the Naive Bayes Decoder\n" |
42 |
| - ] |
43 |
| - }, |
44 | 36 | {
|
45 | 37 | "name": "stderr",
|
46 | 38 | "output_type": "stream",
|
47 | 39 | "text": [
|
48 |
| - "Using Theano backend.\n" |
| 40 | + "Using Theano backend.\n", |
| 41 | + "WARNING (theano.configdefaults): install mkl with `conda install mkl-service`: No module named 'mkl'\n" |
49 | 42 | ]
|
50 | 43 | }
|
51 | 44 | ],
|
|
99 | 92 | {
|
100 | 93 | "cell_type": "code",
|
101 | 94 | "execution_count": 2,
|
102 |
| - "metadata": { |
103 |
| - "collapsed": false |
104 |
| - }, |
| 95 | + "metadata": {}, |
105 | 96 | "outputs": [],
|
106 | 97 | "source": [
|
107 |
| - "folder='' #ENTER THE FOLDER THAT YOUR DATA IS IN\n", |
| 98 | + "# folder='' #ENTER THE FOLDER THAT YOUR DATA IS IN\n", |
108 | 99 | "# folder='/home/jglaser/Data/DecData/' \n",
|
109 |
| - "# folder='/Users/jig289/Dropbox/Public/Decoding_Data/'\n", |
| 100 | + "folder='/Users/jig289/Dropbox/Public/Decoding_Data/'\n", |
110 | 101 | "\n",
|
111 | 102 | "with open(folder+'example_data_s1.pickle','rb') as f:\n",
|
112 | 103 | " neural_data,vels_binned=pickle.load(f,encoding='latin1') #If using python 3\n",
|
|
136 | 127 | {
|
137 | 128 | "cell_type": "code",
|
138 | 129 | "execution_count": 3,
|
139 |
| - "metadata": { |
140 |
| - "collapsed": true |
141 |
| - }, |
| 130 | + "metadata": {}, |
142 | 131 | "outputs": [],
|
143 | 132 | "source": [
|
144 | 133 | "bins_before=6 #How many bins of neural data prior to the output are used for decoding\n",
|
|
163 | 152 | {
|
164 | 153 | "cell_type": "code",
|
165 | 154 | "execution_count": 4,
|
166 |
| - "metadata": { |
167 |
| - "collapsed": false |
168 |
| - }, |
| 155 | + "metadata": {}, |
169 | 156 | "outputs": [],
|
170 | 157 | "source": [
|
171 | 158 | "# Format for recurrent neural networks (SimpleRNN, GRU, LSTM)\n",
|
|
187 | 174 | {
|
188 | 175 | "cell_type": "code",
|
189 | 176 | "execution_count": 5,
|
190 |
| - "metadata": { |
191 |
| - "collapsed": false |
192 |
| - }, |
| 177 | + "metadata": {}, |
193 | 178 | "outputs": [],
|
194 | 179 | "source": [
|
195 | 180 | "#Set decoding output\n",
|
|
215 | 200 | {
|
216 | 201 | "cell_type": "code",
|
217 | 202 | "execution_count": 6,
|
218 |
| - "metadata": { |
219 |
| - "collapsed": true |
220 |
| - }, |
| 203 | + "metadata": {}, |
221 | 204 | "outputs": [],
|
222 | 205 | "source": [
|
223 | 206 | "#Set what part of data should be part of the training/testing/validation sets\n",
|
|
236 | 219 | {
|
237 | 220 | "cell_type": "code",
|
238 | 221 | "execution_count": 7,
|
239 |
| - "metadata": { |
240 |
| - "collapsed": false |
241 |
| - }, |
| 222 | + "metadata": {}, |
242 | 223 | "outputs": [],
|
243 | 224 | "source": [
|
244 | 225 | "num_examples=X.shape[0]\n",
|
|
277 | 258 | {
|
278 | 259 | "cell_type": "code",
|
279 | 260 | "execution_count": 8,
|
280 |
| - "metadata": { |
281 |
| - "collapsed": false |
282 |
| - }, |
| 261 | + "metadata": {}, |
283 | 262 | "outputs": [],
|
284 | 263 | "source": [
|
285 | 264 | "#Z-score \"X\" inputs. \n",
|
|
322 | 301 | "cell_type": "code",
|
323 | 302 | "execution_count": 9,
|
324 | 303 | "metadata": {
|
325 |
| - "collapsed": false, |
326 | 304 | "scrolled": true
|
327 | 305 | },
|
328 | 306 | "outputs": [
|
|
359 | 337 | {
|
360 | 338 | "cell_type": "code",
|
361 | 339 | "execution_count": 10,
|
362 |
| - "metadata": { |
363 |
| - "collapsed": false |
364 |
| - }, |
| 340 | + "metadata": {}, |
365 | 341 | "outputs": [
|
366 | 342 | {
|
367 | 343 | "name": "stdout",
|
368 | 344 | "output_type": "stream",
|
369 | 345 | "text": [
|
370 |
| - "('R2s:', array([ 0.73127717, 0.73370796]))\n" |
| 346 | + "R2s: [0.73127717 0.73370796]\n" |
371 | 347 | ]
|
372 | 348 | }
|
373 | 349 | ],
|
|
395 | 371 | },
|
396 | 372 | {
|
397 | 373 | "cell_type": "code",
|
398 |
| - "execution_count": 12, |
399 |
| - "metadata": { |
400 |
| - "collapsed": false |
401 |
| - }, |
| 374 | + "execution_count": 11, |
| 375 | + "metadata": {}, |
402 | 376 | "outputs": [
|
| 377 | + { |
| 378 | + "name": "stderr", |
| 379 | + "output_type": "stream", |
| 380 | + "text": [ |
| 381 | + "/Users/jig289/anaconda/envs/decode/lib/python3.5/site-packages/xgboost/core.py:614: UserWarning: Use subset (sliced data) of np.ndarray is not recommended because it will generate extra copies and increase memory consumption\n", |
| 382 | + " \"because it will generate extra copies and increase memory consumption\")\n" |
| 383 | + ] |
| 384 | + }, |
403 | 385 | {
|
404 | 386 | "name": "stdout",
|
405 | 387 | "output_type": "stream",
|
406 | 388 | "text": [
|
407 |
| - "('R2s:', array([ 0.75403802, 0.76625732]))\n" |
| 389 | + "R2s: [0.75403802 0.76625732]\n" |
408 | 390 | ]
|
409 | 391 | }
|
410 | 392 | ],
|
|
432 | 414 | },
|
433 | 415 | {
|
434 | 416 | "cell_type": "code",
|
435 |
| - "execution_count": 13, |
436 |
| - "metadata": { |
437 |
| - "collapsed": false |
438 |
| - }, |
| 417 | + "execution_count": 12, |
| 418 | + "metadata": {}, |
439 | 419 | "outputs": [
|
440 | 420 | {
|
441 | 421 | "name": "stderr",
|
442 | 422 | "output_type": "stream",
|
443 | 423 | "text": [
|
444 |
| - "/opt/anaconda/anaconda2/lib/python2.7/site-packages/sklearn/svm/base.py:220: ConvergenceWarning: Solver terminated early (max_iter=4000). Consider pre-processing your data with StandardScaler or MinMaxScaler.\n", |
| 424 | + "/Users/jig289/anaconda/envs/decode/lib/python3.5/site-packages/sklearn/svm/base.py:244: ConvergenceWarning: Solver terminated early (max_iter=4000). Consider pre-processing your data with StandardScaler or MinMaxScaler.\n", |
| 425 | + " % self.max_iter, ConvergenceWarning)\n", |
| 426 | + "/Users/jig289/anaconda/envs/decode/lib/python3.5/site-packages/sklearn/svm/base.py:244: ConvergenceWarning: Solver terminated early (max_iter=4000). Consider pre-processing your data with StandardScaler or MinMaxScaler.\n", |
445 | 427 | " % self.max_iter, ConvergenceWarning)\n"
|
446 | 428 | ]
|
447 | 429 | },
|
448 | 430 | {
|
449 | 431 | "name": "stdout",
|
450 | 432 | "output_type": "stream",
|
451 | 433 | "text": [
|
452 |
| - "('R2s:', array([ 0.81684722, 0.82538223]))\n" |
| 434 | + "R2s: [0.81684722 0.82538223]\n" |
453 | 435 | ]
|
454 | 436 | }
|
455 | 437 | ],
|
|
484 | 466 | },
|
485 | 467 | {
|
486 | 468 | "cell_type": "code",
|
487 |
| - "execution_count": 14, |
488 |
| - "metadata": { |
489 |
| - "collapsed": false |
490 |
| - }, |
| 469 | + "execution_count": 13, |
| 470 | + "metadata": {}, |
491 | 471 | "outputs": [
|
492 |
| - { |
493 |
| - "name": "stderr", |
494 |
| - "output_type": "stream", |
495 |
| - "text": [ |
496 |
| - "/opt/anaconda/anaconda2/lib/python2.7/site-packages/keras/models.py:826: UserWarning: The `nb_epoch` argument in `fit` has been renamed `epochs`.\n", |
497 |
| - " warnings.warn('The `nb_epoch` argument in `fit` '\n" |
498 |
| - ] |
499 |
| - }, |
500 | 472 | {
|
501 | 473 | "name": "stdout",
|
502 | 474 | "output_type": "stream",
|
503 | 475 | "text": [
|
504 |
| - "('R2s:', array([ 0.82674219, 0.84643551]))\n" |
| 476 | + "R2s: [0.82578506 0.84818598]\n" |
505 | 477 | ]
|
506 | 478 | }
|
507 | 479 | ],
|
|
529 | 501 | },
|
530 | 502 | {
|
531 | 503 | "cell_type": "code",
|
532 |
| - "execution_count": 15, |
533 |
| - "metadata": { |
534 |
| - "collapsed": false |
535 |
| - }, |
| 504 | + "execution_count": 14, |
| 505 | + "metadata": {}, |
536 | 506 | "outputs": [
|
537 |
| - { |
538 |
| - "name": "stderr", |
539 |
| - "output_type": "stream", |
540 |
| - "text": [ |
541 |
| - "decoders.py:433: UserWarning: Update your `SimpleRNN` call to the Keras 2 API: `SimpleRNN(400, recurrent_dropout=0, dropout=0, input_shape=(13, 52), activation=\"relu\")`\n", |
542 |
| - " model.add(SimpleRNN(self.units,input_shape=(X_train.shape[1],X_train.shape[2]),dropout_W=self.dropout,dropout_U=self.dropout,activation='relu')) #Within recurrent layer, include dropout\n" |
543 |
| - ] |
544 |
| - }, |
545 | 507 | {
|
546 | 508 | "name": "stdout",
|
547 | 509 | "output_type": "stream",
|
548 | 510 | "text": [
|
549 |
| - "('R2s:', array([ 0.82154092, 0.82119071]))\n" |
| 511 | + "R2s: [0.82405748 0.80346205]\n" |
550 | 512 | ]
|
551 | 513 | }
|
552 | 514 | ],
|
|
574 | 536 | },
|
575 | 537 | {
|
576 | 538 | "cell_type": "code",
|
577 |
| - "execution_count": 16, |
578 |
| - "metadata": { |
579 |
| - "collapsed": false |
580 |
| - }, |
| 539 | + "execution_count": 15, |
| 540 | + "metadata": {}, |
581 | 541 | "outputs": [
|
582 |
| - { |
583 |
| - "name": "stderr", |
584 |
| - "output_type": "stream", |
585 |
| - "text": [ |
586 |
| - "decoders.py:512: UserWarning: Update your `GRU` call to the Keras 2 API: `GRU(400, dropout=0, recurrent_dropout=0, input_shape=(13, 52))`\n", |
587 |
| - " model.add(GRU(self.units,input_shape=(X_train.shape[1],X_train.shape[2]),dropout_W=self.dropout,dropout_U=self.dropout)) #Within recurrent layer, include dropout\n" |
588 |
| - ] |
589 |
| - }, |
590 | 542 | {
|
591 | 543 | "name": "stdout",
|
592 | 544 | "output_type": "stream",
|
593 | 545 | "text": [
|
594 |
| - "('R2s:', array([ 0.84200426, 0.83707654]))\n" |
| 546 | + "R2s: [0.83770423 0.83575681]\n" |
595 | 547 | ]
|
596 | 548 | }
|
597 | 549 | ],
|
|
619 | 571 | },
|
620 | 572 | {
|
621 | 573 | "cell_type": "code",
|
622 |
| - "execution_count": 17, |
623 |
| - "metadata": { |
624 |
| - "collapsed": false |
625 |
| - }, |
| 574 | + "execution_count": 16, |
| 575 | + "metadata": {}, |
626 | 576 | "outputs": [
|
627 |
| - { |
628 |
| - "name": "stderr", |
629 |
| - "output_type": "stream", |
630 |
| - "text": [ |
631 |
| - "decoders.py:591: UserWarning: Update your `LSTM` call to the Keras 2 API: `LSTM(400, dropout=0, recurrent_dropout=0, input_shape=(13, 52))`\n", |
632 |
| - " model.add(LSTM(self.units,input_shape=(X_train.shape[1],X_train.shape[2]),dropout_W=self.dropout,dropout_U=self.dropout)) #Within recurrent layer, include dropout\n" |
633 |
| - ] |
634 |
| - }, |
635 | 577 | {
|
636 | 578 | "name": "stdout",
|
637 | 579 | "output_type": "stream",
|
638 | 580 | "text": [
|
639 |
| - "('R2s:', array([ 0.85177274, 0.85034613]))\n" |
| 581 | + "R2s: [0.84809856 0.84108359]\n" |
640 | 582 | ]
|
641 | 583 | }
|
642 | 584 | ],
|
|
655 | 597 | "print('R2s:', R2s_lstm)"
|
656 | 598 | ]
|
657 | 599 | },
|
| 600 | + { |
| 601 | + "cell_type": "markdown", |
| 602 | + "metadata": {}, |
| 603 | + "source": [ |
| 604 | + "### 4 - Side note on memory usage in TensorFlow\n", |
| 605 | + "When using the tensorflow backend for Keras (which is standard in newer versions), there can be issues with memory leakage, particularly when fitting many models. To avoid this problem, models can be deleted with the following code:\n", |
| 606 | + "\n", |
| 607 | + "```\n", |
| 608 | + "import gc\n", |
| 609 | + "from keras import backend as K\n", |
| 610 | + "\n", |
| 611 | + "del model_lstm\n", |
| 612 | + "K.clear_session()\n", |
| 613 | + "gc.collect()\n", |
| 614 | + "```" |
| 615 | + ] |
| 616 | + }, |
658 | 617 | {
|
659 | 618 | "cell_type": "markdown",
|
660 | 619 | "metadata": {},
|
|
665 | 624 | {
|
666 | 625 | "cell_type": "code",
|
667 | 626 | "execution_count": 18,
|
668 |
| - "metadata": { |
669 |
| - "collapsed": false |
670 |
| - }, |
| 627 | + "metadata": {}, |
671 | 628 | "outputs": [
|
672 | 629 | {
|
673 | 630 | "data": {
|
|
714 | 671 | "metadata": {
|
715 | 672 | "anaconda-cloud": {},
|
716 | 673 | "kernelspec": {
|
717 |
| - "display_name": "Python [py35]", |
| 674 | + "display_name": "Python 3", |
718 | 675 | "language": "python",
|
719 |
| - "name": "Python [py35]" |
| 676 | + "name": "python3" |
720 | 677 | },
|
721 | 678 | "language_info": {
|
722 | 679 | "codemirror_mode": {
|
|
728 | 685 | "name": "python",
|
729 | 686 | "nbconvert_exporter": "python",
|
730 | 687 | "pygments_lexer": "ipython3",
|
731 |
| - "version": "3.5.2" |
| 688 | + "version": "3.5.4" |
732 | 689 | }
|
733 | 690 | },
|
734 | 691 | "nbformat": 4,
|
735 |
| - "nbformat_minor": 0 |
| 692 | + "nbformat_minor": 1 |
736 | 693 | }
|