I had to convert this code to run on Python 3. Mostly I changed all the print statements to function calls; that's it. The run below ends with a `TypeError: 'range' object does not support item assignment` inside `random.shuffle`. Any suggestions would be helpful. Thanks.
(C:\GFApps\Anaconda3) C:\Users\tparmar\Documents\Python\predict stock market pri
ce using rnn>python main.py --stock_count=100 --train --input_size=1 --lstm_size
=128 --max_epoch=50 --embed_size=8
{'batch_size': 64,
'embed_size': 8,
'init_epoch': 5,
'init_learning_rate': 0.001,
'input_size': 1,
'keep_prob': 0.8,
'learning_rate_decay': 0.99,
'lstm_size': 128,
'max_epoch': 50,
'num_layers': 1,
'num_steps': 30,
'sample_size': 4,
'stock_count': 100,
'stock_symbol': None,
'train': True}
2017-12-16 23:32:55.276680: I C:\tf_jenkins\home\workspace\rel-win\M\windows\PY\
36\tensorflow\core\platform\cpu_feature_guard.cc:137] Your CPU supports instruct
ions that this TensorFlow binary was not compiled to use: AVX AVX2
C:\GFApps\Anaconda3\lib\site-packages\tensorflow\python\ops\gradients_impl.py:96
: UserWarning: Converting sparse IndexedSlices to a dense Tensor of unknown shap
e. This may consume a large amount of memory.
"Converting sparse IndexedSlices to a dense Tensor of unknown shape. "
---------
Variables: name (type shape) [size]
---------
embed_matrix:0 (float32_ref 100x8) [800, bytes: 3200]
dynamic_rnn/lstm_cell/kernel:0 (float32_ref 129x512) [66048, bytes: 264192]
dynamic_rnn/lstm_cell/bias:0 (float32_ref 512) [512, bytes: 2048]
w:0 (float32_ref 128x1) [128, bytes: 512]
b:0 (float32_ref 1) [1, bytes: 4]
Total size of variables: 67489
Total bytes of variables: 269956
{True: 497, False: 8}
main.py:58: FutureWarning: sort(columns=....) is deprecated, use sort_values(by=
.....)
info = info.sort('market_cap', ascending=False).reset_index(drop=True)
Head of S&P 500 info:
symbol name sector price \
0 AAPL Apple Inc. Information Technology 139.52
1 GOOGL Alphabet Inc Class A Information Technology 851.15
2 GOOG Alphabet Inc Class C Information Technology 831.91
3 MSFT Microsoft Corp. Information Technology 64.40
4 AMZN Amazon.com Inc Consumer Discretionary 846.02
dividend_yield price/earnings earnings/share book_value 52_week_low \
0 1.63 16.75 8.33 25.19 89.47
1 NaN 30.53 27.88 201.12 672.66
2 NaN 29.84 27.88 201.12 663.28
3 2.43 30.31 2.12 8.90 48.03
4 NaN 172.66 4.90 40.43 538.58
52_week_high market_cap ebitda price/sales price/book \
0 140.28 732.00 69.75 3.35 5.53
1 867.00 588.50 29.86 6.49 4.21
2 841.95 575.20 29.86 6.34 4.12
3 65.91 497.65 27.74 5.80 7.22
4 860.86 403.70 11.67 2.97 20.94
sec_filings file_exists
0 http://www.sec.gov/cgi-bin/browse-edgar?action... True
1 http://www.sec.gov/cgi-bin/browse-edgar?action... True
2 http://www.sec.gov/cgi-bin/browse-edgar?action... True
3 http://www.sec.gov/cgi-bin/browse-edgar?action... True
4 http://www.sec.gov/cgi-bin/browse-edgar?action... True
len(merged_test_X) = 17838
len(merged_test_y) = 17838
len(merged_test_labels) = 17838
{'AAPL': array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12,
13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38,
39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51,
52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64,
65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77,
78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90,
91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103,
104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116,
117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129,
130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142,
143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155,
156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168,
169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181,
182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194,
195, 196, 197, 198]), 'GOOGL': array([199, 200, 201, 202, 203, 204, 205,
206, 207, 208, 209, 210, 211,
212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224,
225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237,
238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250,
251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263,
264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276,
277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289,
290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302,
303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315,
316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328,
329, 330, 331, 332, 333, 334, 335, 336, 337, 338, 339, 340, 341,
342, 343, 344, 345, 346, 347, 348, 349, 350, 351, 352, 353, 354,
355, 356, 357, 358, 359, 360, 361, 362, 363, 364, 365]), 'GOOG': array([3
66, 367, 368, 369, 370, 371, 372, 373, 374, 375, 376, 377, 378,
379, 380, 381, 382, 383, 384, 385, 386, 387, 388, 389, 390, 391,
392, 393, 394, 395, 396, 397, 398, 399, 400, 401, 402, 403, 404,
405, 406, 407, 408, 409, 410, 411, 412, 413, 414, 415, 416, 417,
418, 419, 420, 421, 422, 423, 424, 425, 426, 427, 428, 429, 430,
431, 432, 433, 434, 435, 436, 437, 438, 439, 440, 441, 442, 443,
444, 445, 446, 447, 448, 449, 450, 451, 452, 453, 454, 455, 456,
457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 469,
470, 471, 472, 473, 474, 475, 476, 477, 478, 479, 480, 481, 482,
483, 484, 485, 486, 487, 488, 489, 490, 491, 492, 493, 494, 495,
496, 497, 498, 499, 500, 501, 502, 503, 504, 505, 506, 507, 508,
509, 510, 511, 512, 513, 514, 515, 516, 517, 518, 519, 520, 521,
522, 523, 524, 525, 526, 527, 528, 529, 530, 531, 532]), 'MSFT': array([5
33, 534, 535, 536, 537, 538, 539, 540, 541, 542, 543, 544, 545,
546, 547, 548, 549, 550, 551, 552, 553, 554, 555, 556, 557, 558,
559, 560, 561, 562, 563, 564, 565, 566, 567, 568, 569, 570, 571,
572, 573, 574, 575, 576, 577, 578, 579, 580, 581, 582, 583, 584,
585, 586, 587, 588, 589, 590, 591, 592, 593, 594, 595, 596, 597,
598, 599, 600, 601, 602, 603, 604, 605, 606, 607, 608, 609, 610,
611, 612, 613, 614, 615, 616, 617, 618, 619, 620, 621, 622, 623,
624, 625, 626, 627, 628, 629, 630, 631, 632, 633, 634, 635, 636,
637, 638, 639, 640, 641, 642, 643, 644, 645, 646, 647, 648, 649,
650, 651, 652, 653, 654, 655, 656, 657, 658, 659, 660, 661, 662,
663, 664, 665, 666, 667, 668, 669, 670, 671, 672, 673, 674, 675,
676, 677, 678, 679, 680, 681, 682, 683, 684, 685, 686, 687, 688,
689, 690, 691, 692, 693, 694, 695, 696, 697, 698, 699, 700, 701,
702, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714,
715, 716, 717, 718, 719, 720, 721, 722, 723, 724, 725, 726, 727,
728, 729, 730, 731])}
Start training for stocks: ['AAPL', 'GOOGL', 'GOOG', 'MSFT', 'AMZN', 'FB', 'XOM'
, 'JNJ', 'JPM', 'WFC', 'BAC', 'GE', 'T', 'PG', 'WMT', 'CVX', 'V', 'PFE', 'VZ', '
MRK', 'KO', 'CMCSA', 'HD', 'DIS', 'ORCL', 'PM', 'CSCO', 'IBM', 'INTC', 'C', 'UNH
', 'PEP', 'MO', 'AMGN', 'MA', 'MMM', 'MDT', 'BA', 'SLB', 'KHC', 'MCD', 'GS', 'AB
BV', 'HON', 'CELG', 'BMY', 'NKE', 'USB', 'WBA', 'UPS', 'UTX', 'GILD', 'UNP', 'AV
GO', 'RAI', 'LLY', 'CHTR', 'MS', 'CVS', 'PCLN', 'QCOM', 'SBUX', 'AGN', 'TXN', 'A
BT', 'ACN', 'DOW', 'TWX', 'COST', 'AXP', 'LOW', 'DD', 'MDLZ', 'CL', 'CB', 'BLK',
'BIIB', 'AIG', 'PNC', 'TMO', 'NEE', 'NFLX', 'DHR', 'ADBE', 'COP', 'NVDA', 'CRM'
, 'MET', 'GD', 'EOG', 'DUK', 'FOXA', 'CAT', 'GM', 'FOX', 'SCHW', 'SPG', 'PYPL',
'TJX', 'FDX']
Traceback (most recent call last):
File "main.py", line 112, in <module>
tf.app.run()
File "C:\GFApps\Anaconda3\lib\site-packages\tensorflow\python\platform\app.py"
, line 48, in run
_sys.exit(main(_sys.argv[:1] + flags_passthrough))
File "main.py", line 105, in main
rnn_model.train(stock_data_list, FLAGS)
File "C:\Users\tparmar\Documents\Python\predict stock market price using rnn\m
odel_rnn.py", line 209, in train
for batch_X, batch_y in d_.generate_one_epoch(config.batch_size):
File "C:\Users\tparmar\Documents\Python\predict stock market price using rnn\d
ata_model.py", line 65, in generate_one_epoch
random.shuffle(batch_indices)
File "C:\GFApps\Anaconda3\lib\random.py", line 274, in shuffle
x[i], x[j] = x[j], x[i]
TypeError: 'range' object does not support item assignment
(C:\GFApps\Anaconda3) C:\Users\tparmar\Documents\Python\predict stock market pri
ce using rnn>