-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathlf-thesis.bib
1163 lines (1062 loc) · 57.9 KB
/
lf-thesis.bib
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
% Encoding: UTF-8
@article{Zheng2017,
  author        = {Huiting Zheng and Jiabin Yuan and Long Chen},
  title         = {Short-Term Load Forecasting Using {EMD}-{LSTM} Neural Networks with a Xgboost Algorithm for Feature Importance Evaluation},
  journal       = {Energies},
  year          = {2017},
  volume        = {10},
  number        = {8},
  month         = aug,
  pages         = {1168},
  doi           = {10.3390/en10081168},
  abstract      = {Accurate load forecasting is an important issue for the reliable and efficient operation of a power system. This study presents a hybrid algorithm that combines similar days (SD) selection, empirical mode decomposition (EMD), and long short-term memory (LSTM) neural networks to construct a prediction model (i.e., SD-EMD-LSTM) for short-term load forecasting. The extreme gradient boosting-based weighted k-means algorithm is used to evaluate the similarity between the forecasting and historical days. The EMD method is employed to decompose the SD load to several intrinsic mode functions (IMFs) and residual. Separated LSTM neural networks were also employed to forecast each IMF and residual. Lastly, the forecasting values from each LSTM model were reconstructed. Numerical testing demonstrates that the SD-EMD-LSTM method can accurately forecast the electric load.},
  comment       = {This method uses a S2S LSTM RNN for load forecasting.
K-means clustering with xgboost is used to select a similar day for input to the network.
Intrinsic mode decomposition is used to split the forecast into separate neural networks which are then recombined at the end.
Authors claim it works far better than plain LSTM (such as implemented by Marino)},
  file          = {:papers/Short-Term Load Forecasting Using EMD-LSTM.pdf:PDF},
  publisher     = {{MDPI} {AG}},
}
@article{Marino2016,
  author        = {Daniel L. Marino and Kasun Amarasinghe and Milos Manic},
  title         = {Building Energy Load Forecasting using Deep Neural Networks},
  journal       = {CoRR},
  date          = {2016-10-29},
  eprint        = {1610.09460v1},
  eprintclass   = {cs.NE},
  eprinttype    = {arXiv},
  abstract      = {Ensuring sustainability demands more efficient energy management with minimized energy wastage. Therefore, the power grid of the future should provide an unprecedented level of flexibility in energy management. To that end, intelligent decision making requires accurate predictions of future energy demand/load, both at aggregate and individual site level. Thus, energy load forecasting have received increased attention in the recent past, however has proven to be a difficult problem. This paper presents a novel energy load forecasting methodology based on Deep Neural Networks, specifically Long Short Term Memory (LSTM) algorithms. The presented work investigates two variants of the LSTM: 1) standard LSTM and 2) LSTM-based Sequence to Sequence (S2S) architecture. Both methods were implemented on a benchmark data set of electricity consumption data from one residential customer. Both architectures where trained and tested on one hour and one-minute time-step resolution datasets. Experimental results showed that the standard LSTM failed at one-minute resolution data while performing well in one-hour resolution data. It was shown that S2S architecture performed well on both datasets. Further, it was shown that the presented methods produced comparable results with the other deep learning methods for energy forecasting in literature.},
  file          = {:papers/Building Energy Load Forecasting using Deep Neural.pdf:PDF},
  keywords      = {cs.NE},
}
@article{Bianchi2017,
  author        = {Filippo Maria Bianchi and Enrico Maiorino and Michael C. Kampffmeyer and Antonello Rizzi and Robert Jenssen},
  title         = {An overview and comparative analysis of Recurrent Neural Networks for Short Term Load Forecasting},
  journal       = {CoRR},
  year          = {2017},
  volume        = {abs/1705.04378},
  eprint        = {1705.04378},
  archiveprefix = {arXiv},
  url           = {http://arxiv.org/abs/1705.04378},
  abstract      = {The key component in forecasting demand and consumption of resources in a supply network is an accurate prediction of real-valued time series. Indeed, both service interruptions and resource waste can be reduced with the implementation of an effective forecasting system. Significant research has thus been devoted to the design and development of methodologies for short term load forecasting over the past decades. A class of mathematical models, called Recurrent Neural Networks, are nowadays gaining renewed interest among researchers and they are replacing many practical implementation of the forecasting systems, previously based on static methods. Despite the undeniable expressive power of these architectures, their recurrent nature complicates their understanding and poses challenges in the training procedures. Recently, new important families of recurrent architectures have emerged and their applicability in the context of load forecasting has not been investigated completely yet. In this paper we perform a comparative study on the problem of Short-Term Load Forecast, by using different classes of state-of-the-art Recurrent Neural Networks. We test the reviewed models first on controlled synthetic tasks and then on different real datasets, covering important practical cases of study. We provide a general overview of the most important architectures and we define guidelines for configuring the recurrent networks to predict real-valued time series.},
  comment       = {An overview of the use of Elman RNN, LSTM, GRU, NARX, and Echo state RNNs for load forecasting.
Found some reasonable variance between all architectures.
Does not consider S2S.
Also gives a very nice overview of RNNs in general. },
  bibsource     = {dblp computer science bibliography, https://dblp.org},
  biburl        = {https://dblp.org/rec/bib/journals/corr/BianchiMKRJ17},
  file          = {:papers/An overview and comparative analysis of Recurrent Neural Networks for Short Term Load Forecasting.pdf:PDF},
  timestamp     = {Wed, 07 Jun 2017 14:43:10 +0200},
}
@article{Flunkert2017,
  author        = {Valentin Flunkert and David Salinas and Jan Gasthaus},
  title         = {{DeepAR}: Probabilistic Forecasting with Autoregressive Recurrent Networks},
  journal       = {CoRR},
  date          = {2017-04-13},
  eprint        = {1704.04110v2},
  eprintclass   = {cs.AI},
  eprinttype    = {arXiv},
  abstract      = {A key enabler for optimizing business processes is accurately estimating the probability distribution of a time series future given its past. Such probabilistic forecasts are crucial for example for reducing excess inventory in supply chains. In this paper we propose DeepAR, a novel methodology for producing accurate probabilistic forecasts, based on training an auto-regressive recurrent network model on a large number of related time series. We show through extensive empirical evaluation on several real-world forecasting data sets that our methodology is more accurate than state-of-the-art models, while requiring minimal feature engineering.},
  file          = {:papers/DeepAR Forecasting Recurrent Networks.pdf:PDF},
  keywords      = {cs.AI, cs.LG, stat.ML},
}
@inproceedings{Scott2014,
  author        = {Scott, Paul and Thi\'ebaux, Sylvie},
  title         = {Distributed {Multi-Period} Optimal Power Flow for Demand Response in Microgrids},
  booktitle     = {{ACM} {e-Energy}},
  year          = {2015},
  month         = jul,
  url           = {http://users.cecs.anu.edu.au/~pscott/extras/papers/scott2015.pdf},
  address       = {Bangalore, India},
  file          = {:papers/Dynamic Optimal Power Flow in Microgrids using the Alternating Direction Method of Multipliers.pdf:PDF},
}
@article{Evan2016,
  author        = {Evan Franklin},
  title         = {Agents of change: Making batteries go the extra mile},
  journal       = {ReNew},
  number        = {136},
  pages         = {56--58},
  year          = {2016},
  file          = {:papers/agents-of-change-bruny.pdf:PDF},
}
@report{Jacobs2017,
  author        = {Walter Gerardi and Damien O’Connor},
  title         = {Projections of uptake of small-scale systems},
  type          = {resreport},
  institution   = {AEMO},
  date          = {2017-06-09},
  url           = {https://www.aemo.com.au/Electricity/National-Electricity-Market-NEM/Planning-and-forecasting/Electricity-Forecasting-Insights/2017-Electricity-Forecasting-Insights/Key-component-consumption-forecasts/PV-and-storage},
  file          = {:papers/Projections of Uptake of Small-scale Systems.pdf:PDF},
}
@report{AEMO2016,
  title         = {2016 National Electricity Forecasting Report},
  type          = {resreport},
  institution   = {Australian Energy Market Operator},
  year          = {2016},
  url           = {https://www.aemo.com.au/Electricity/National-Electricity-Market-NEM/Planning-and-forecasting/National-Electricity-Forecasting-Report},
  file          = {:papers/2016 National Electricity Forecasting Report NEFR.pdf:PDF},
}
@article{Kong2018,
  author        = {Weicong Kong and Zhao Yang Dong and David J. Hill and Fengji Luo and Yan Xu},
  title         = {Short-Term Residential Load Forecasting Based on Resident Behaviour Learning},
  journal       = {{IEEE} Transactions on Power Systems},
  year          = {2018},
  volume        = {33},
  number        = {1},
  month         = jan,
  pages         = {1087--1088},
  doi           = {10.1109/tpwrs.2017.2688178},
  file          = {:papers/kong2017.pdf:PDF},
  publisher     = {Institute of Electrical and Electronics Engineers ({IEEE})},
}
@thesis{Lier2015,
  author        = {Ciarán Lier},
  title         = {Applying Machine Learning Techniques to Short Term Load Forecasting},
  type          = {mathesis},
  institution   = {University of Groningen},
  year          = {2015},
  file          = {:papers/Thesis_Ciaran_Lier.pdf:PDF},
}
@article{Basil1975,
  author        = {Victor R. Basili and Albert J. Turner},
  title         = {Iterative enhancement: A practical technique for software development},
  journal       = {{IEEE} Transactions on Software Engineering},
  year          = {1975},
  volume        = {{SE}-1},
  number        = {4},
  month         = dec,
  pages         = {390--396},
  doi           = {10.1109/tse.1975.6312870},
  file          = {:papers/Iterative enhancement A practical technique for software development.pdf:PDF},
  publisher     = {Institute of Electrical and Electronics Engineers ({IEEE})},
}
@book{Box1970,
  author        = {George E. P. Box and Gwilym M. Jenkins and Gregory C. Reinsel},
  title         = {Time Series Analysis: Forecasting and Control},
  year          = {1970},
  month         = jun,
  publisher     = {John Wiley {\&} Sons, Inc.},
  file          = {:papers/Time Series Analysis_ Forecasting and Control-Wiley (2015).pdf:PDF},
}
@book{Weron2006,
  author        = {Rafa{\l} Weron},
  title         = {Modeling and Forecasting Electricity Loads and Prices},
  year          = {2006},
  month         = dec,
  publisher     = {John Wiley {\&} Sons Ltd},
  doi           = {10.1002/9781118673362},
  file          = {:papers/Rafal Weron-Modeling and Forecasting Electricity Loads and Prices_ A Statistical Approach (The Wiley Finance Series) (2006).pdf:PDF},
}
@article{Ceperic2013,
  author        = {Ervin Ceperic and Vladimir Ceperic and Adrijan Baric},
  title         = {A Strategy for Short-Term Load Forecasting by Support Vector Regression Machines},
  journal       = {{IEEE} Transactions on Power Systems},
  year          = {2013},
  volume        = {28},
  number        = {4},
  month         = nov,
  pages         = {4356--4364},
  doi           = {10.1109/tpwrs.2013.2269803},
  file          = {:papers/A Strategy for Short-Term Load Forecasting by Support Vector Regression Machines.pdf:PDF},
  publisher     = {Institute of Electrical and Electronics Engineers ({IEEE})},
}
@inproceedings{Drucker1996,
  author        = {Drucker, Harris and Burges, Chris J. C. and Kaufman, Linda and Smola, Alex and Vapnik, Vladimir},
  title         = {Support Vector Regression Machines},
  booktitle     = {Proceedings of the 9th International Conference on Neural Information Processing Systems},
  series        = {NIPS'96},
  year          = {1996},
  pages         = {155--161},
  publisher     = {MIT Press},
  address       = {Cambridge, MA, USA},
  location      = {Denver, Colorado},
  url           = {http://dl.acm.org/citation.cfm?id=2998981.2999003},
  acmid         = {2999003},
  numpages      = {7},
  file          = {:papers/Support Vector Regression Machines.pdf:PDF},
}
@article{Chen2004,
  author        = {B.-J. Chen and M.-W. Chang and C.-J. Lin},
  title         = {Load Forecasting Using Support Vector Machines: A Study on {EUNITE} Competition 2001},
  journal       = {{IEEE} Transactions on Power Systems},
  year          = {2004},
  volume        = {19},
  number        = {4},
  month         = nov,
  pages         = {1821--1830},
  doi           = {10.1109/tpwrs.2004.835679},
  file          = {:papers/Load Forecasting Using Support Vector Machines A Study on EUNITE Competition 2001.pdf:PDF},
  publisher     = {Institute of Electrical and Electronics Engineers ({IEEE})},
}
@article{Smola2004,
  author        = {Alex J. Smola and Bernhard Schölkopf},
  title         = {A tutorial on support vector regression},
  journal       = {Statistics and Computing},
  year          = {2004},
  volume        = {14},
  number        = {3},
  month         = aug,
  pages         = {199--222},
  doi           = {10.1023/b:stco.0000035301.49549.88},
  file          = {:papers/A tutorial on support vector regression.pdf:PDF},
  publisher     = {Springer Nature},
}
@article{Desouky2000,
  author        = {A. A. El Desouky and M. M. El Kateb},
  title         = {Hybrid adaptive techniques for electric-load forecast using {ANN} and {ARIMA}},
  journal       = {{IEE} Proceedings - Generation, Transmission and Distribution},
  year          = {2000},
  volume        = {147},
  number        = {4},
  pages         = {213},
  doi           = {10.1049/ip-gtd:20000521},
  file          = {:papers/Hybrid adaptive techniques for electric-load forecast using ANN and ARIMA.pdf:PDF},
  publisher     = {Institution of Engineering and Technology ({IET})},
}
@article{Bennett2014,
  author        = {Christopher J. Bennett and Rodney A. Stewart and Jun Wei Lu},
  title         = {Forecasting low voltage distribution network demand profiles using a pattern recognition based expert system},
  journal       = {Energy},
  year          = {2014},
  volume        = {67},
  month         = apr,
  pages         = {200--212},
  doi           = {10.1016/j.energy.2014.01.032},
  file          = {:papers/Bennett2014.pdf:PDF},
  publisher     = {Elsevier {BV}},
}
@inproceedings{Karthika2017,
  author        = {S Karthika and Vijaya Margaret and K. Balaraman},
  title         = {Hybrid short term load forecasting using {ARIMA}-{SVM}},
  booktitle     = {2017 Innovations in Power and Advanced Computing Technologies (i-{PACT})},
  year          = {2017},
  month         = apr,
  publisher     = {{IEEE}},
  doi           = {10.1109/ipact.2017.8245060},
  file          = {:papers/Hybrid Short Term Load Forecasting using ARIMA-SVM.pdf:PDF},
}
@article{RochaReis2005,
  author        = {A. J. Rocha Reis and A. P. Alves da Silva},
  title         = {Feature Extraction via Multiresolution Analysis for Short-Term Load Forecasting},
  journal       = {{IEEE} Transactions on Power Systems},
  year          = {2005},
  volume        = {20},
  number        = {1},
  month         = feb,
  pages         = {189--198},
  doi           = {10.1109/tpwrs.2004.840380},
  file          = {:papers/Feature extraction via multiresolution analysis.pdf:PDF},
  publisher     = {Institute of Electrical and Electronics Engineers ({IEEE})},
}
@article{AMJADY2009,
  author        = {Amjady, N. and Keynia, F.},
  title         = {Short-term load forecasting of power systems by combination of wavelet transform and neuro-evolutionary algorithm},
  journal       = {Energy},
  year          = {2009},
  volume        = {34},
  number        = {1},
  month         = jan,
  pages         = {46--57},
  doi           = {10.1016/j.energy.2008.09.020},
  file          = {:papers/Short-term load forecasting of power systems by combination of wavelet transform and neuro-evolutionary algorithm.pdf:PDF},
  publisher     = {Elsevier {BV}},
}
@article{Deihimi2012,
  author        = {Ali Deihimi and Hemen Showkati},
  title         = {Application of echo state networks in short-term electric load forecasting},
  journal       = {Energy},
  year          = {2012},
  volume        = {39},
  number        = {1},
  month         = mar,
  pages         = {327--340},
  doi           = {10.1016/j.energy.2012.01.007},
  file          = {:papers/Application of echo state networks in short-term electric load forecasting.pdf:PDF},
  publisher     = {Elsevier {BV}},
}
@article{Elattar2010,
  author        = {Ehab E. Elattar and John Goulermas and Q. H. Wu},
  title         = {Electric Load Forecasting Based on Locally Weighted Support Vector Regression},
  journal       = {{IEEE} Transactions on Systems, Man, and Cybernetics, Part C (Applications and Reviews)},
  year          = {2010},
  volume        = {40},
  number        = {4},
  month         = jul,
  pages         = {438--447},
  doi           = {10.1109/tsmcc.2010.2040176},
  file          = {:papers/Electric Load Forecasting Based on Locally Weighted Support Vector Regression.pdf:PDF},
  publisher     = {Institute of Electrical and Electronics Engineers ({IEEE})},
}
@book{Goodfellow-et-al-2016,
  author        = {Ian Goodfellow and Yoshua Bengio and Aaron Courville},
  title         = {Deep Learning},
  publisher     = {MIT Press},
  year          = {2016},
  note          = {\url{http://www.deeplearningbook.org}},
  file          = {:papers/deeplearningbook.pdf:PDF},
}
@book{negnevitsky2005artificial,
  author        = {Negnevitsky, Michael},
  title         = {Artificial intelligence: a guide to intelligent systems},
  publisher     = {Pearson Education},
  year          = {2005},
  file          = {:papers/Artificial_Intelligence-A_Guide_to_Intelligent_Systems.pdf:PDF},
}
@incollection{Bottou2011,
  author        = {L{\'{e}}on Bottou},
  title         = {Large-Scale Machine Learning with Stochastic Gradient Descent},
  booktitle     = {Chapman {\&} Hall/{CRC} Computer Science {\&} Data Analysis},
  year          = {2011},
  month         = dec,
  pages         = {17--25},
  publisher     = {Chapman and Hall/{CRC}},
  doi           = {10.1201/b11429-4},
  file          = {:papers/Large-Scale Machine Learning with Stochastic Gradient Descent.pdf:PDF},
}
@article{Zainal-Mokhtar2013,
  author        = {Khursiah Zainal-Mokhtar and Junita Mohamad-Saleh},
  title         = {An Oil Fraction Neural Sensor Developed Using Electrical Capacitance Tomography Sensor Data},
  journal       = {Sensors},
  year          = {2013},
  volume        = {13},
  number        = {9},
  month         = aug,
  pages         = {11385--11406},
  doi           = {10.3390/s130911385},
  file          = {:papers/An Oil Fraction Neural Sensor Developed Using Electrical Capacitance Tomography Sensor Data.pdf:PDF;:images/A-schematic-diagram-of-a-Multi-Layer-Perceptron-MLP-neural-network.png:PNG image},
  publisher     = {{MDPI} {AG}},
}
@www{Deshpande2017,
  author        = {Mohit Deshpande},
  title         = {Perceptrons: The First Neural Networks},
  date          = {2017-09-12},
  url           = {https://pythonmachinelearning.pro/perceptrons-the-first-neural-networks/},
  file          = {:images/Single-Perceptron-825x459.png:PNG image},
}
@article{2017arXiv171009829S,
  author        = {Sabour, Sara and Frosst, Nicholas and Hinton, Geoffrey E.},
  title         = {Dynamic Routing Between Capsules},
  journal       = {ArXiv e-prints},
  year          = {2017},
  month         = oct,
  eprint        = {1710.09829},
  archiveprefix = {arXiv},
  primaryclass  = {cs.CV},
  adsnote       = {Provided by the SAO/NASA Astrophysics Data System},
  adsurl        = {http://adsabs.harvard.edu/abs/2017arXiv171009829S},
  file          = {:papers/Dynamic Routing Between Capsules.pdf:PDF},
  keywords      = {Computer Science - Computer Vision and Pattern Recognition},
}
@incollection{Vaswani2017,
  author        = {Vaswani, Ashish and Shazeer, Noam and Parmar, Niki and Uszkoreit, Jakob and Jones, Llion and Gomez, Aidan N. and Kaiser, {\L}ukasz and Polosukhin, Illia},
  title         = {Attention Is All You Need},
  booktitle     = {Advances in Neural Information Processing Systems 30},
  editor        = {I. Guyon and U. V. Luxburg and S. Bengio and H. Wallach and R. Fergus and S. Vishwanathan and R. Garnett},
  year          = {2017},
  pages         = {5998--6008},
  publisher     = {Curran Associates, Inc.},
  url           = {http://papers.nips.cc/paper/7181-attention-is-all-you-need.pdf},
  file          = {:papers/Attention Is All You Need.pdf:PDF},
}
@article{DBLP:journals/corr/OordDZSVGKSK16,
  author        = {A{\"{a}}ron van den Oord and Sander Dieleman and Heiga Zen and Karen Simonyan and Oriol Vinyals and Alex Graves and Nal Kalchbrenner and Andrew W. Senior and Koray Kavukcuoglu},
  title         = {WaveNet: {A} Generative Model for Raw Audio},
  journal       = {CoRR},
  year          = {2016},
  volume        = {abs/1609.03499},
  eprint        = {1609.03499},
  archiveprefix = {arXiv},
  url           = {http://arxiv.org/abs/1609.03499},
  bibsource     = {dblp computer science bibliography, https://dblp.org},
  biburl        = {https://dblp.org/rec/bib/journals/corr/OordDZSVGKSK16},
  file          = {:papers/WAVENET A GENERATIVE MODEL FOR RAW AUDIO.pdf:PDF},
  timestamp     = {Wed, 07 Jun 2017 14:42:54 +0200},
}
@article{Mandal2010,
  author        = {Paras Mandal and Anurag K. Srivastava and Tomonobu Senjyu and Michael Negnevitsky},
  title         = {A new recursive neural network algorithm to forecast electricity price for {PJM} day-ahead market},
  journal       = {International Journal of Energy Research},
  year          = {2010},
  volume        = {34},
  number        = {6},
  month         = may,
  pages         = {507--522},
  doi           = {10.1002/er.1569},
  file          = {:papers/A new recursive neural network algorithm to forecast electricity price for PJM day-ahead market.pdf:PDF},
  publisher     = {Wiley},
}
@article{Zhao2017,
  author        = {Zheng Zhao and Weihai Chen and Xingming Wu and Peter C. Y. Chen and Jingmeng Liu},
  title         = {{LSTM} network: a deep learning approach for short-term traffic forecast},
  journal       = {{IET} Intelligent Transport Systems},
  year          = {2017},
  volume        = {11},
  number        = {2},
  month         = mar,
  pages         = {68--75},
  doi           = {10.1049/iet-its.2016.0208},
  file          = {:papers/LSTM network a deep learning approach for short-term traffic forecast.pdf:PDF},
  publisher     = {Institution of Engineering and Technology ({IET})},
}
@article{Chen2010,
  author        = {Ying Chen and P. B. Luh and Che Guan and Yige Zhao and L. D. Michel and M. A. Coolbeth and P. B. Friedland and S. J. Rourke},
  title         = {Short-Term Load Forecasting: Similar Day-Based Wavelet Neural Networks},
  journal       = {{IEEE} Transactions on Power Systems},
  year          = {2010},
  volume        = {25},
  number        = {1},
  month         = feb,
  pages         = {322--330},
  doi           = {10.1109/tpwrs.2009.2030426},
  file          = {:papers/Short-Term Load Forecasting Similar Day-Based Wavelet Neural Networks.pdf:PDF},
  publisher     = {Institute of Electrical and Electronics Engineers ({IEEE})},
}
@article{Kong2017,
  author        = {Weicong Kong and Zhao Yang Dong and Youwei Jia and David J. Hill and Yan Xu and Yuan Zhang},
  title         = {Short-Term Residential Load Forecasting based on {LSTM} Recurrent Neural Network},
  journal       = {{IEEE} Transactions on Smart Grid},
  year          = {2017},
  pages         = {1--1},
  doi           = {10.1109/tsg.2017.2753802},
  file          = {:papers/Short-Term Residential Load Forecasting based on LSTM Recurrent Neural Network.pdf:PDF},
  publisher     = {Institute of Electrical and Electronics Engineers ({IEEE})},
}
@inproceedings{Cho2014,
  author        = {Kyunghyun Cho and Bart van Merrienboer and Dzmitry Bahdanau and Yoshua Bengio},
  title         = {On the Properties of Neural Machine Translation: Encoder{\textendash}Decoder Approaches},
  booktitle     = {Proceedings of {SSST}-8, Eighth Workshop on Syntax, Semantics and Structure in Statistical Translation},
  publisher     = {Association for Computational Linguistics},
  year          = {2014},
  doi           = {10.3115/v1/w14-4012},
  file          = {:papers/On the Properties of Neural Machine Translation Encoder–Decoder Approaches.pdf:PDF},
}
@article{Cho2014a,
  author        = {Kyunghyun Cho and Bart van Merrienboer and Caglar Gulcehre and Dzmitry Bahdanau and Fethi Bougares and Holger Schwenk and Yoshua Bengio},
  title         = {Learning Phrase Representations using RNN Encoder-Decoder for Statistical Machine Translation},
  date          = {2014-06-03},
  year          = {2014},
  eprint        = {1406.1078v3},
  eprinttype    = {arXiv},
  eprintclass   = {cs.CL},
  abstract      = {In this paper, we propose a novel neural network model called RNN Encoder-Decoder that consists of two recurrent neural networks (RNN). One RNN encodes a sequence of symbols into a fixed-length vector representation, and the other decodes the representation into another sequence of symbols. The encoder and decoder of the proposed model are jointly trained to maximize the conditional probability of a target sequence given a source sequence. The performance of a statistical machine translation system is empirically found to improve by using the conditional probabilities of phrase pairs computed by the RNN Encoder-Decoder as an additional feature in the existing log-linear model. Qualitatively, we show that the proposed model learns a semantically and syntactically meaningful representation of linguistic phrases.},
  keywords      = {cs.CL, cs.LG, cs.NE, stat.ML},
  file          = {:papers/Learning Phrase Representations using RNN Encoder–Decoder for Statistical Machine Translation.pdf:PDF},
}
@article{hochreiter1997long,
  author        = {Hochreiter, Sepp and Schmidhuber, J{\"u}rgen},
  title         = {Long short-term memory},
  journal       = {Neural computation},
  publisher     = {MIT Press},
  year          = {1997},
  volume        = {9},
  number        = {8},
  pages         = {1735--1780},
  file          = {:papers/LONG SHORT-TERM MEMORY.pdf:PDF},
}
@article{luong2015effective,
  author        = {Luong, Minh-Thang and Pham, Hieu and Manning, Christopher D.},
  title         = {Effective approaches to attention-based neural machine translation},
  year          = {2015},
  eprint        = {1508.04025},
  eprinttype    = {arXiv},
  eprintclass   = {cs.CL},
  file          = {:papers/Effective Approaches to Attention-based Neural Machine Translation.pdf:PDF},
}
@inproceedings{Song2017,
  author        = {Huan Song and Deepta Rajan and Jayaraman Thiagarajan and Andreas Spanias},
  title         = {Attend and Diagnose: Clinical Time Series Analysis Using Attention Models},
  booktitle     = {Proceedings of the {AAAI} Conference on Artificial Intelligence},
  year          = {2018},
  keywords      = {Attention model; Recurrent Neural Network; Time Series Analysis; Deep Learning; Clinical; Healthcare},
  abstract      = {With widespread adoption of electronic health records, there is an increased emphasis for predictive models that can effectively deal with clinical time-series data. Powered by Recurrent Neural Network (RNN) architectures with Long Short-Term Memory (LSTM) units, deep neural networks have achieved state-of-the-art results in several clinical prediction tasks. Despite the success of RNN, its sequential nature prohibits parallelized computing, thus making it inefficient particularly when processing long sequences. Recently, architectures which are based solely on attention mechanisms have shown remarkable success in transduction tasks in NLP, while being computationally superior. In this paper, for the first time, we utilize attention models for clinical time-series modeling, thereby dispensing recurrence entirely. We develop the SAnD (Simply Attend and Diagnose) architecture, which employs a masked, self-attention mechanism, and uses positional encoding and dense interpolation strategies for incorporating temporal order. Furthermore, we develop a multi-task variant of SAnD to jointly infer models with multiple diagnosis tasks. Using the recent MIMIC-III benchmark datasets, we demonstrate that the proposed approach achieves state-of-the-art performance in all tasks, outperforming LSTM models and classical baselines with hand-engineered features.},
  url           = {https://www.aaai.org/ocs/index.php/AAAI/AAAI18/paper/view/16325},
  file          = {:papers/Attend and Diagnose Clinical Time Series Analysis using Attention Models.pdf:PDF},
}
@article{Hippert2001,
  author        = {H.S. Hippert and C.E. Pedreira and R.C. Souza},
  title         = {Neural networks for short-term load forecasting: a review and evaluation},
  journal       = {{IEEE} Transactions on Power Systems},
  publisher     = {Institute of Electrical and Electronics Engineers ({IEEE})},
  year          = {2001},
  volume        = {16},
  number        = {1},
  pages         = {44--55},
  doi           = {10.1109/59.910780},
  file          = {:papers/Neural Networks for Short-Term Load Forecasting A Review and Evaluation.pdf:PDF},
}
@report{AEMO2017,
  author        = {{AEMO}},
  title         = {Forecast Accuracy Report 2017},
  type          = {resreport},
  institution   = {Australian Energy Market Operator},
  date          = {2017-11-01},
  file          = {:papers/FORECAST-ACCURACY-REPORT-2017-final.pdf:PDF},
}
@article{Rahman1993,
  author        = {S. Rahman and O. Hazim},
  title         = {A generalized knowledge-based short-term load-forecasting technique},
  journal       = {{IEEE} Transactions on Power Systems},
  year          = {1993},
  volume        = {8},
  number        = {2},
  month         = may,
  pages         = {508--514},
  doi           = {10.1109/59.260833},
  file          = {:papers/A Generalized Knowledge-Based Short-Term Load-Forecasting Technique.pdf:PDF},
  publisher     = {Institute of Electrical and Electronics Engineers ({IEEE})},
}
@article{Senjyu1998,
  author        = {T. Senjyu and S. Higa and K. Uezato},
  title         = {Future load curve shaping based on similarity using fuzzy logic approach},
  journal       = {{IEE} Proceedings - Generation, Transmission and Distribution},
  publisher     = {Institution of Engineering and Technology ({IET})},
  year          = {1998},
  volume        = {145},
  number        = {4},
  pages         = {375},
  doi           = {10.1049/ip-gtd:19981998},
  file          = {:papers/Future load curve shaping based on simi arity using fuzzy logic approach.pdf:PDF},
}
@article{Mansouri2014,
  author        = {Mansouri, Vahid and Akbari, Mohammad Esmaeil},
  title         = {Neural Networks in Electric Load Forecasting: A Comprehensive Survey},
  journal       = {Journal of Artificial Intelligence in Electrical Engineering},
  year          = {2014},
  volume        = {3},
  number        = {10},
  pages         = {37--50},
  issn          = {2345-4652},
  url           = {http://jaiee.iau-ahar.ac.ir/article_514436.html},
  abstract      = {Review and classification of electric load forecasting (LF) techniques based on artificial neural networks (ANN) is presented. A basic ANNs architectures used in LF reviewed. A wide range of ANN oriented applications for forecasting are given in the literature. These are classified into five groups: (1) ANNs in short-term LF, (2) ANNs in mid-term LF, (3) ANNs in long-term LF, (4) Hybrid ANNs in LF, (5) ANNs in Special applications of LF. The major research articles for each category are briefly described and the related literature reviewed. Conclusions are made on future research directions.},
  file          = {:papers/Neural Networks in Electric Load Forecasting A Comprehensive Survey.pdf:PDF},
}
@Online{CIGRE2016,
  author = {{CIGRE}},
  title = {CIGRE Working Group concludes Demand forecasting study},
  year = {2016},
  date = {2016-12-19},
  url = {https://www.cigreaustralia.org.au/news/features/cigre-working-group-concludes-demand-forecasting-study/},
}
@Article{Schuster1997,
  author = {M. Schuster and K. K. Paliwal},
  title = {Bidirectional recurrent neural networks},
  journal = {{IEEE} Transactions on Signal Processing},
  year = {1997},
  volume = {45},
  number = {11},
  month = nov,
  pages = {2673--2681},
  doi = {10.1109/78.650093},
  file = {:papers/Bidirectional Recurrent Neural Networks.pdf:PDF},
  publisher = {Institute of Electrical and Electronics Engineers ({IEEE})},
}
@article{BahdanauCB14,
  author        = {Dzmitry Bahdanau and Kyunghyun Cho and Yoshua Bengio},
  title         = {Neural Machine Translation by Jointly Learning to Align and Translate},
  journal       = {CoRR},
  year          = {2014},
  volume        = {abs/1409.0473},
  archiveprefix = {arXiv},
  eprint        = {1409.0473},
  url           = {http://arxiv.org/abs/1409.0473},
  biburl        = {https://dblp.org/rec/bib/journals/corr/BahdanauCB14},
  bibsource     = {dblp computer science bibliography, https://dblp.org},
  timestamp     = {Wed, 07 Jun 2017 14:40:19 +0200},
  file          = {:papers/Neural Machine Translation by Jointly Learning to Align and Translate.pdf:PDF},
}
@InProceedings{Haoyu2012,
  author = {Wang, Haoyu and Zhou, Haili},
  title = {Basic Design Principles in Software Engineering},
  booktitle = {2012 Fourth International Conference on Computational and Information Sciences},
  year = {2012},
  publisher = {{IEEE}},
  month = {aug},
  doi = {10.1109/iccis.2012.91},
  file = {:papers/Basic Design Principles in Software Engineering.pdf:PDF},
}
@article{Parmar2018,
  author        = {Niki Parmar and Ashish Vaswani and Jakob Uszkoreit and Lukasz Kaiser and Noam Shazeer and Alexander Ku},
  title         = {Image Transformer},
  journal       = {CoRR},
  year          = {2018},
  volume        = {abs/1802.05751},
  archiveprefix = {arXiv},
  eprint        = {1802.05751},
  url           = {http://arxiv.org/abs/1802.05751},
  biburl        = {https://dblp.org/rec/bib/journals/corr/abs-1802-05751},
  bibsource     = {dblp computer science bibliography, https://dblp.org},
  timestamp     = {Thu, 01 Mar 2018 19:20:48 +0100},
  file          = {:papers/image transformer.pdf:PDF},
}
@Article{Ba2016,
  author = {Ba, Jimmy Lei and Kiros, Jamie Ryan and Hinton, Geoffrey E.},
  title = {Layer normalization},
  journal = {arXiv preprint arXiv:1607.06450},
  year = {2016},
  eprint = {1607.06450},
  archiveprefix = {arXiv},
  primaryclass = {stat.ML},
  url = {http://arxiv.org/abs/1607.06450},
  file = {:papers/Layer Normalization.pdf:PDF},
}
@InProceedings{He2015,
  author = {K. He and X. Zhang and S. Ren and J. Sun},
  title = {Deep Residual Learning for Image Recognition},
  booktitle = {2016 IEEE Conference on Computer Vision and Pattern Recognition (CVPR)},
  year = {2016},
  month = jun,
  pages = {770--778},
  doi = {10.1109/CVPR.2016.90},
  abstract = {Deeper neural networks are more difficult to train. We present a residual learning framework to ease the training of networks that are substantially deeper than those used previously. We explicitly reformulate the layers as learning residual functions with reference to the layer inputs, instead of learning unreferenced functions. We provide comprehensive empirical evidence showing that these residual networks are easier to optimize, and can gain accuracy from considerably increased depth. On the ImageNet dataset we evaluate residual nets with a depth of up to 152 layers - 8× deeper than VGG nets [40] but still having lower complexity. An ensemble of these residual nets achieves 3.57% error on the ImageNet test set. This result won the 1st place on the ILSVRC 2015 classification task. We also present analysis on CIFAR-10 with 100 and 1000 layers. The depth of representations is of central importance for many visual recognition tasks. Solely due to our extremely deep representations, we obtain a 28% relative improvement on the COCO object detection dataset. Deep residual nets are foundations of our submissions to ILSVRC & COCO 2015 competitions1, where we also won the 1st places on the tasks of ImageNet detection, ImageNet localization, COCO detection, and COCO segmentation.},
  file = {:papers/Deep Residual Learning for Image Recognition.pdf:PDF},
  issn = {1063-6919},
  keywords = {COCO segmentation;ImageNet localization;ILSVRC & COCO 2015 competitions;deep residual nets;COCO object detection dataset;visual recognition tasks;CIFAR-10;ILSVRC 2015 classification task;ImageNet test set;VGG nets;residual nets;ImageNet dataset;residual function learning;deeper neural network training;image recognition;deep residual learning;Training;Degradation;Complexity theory;Image recognition;Neural networks;Visualization;Image segmentation},
}
@Article{Kingma2014,
  author = {Kingma, Diederik P. and Ba, Jimmy},
  title = {{Adam}: A method for stochastic optimization},
  journal = {arXiv preprint arXiv:1412.6980},
  year = {2014},
  eprint = {1412.6980},
  archiveprefix = {arXiv},
  primaryclass = {cs.LG},
  url = {http://arxiv.org/abs/1412.6980},
  file = {:papers/ADAM A METHOD FOR STOCHASTIC OPTIMIZATION.pdf:PDF},
}
@Online{census2016,
  author = {{Australian Bureau of Statistics}},
  title = {Statistical Areas Level 1 6106906 and 6106909},
  year = {2016},
  url = {http://www.abs.gov.au/websitedbs/D3310114.nsf/Home/2016%20search%20by%20geography},
  note = {Census},
}
@Article{srivastava14a,
  author = {Srivastava, Nitish and Hinton, Geoffrey and Krizhevsky, Alex and Sutskever, Ilya and Salakhutdinov, Ruslan},
  title = {Dropout: A Simple Way to Prevent Neural Networks from Overfitting},
  journal = {Journal of Machine Learning Research},
  year = {2014},
  volume = {15},
  number = {1},
  month = jan,
  pages = {1929--1958},
  issn = {1532-4435},
  url = {http://dl.acm.org/citation.cfm?id=2627435.2670313},
  acmid = {2670313},
  file = {:papers/Dropout A Simple Way to Prevent Neural Networks from Overfitting.pdf:PDF},
  issue_date = {January 2014},
  keywords = {deep learning, model combination, neural networks, regularization},
  numpages = {30},
  publisher = {JMLR.org},
}
@Report{apvi2017,
  author = {Warwick Johnston},
  title = {National Survey Report of Photovoltaic Applications in {Australia} 2017},
  institution = {Australian PV Institute},
  year = {2017},
  date = {2017},
  url = {http://apvi.org.au/pv-in-australia-2017/},
  file = {:papers/National Survey Report of Photovoltaic Applications in AUSTRALIA 2017.pdf:PDF},
}
@report{bloomberg2018,
  author = {{Bloomberg New Energy Finance}},
  title  = {New Energy Outlook 2018},
  year   = {2018},
  date   = {2018},
  url    = {https://about.bnef.com/new-energy-outlook/},
}
@Article{Dehghani2018,
  author = {{Dehghani}, M. and {Gouws}, S. and {Vinyals}, O. and {Uszkoreit}, J. and {Kaiser}, {\L}.},
  title = {Universal {Transformers}},
  journal = {ArXiv e-prints},
  year = {2018},
  month = jul,
  eprint = {1807.03819},
  adsnote = {Provided by the SAO/NASA Astrophysics Data System},
  adsurl = {http://adsabs.harvard.edu/abs/2018arXiv180703819D},
  archiveprefix = {arXiv},
  file = {:papers/Universal Transformers.pdf:PDF},
  keywords = {Computer Science - Computation and Language, Computer Science - Machine Learning, Statistics - Machine Learning},
  primaryclass = {cs.CL},
}
@article{Rumelhart1986,
  author    = {David E. Rumelhart and Geoffrey E. Hinton and Ronald J. Williams},
  title     = {Learning representations by back-propagating errors},
  journal   = {Nature},
  year      = {1986},
  volume    = {323},
  number    = {6088},
  month     = {oct},
  pages     = {533--536},
  doi       = {10.1038/323533a0},
  publisher = {Springer Nature},
  file      = {:papers/Rumelhart1986.pdf:PDF},
}
@Article{Wang1999,
  author = {Chuan Wang and J. C. Principe},
  title = {Training neural networks with additive noise in the desired signal},
  journal = {{IEEE} Transactions on Neural Networks},
  year = {1999},
  volume = {10},
  number = {6},
  pages = {1511--1517},
  doi = {10.1109/72.809097},
  file = {:papers/Wang1999.pdf:PDF},
  publisher = {Institute of Electrical and Electronics Engineers ({IEEE})},
}
@Article{Brown2003,
  author = {Warick M. Brown and Tam{\'{a}}s D. Gedeon and David I. Groves},
  title = {Use of Noise to Augment Training Data: A Neural Network Method of Mineral-Potential Mapping in Regions of Limited Known Deposit Examples},
  journal = {Natural Resources Research},
  year = {2003},
  volume = {12},
  number = {2},
  pages = {141--152},
  doi = {10.1023/a:1024218913435},
  file = {:papers/Brown2003.pdf:PDF},
  publisher = {Springer Nature},
}
@article{Senjyu2004,
  author    = {T. Senjyu and P. Mandal and K. Uezato and T. Funabashi},
  title     = {Next day load curve forecasting using recurrent neural network structure},
  journal   = {{IEE} Proceedings - Generation, Transmission and Distribution},
  year      = {2004},
  volume    = {151},
  number    = {3},
  pages     = {388},
  doi       = {10.1049/ip-gtd:20040356},
  publisher = {Institution of Engineering and Technology ({IET})},
  file      = {:papers/Next day load curve forecasting using recurrent neural network structure.pdf:PDF},
}
@article{Dou2018,
  author    = {Chunxia Dou and Yuhang Zheng and Dong Yue and Zhanqiang Zhang and Kai Ma},
  title     = {Hybrid model for renewable energy and loads prediction based on data mining and variational mode decomposition},
  journal   = {{IET} Generation, Transmission {\&} Distribution},
  year      = {2018},
  volume    = {12},
  number    = {11},
  month     = {jun},
  pages     = {2642--2649},
  doi       = {10.1049/iet-gtd.2017.1476},
  publisher = {Institution of Engineering and Technology ({IET})},
  file      = {:papers/Hybrid model for renewable energy and loads prediction based on data mining and variational mode decomposition.pdf:PDF},
}
@article{Taylor2007,
  author    = {James W. Taylor and Patrick E. McSharry},
  title     = {Short-Term Load Forecasting Methods: An Evaluation Based on European Data},
  journal   = {{IEEE} Transactions on Power Systems},
  year      = {2007},
  volume    = {22},
  number    = {4},
  month     = {nov},
  pages     = {2213--2219},
  doi       = {10.1109/tpwrs.2007.907583},
  publisher = {Institute of Electrical and Electronics Engineers ({IEEE})},
  file      = {:papers/Short-Term Load Forecasting Methods An Evaluation Based on European Data.pdf:PDF},
}
@article{Amjady2001,
  author    = {N. Amjady},
  title     = {Short-term hourly load forecasting using time-series modeling with peak load estimation capability},
  journal   = {{IEEE} Transactions on Power Systems},
  year      = {2001},
  volume    = {16},
  number    = {3},
  pages     = {498--505},
  doi       = {10.1109/59.932287},
  publisher = {Institute of Electrical and Electronics Engineers ({IEEE})},
  file      = {:papers/Short-term hourly load forecasting using time-series modeling with peak load estimation capability.pdf:PDF},
}
@article{Ghelardoni2013,
  author    = {Luca Ghelardoni and Alessandro Ghio and Davide Anguita},
  title     = {Energy Load Forecasting Using Empirical Mode Decomposition and Support Vector Regression},
  journal   = {{IEEE} Transactions on Smart Grid},
  year      = {2013},
  volume    = {4},
  number    = {1},
  month     = {mar},
  pages     = {549--556},
  doi       = {10.1109/tsg.2012.2235089},
  publisher = {Institute of Electrical and Electronics Engineers ({IEEE})},
  file      = {:papers/Energy Load Forecasting Using Empirical Mode Decomposition and Support Vector Regression.pdf:PDF},
}
@Article{Shen1996TheEM,
  author = {Huang, Norden E. and Shen, Zheng and Long, Steven R. and Wu, Manli C. and Shih, Hsing H. and Zheng, Quanan and Yen, Nai-Chyuan and Tung, Chi Chao and Liu, Henry H.},
  title = {The empirical mode decomposition and the {Hilbert} spectrum for nonlinear and non-stationary time series analysis},
  journal = {Proceedings of the Royal Society of London. Series A: Mathematical, Physical and Engineering Sciences},
  year = {1998},
  volume = {454},
  number = {1971},
  pages = {903--995},
  doi = {10.1098/rspa.1998.0193},
  file = {:papers/The empirical mode decomposition and the Hilbert spectrum for nonlinear and non ‐ stationary time series analysis.pdf:PDF},
}
@article{Ding2016,
  author    = {Ni Ding and Clementine Benoit and Guillaume Foggia and Yvon Besanger and Frederic Wurtz},
  title     = {Neural Network-Based Model Design for Short-Term Load Forecast in Distribution Systems},
  journal   = {{IEEE} Transactions on Power Systems},
  year      = {2016},
  volume    = {31},
  number    = {1},
  month     = {jan},
  pages     = {72--81},
  doi       = {10.1109/tpwrs.2015.2390132},
  publisher = {Institute of Electrical and Electronics Engineers ({IEEE})},
  file      = {:papers/Neural Network-Based Model Design for Short-Term Load Forecast in Distribution Systems.pdf:PDF},
}
@Article{Hornik1989MultilayerFN,
  author = {Kurt Hornik and Maxwell B. Stinchcombe and Halbert White},
  title = {Multilayer feedforward networks are universal approximators},
  journal = {Neural Networks},
  year = {1989},
  volume = {2},
  number = {5},
  pages = {359--366},
  doi = {10.1016/0893-6080(89)90020-8},
  file = {:papers/Multilayer feedforward networks are universal approximators.pdf:PDF},
}
@Article{Kamaev2012,
  author = {Kamaev, V. A. and Shcherbakov, M. V. and Panchenko, D. P. and Shcherbakova, N. L. and Brebels, A.},
  title = {Using connectionist systems for electric energy consumption forecasting in shopping centers},
  journal = {Automation and Remote Control},
  year = {2012},
  volume = {73},
  number = {6},
  month = jun,
  pages = {1075--1084},
  issn = {1608-3032},
  doi = {10.1134/S0005117912060124},
  url = {https://doi.org/10.1134/S0005117912060124},
  abstract = {A solution is presented for the short-term electrical energy forecasting in shopping centers located in the Netherlands and Belgium. A forecasting method is proposed on the basis of connectionist systems. General description of the forecasting method is provided, as well as its specific features with respect to the forecasting problem are studied. Several connectionist models are generalized, stated and applied, notably, moving average model, linear regression model, and neural network model. In addition, changes in forecasting quality are demonstrated depending on different input variables. The results of using these connectionist models are discussed, and conclusions regarding specific features of every model are outlined.},
  day = {01},
  file = {:papers/Using connectionist systems for electric energy consumption forecasting in shopping centers.pdf:PDF},
}
@article{Hernandez2014,
  author    = {Luis Hern{\'{a}}ndez and Carlos Baladr{\'{o}}n and Javier M. Aguiar and Bel{\'{e}}n Carro and Antonio S{\'{a}}nchez-Esguevillas and Jaime Lloret},
  title     = {Artificial neural networks for short-term load forecasting in microgrids environment},
  journal   = {Energy},
  year      = {2014},
  volume    = {75},
  month     = {oct},
  pages     = {252--264},
  doi       = {10.1016/j.energy.2014.07.065},
  publisher = {Elsevier {BV}},
  file      = {:papers/Artificial neural networks for short-term load forecasting in microgrids environment.pdf:PDF},
}
@article{Sun2016,
  author    = {Xiaorong Sun and Peter B. Luh and Kwok W. Cheung and Wei Guan and Laurent D. Michel and S. S. Venkata and Melanie T. Miller},
  title     = {An Efficient Approach to Short-Term Load Forecasting at the Distribution Level},
  journal   = {{IEEE} Transactions on Power Systems},
  year      = {2016},
  volume    = {31},
  number    = {4},
  month     = {jul},
  pages     = {2526--2537},
  doi       = {10.1109/tpwrs.2015.2489679},
  publisher = {Institute of Electrical and Electronics Engineers ({IEEE})},
  file      = {:papers/An Efficient Approach to Short-Term Load Forecasting at the Distribution Level.pdf:PDF},
}
@article{Arora2013,
  author    = {Siddharth Arora and James W. Taylor},
  title     = {Short-Term Forecasting of Anomalous Load Using Rule-Based Triple Seasonal Methods},
  journal   = {{IEEE} Transactions on Power Systems},
  year      = {2013},
  volume    = {28},
  number    = {3},
  month     = {aug},
  pages     = {3235--3242},
  doi       = {10.1109/tpwrs.2013.2252929},
  publisher = {Institute of Electrical and Electronics Engineers ({IEEE})},
  file      = {:papers/Short-term Forecasting of Anomalous Load Using Rule-based Triple Seasonal Methods.pdf:PDF},
}
@Article{Chung2014,
  author = {Chung, Junyoung and Gulcehre, Caglar and Cho, KyungHyun and Bengio, Yoshua},
  title = {Empirical Evaluation of Gated Recurrent Neural Networks on Sequence Modeling},
  journal = {arXiv preprint arXiv:1412.3555},
  year = {2014},
  month = dec,
  eprint = {1412.3555},
  archiveprefix = {arXiv},
  primaryclass = {cs.NE},
  url = {http://arxiv.org/abs/1412.3555},
  file = {:papers/Empirical Evaluation of Gated Recurrent Neural Networks on Sequence Modeling.pdf:PDF},
}
@inproceedings{jozefowicz2015empirical,
  author    = {Jozefowicz, Rafal and Zaremba, Wojciech and Sutskever, Ilya},
  title     = {An empirical exploration of recurrent network architectures},
  booktitle = {International Conference on Machine Learning},
  year      = {2015},
  pages     = {2342--2350},
  file      = {:papers/An empirical exploration of recurrent network architectures.pdf:PDF},
}
@Article{le2015simple,
  author = {Le, Quoc V. and Jaitly, Navdeep and Hinton, Geoffrey E.},
  title = {A simple way to initialize recurrent networks of rectified linear units},
  journal = {arXiv preprint arXiv:1504.00941},
  year = {2015},
  eprint = {1504.00941},
  archiveprefix = {arXiv},
  primaryclass = {cs.NE},
  url = {http://arxiv.org/abs/1504.00941},
  file = {:papers/A simple way to initialize recurrent networks of rectified linear units.pdf:PDF},
}
@InProceedings{Chiu2017,
  author = {C. Chiu and T. N. Sainath and Y. Wu and R. Prabhavalkar and P. Nguyen and Z. Chen and A. Kannan and R. J. Weiss and K. Rao and E. Gonina and N. Jaitly and B. Li and J. Chorowski and M. Bacchiani},
  title = {State-of-the-Art Speech Recognition with Sequence-to-Sequence Models},
  booktitle = {2018 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP)},
  year = {2018},
  month = apr,
  pages = {4774--4778},
  doi = {10.1109/ICASSP.2018.8462105},
  file = {:papers/STATE-OF-THE-ART SPEECH RECOGNITION WITH SEQUENCE-TO-SEQUENCE MODELS.pdf:PDF},
  issn = {2379-190X},
  keywords = {decoding;recurrent neural nets;speech recognition;automatic speech recognition system;neural network;ASR systems;voice search task;listen attend and spell model;acoustic pronunciation and language model;dictation task;streaming recognition;unidirectional LSTM encoder;minimum word error rate optimization;single-head attention;multihead attention architecture;word piece models;LAS model;dictation tasks;language model components;encoder-decoder architectures;sequence-to-sequence models;Training;Hidden Markov models;Decoding;Task analysis;Optimization;Acoustics;Neural networks},
}
@inproceedings{yao2017boosting,
  author    = {Yao, Ting and Pan, Yingwei and Li, Yehao and Qiu, Zhaofan and Mei, Tao},
  title     = {Boosting image captioning with attributes},
  booktitle = {IEEE International Conference on Computer Vision, ICCV},
  year      = {2017},
  pages     = {22--29},
  file      = {:papers/Boosting image captioning with attributes.pdf:PDF},
}
@Article{Karim2018,
  author = {F. Karim and S. Majumdar and H. Darabi and S. Chen},
  title = {LSTM Fully Convolutional Networks for Time Series Classification},
  journal = {IEEE Access},
  year = {2018},
  volume = {6},
  pages = {1662--1669},
  issn = {2169-3536},
  doi = {10.1109/ACCESS.2017.2779939},
  file = {:papers/LSTM Fully Convolutional Networks for Time Series Classification.pdf:PDF},
  keywords = {learning (artificial intelligence);pattern classification;recurrent neural nets;time series;data set preprocessing;LSTM cell;ALSTM-FCN;LSTM-FCN;FCNs;decision process;fully convolutional neural networks;attention long short term memory fully convolutional network;LSTM RNN;long short term memory recurrent neural network sub-modules;time series classification;Time series analysis;Recurrent neural networks;Feature extraction;Convolution;Computer architecture;Machine learning;Machine learning algorithms;Convolutional neural network;long short term memory recurrent neural network;time series classification},
}
@Article{Shi2018,
author = {H. Shi and M. Xu and R. Li},
title = {Deep Learning for Household Load Forecasting—A Novel Pooling Deep RNN},
journal = {IEEE Transactions on Smart Grid},
year = {2018},
volume = {9},
number = {5},
month = {Sept},
pages = {5271-5280},
issn = {1949-3053},
doi = {10.1109/TSG.2017.2686012},
file = {:papers/Deep Learning for Household Load Forecasting – A Novel Pooling Deep RNN.pdf:PDF},