Skip to content

Commit 92354c0

Browse files
committed
initial dev commit
Former-commit-id: 35db27a Former-commit-id: a056b11
1 parent e545679 commit 92354c0

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

53 files changed

+3085
-699
lines changed

README.md

+200-85
Large diffs are not rendered by default.

TODO.md

+6
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
= TODO =
2+
3+
1. Refuse to start if contact is not given (unless --force)
4+
3. Add parameter with a name that needs to be escaped
5+
5. Add parameter with non-uniform cadence
6+
6. Add parameters with non-uniform bin centers and/or gaps in bin ranges

bin/Example.py

+9-1
Original file line numberDiff line numberDiff line change
@@ -27,10 +27,18 @@
2727
import dateutil.parser
2828
import re
2929

30+
from signal import signal, SIGPIPE, SIG_DFL
31+
# Trap broken pipe signal so that usage in the form of
32+
# python ./bin/Example.py | python lib/subset.py ...
33+
# does not throw error when subset.py terminates read
34+
# of output of Example.py.
35+
signal(SIGPIPE, SIG_DFL)
36+
3037
parser = argparse.ArgumentParser()
38+
parser.add_argument('--id',default='dataset1') # Not used
3139
parser.add_argument('--params',default='')
3240
parser.add_argument('--start',default='1970-01-01Z')
33-
parser.add_argument('--stop',default='1970-01-01T00:00:11Z')
41+
parser.add_argument('--stop',default='1971-01-01Z')
3442
parser.add_argument('--fmt',default='csv')
3543

3644
v = vars(parser.parse_args())

bin/INTERMAGNET.py

+99
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,99 @@
1+
import os
2+
import re
3+
import sys
4+
import gzip
5+
import pickle
6+
import argparse
7+
import datetime
8+
import urllib.request
9+
10+
# TODO:
11+
# * Read INTERMAGNET-info.pkl (Convert INTERMAGNET-info.json
12+
# to pkl in create_manifest.pkl)
13+
14+
server = 'ftp://ftp.seismo.nrcan.gc.ca'
15+
16+
parser = argparse.ArgumentParser()
17+
parser.add_argument('--id', default='minute/definitive/bou')
18+
parser.add_argument('--start', default='2017-12-01T00:00:00.000000000Z')
19+
parser.add_argument('--stop', default='2017-12-02T00:00:00.000000000Z')
20+
args = vars(parser.parse_args())
21+
22+
id = args['id']
23+
start = args['start']
24+
stop = args['stop']
25+
26+
def download(url, start, stop):
27+
""""""
28+
start = re.sub(r"T"," ", start[0:18])
29+
stop = re.sub(r"T"," ", stop[0:18])
30+
31+
path = os.path.dirname(os.path.realpath(sys.argv[0]))
32+
#print(path)
33+
path = os.path.join(path,'..','metadata','INTERMAGNET','tmp')
34+
path = os.path.realpath(path)
35+
if not os.path.exists(path):
36+
os.mkdirs(path)
37+
38+
filename = os.path.join(path, url.split("/")[-1])
39+
if not os.path.exists(filename):
40+
#print("Downloading " + url)
41+
try:
42+
urllib.request.urlretrieve(url, filename)
43+
except Exception as e:
44+
with open('bin/INTERMAGNET-error.log','at') as f:
45+
f.write(e + ": " + url + "\n")
46+
f.close()
47+
return
48+
49+
#print("Reading " + filename)
50+
try:
51+
if re.search(r'\.gz$', filename):
52+
with gzip.open(filename, 'rt', errors='replace') as f: lines = f.readlines()
53+
else:
54+
with open(filename, 'rt', errors='replace') as f: lines = f.readlines()
55+
except Exception as e:
56+
#print("Problem reading " + filename)
57+
with open('bin/INTERMAGNET-error.log','at') as f:
58+
f.write(e + ": " + filename + "\n")
59+
f.close()
60+
return
61+
62+
for line in lines:
63+
if re.match(r"[0-9]{4}",line):
64+
if line[0:18] >= start and line[0:18] < stop:
65+
# Make comma separated
66+
line = re.sub(r"\s+", ",", line.strip())
67+
# Replace space in 'YYYY-MM-DD HH:MM:SS.FFF' with T
68+
line = line[0:10] + "T" + line[11:23] + "Z" + line[23:]
69+
print(line)
70+
71+
path = os.path.dirname(os.path.realpath(sys.argv[0]))
72+
#print(path)
73+
fnamepkl = os.path.join(path,'..','metadata','INTERMAGNET','INTERMAGNET-manifest.pkl')
74+
fnamepkl = os.path.realpath(fnamepkl)
75+
#print(fnamepkl)
76+
77+
f = open(fnamepkl, 'rb')
78+
S = pickle.load(f)
79+
f.close()
80+
#print(S[id])
81+
#print(S[id]['dates'])
82+
83+
startdt = datetime.datetime.strptime(start[0:10], '%Y-%m-%d')
84+
stopdt = datetime.datetime.strptime(stop[0:10], '%Y-%m-%d')
85+
stepdt = datetime.timedelta(days=1)
86+
#print(startdt)
87+
#print(stopdt)
88+
# If last date is midnight, last date to look for file is before this
89+
# (stop date is exlusive)
90+
if stop[10:] == "T00:00:00.000000000Z":
91+
stopdt = stopdt - stepdt
92+
93+
while startdt <= stopdt:
94+
date = startdt.strftime('%Y%m%d')
95+
if date in S[id]['dates']:
96+
#print("File exists for " + date)
97+
#print("Downloading " + S[id]['dates'][date])
98+
download(server + S[id]['dates'][date], start, stop)
99+
startdt += stepdt

bin/QinDenton.py

+51
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,51 @@
1+
import re
2+
import os
3+
import sys
4+
import argparse
5+
from datetime import datetime
6+
7+
parser = argparse.ArgumentParser()
8+
parser.add_argument('--start',default='1964-01-01T00:00:00.000Z')
9+
parser.add_argument('--stop',default='1964-01-02T00:00:00.000Z')
10+
args = vars(parser.parse_args())
11+
12+
start, stop = args['start'], args['stop']
13+
14+
def convert(timestr):
15+
timestr = re.sub(r'(.*) 00([0-9])',r"\1 \2", timestr)
16+
timestr = re.sub(r'(.*) 0([0-9]{2})',r"\1 \2", timestr)
17+
timestr = re.sub(r'(.*) 0([0-9])',r"\1 \2", timestr)
18+
return timestr
19+
20+
# Convert from input ISO 8601 time to format used in file
21+
startp = datetime.strptime(start,"%Y-%m-%dT%H:%M:%S.%fZ").strftime(" %Y %j %H")
22+
stopp = datetime.strptime(stop,"%Y-%m-%dT%H:%M:%S.%fZ").strftime(" %Y %j %H")
23+
24+
# Strip out leading zeros as in file
25+
startp = convert(startp)
26+
stopp = convert(stopp)
27+
28+
# Save file locally if not found
29+
filename = './public/data/QinDenton/WGhour.d'
30+
if not os.path.exists('./public/data/QinDenton/WGhour.d'):
31+
import urllib.request
32+
os.makedirs('./public/data/QinDenton/')
33+
url = 'http://mag.gmu.edu/ftp/QinDenton/hour/merged/latest/WGhour-latest.d'
34+
print('QinDenton.py: Downloading %s' % url)
35+
urllib.request.urlretrieve(url, filename)
36+
37+
file = open(filename, "r")
38+
39+
# The following avoids the use of date parsing by using fact that
40+
# ASCII values for time in the file are monotonically increasing
41+
# so that >= and < can be used to find start and stop times.
42+
n = 0
43+
for line in file:
44+
timestr = line[0:24]
45+
if n > 0 and timestr >= startp and timestr < stopp:
46+
data = re.sub(r"\s+", ",", line[14:-1].strip())
47+
sys.stdout.write('%s-%03dT%02dZ,%s\n' % (line[1:5],int(line[6:9]),int(line[10:12]),data))
48+
if n > 0 and timestr >= stopp:
49+
sys.stdout.flush()
50+
break
51+
n = n + 1

bin/TestData.js

+8-13
Original file line numberDiff line numberDiff line change
@@ -30,17 +30,6 @@ if (stop.length == 8 || stop.length == 10) { // YYYY-DOY
3030
stop = stop + "T00:00:00.000";
3131
}
3232

33-
if (start === "1970-01-01T00:00:10.000" && stop === "1970-01-01T00:00:20.000") {
34-
// For testing verifier for intervals with no data.
35-
// If request in this time range, return zero bytes for single parameter request
36-
// and data for all parameter request.
37-
if (!all && id === 'dataset0') {
38-
process.exit(0); // Exit if id=dataset0 and not all parameters requested.
39-
} else {
40-
process.exit(0);
41-
}
42-
}
43-
4433
var startsec = moment(start+"Z").valueOf()/1000;
4534
var stopsec = moment(stop+"Z").valueOf()/1000;
4635

@@ -164,8 +153,13 @@ for (var i = startsec; i < stopsec; i++) {
164153
record = record.replace(/,/g,", "); // Make dataset0 use space after comma.
165154
}
166155

156+
if (i > 9 && i < 20) {
157+
record = "";
158+
}
159+
167160
if (records.length > 0) {
168-
records = records + "\n" + record;
161+
if (record.length > 0)
162+
records = records + "\n" + record;
169163
} else {
170164
records = record;
171165
}
@@ -176,7 +170,8 @@ for (var i = startsec; i < stopsec; i++) {
176170

177171
if (flush) {
178172
if (id !== "dataset0") {
179-
console.log(records); // Correct way.
173+
if (records.length > 0)
174+
console.log(records); // Correct way.
180175
} else {
181176
// Make time non-monotonic for dataset0.
182177
records = records.split("\n");

bin/autoplot.jar.REMOVED.git-id

+1-1
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
cec3c7d3baed08c28382da19b1397917265e6edf
1+
d6774f3e54ed4ff694923829b6d2b43dddd8f1b1

conf/Dockerfile

+7
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
FROM node:7-alpine
2+
WORKDIR /app
3+
COPY package.json /app
4+
RUN npm install
5+
COPY . /app
6+
CMD node index.js
7+
EXPOSE 8081

conf/docker.txt

+7
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
https://nodejs.org/en/docs/guides/nodejs-docker-webapp/
2+
https://semaphoreci.com/community/tutorials/dockerizing-a-node-js-web-application
3+
https://docs.docker.com/docker-hub/builds/#prerequisites
4+
https://hub.docker.com/r/rweigel/hapi-reference-server/
5+
6+
https://buddy.works/guides/how-dockerize-node-application
7+
https://medium.com/datreeio/node-js-docker-workflow-b9d936c931e1

0 commit comments

Comments
 (0)