Oleksandr Barabash / df-aggregator / Commits

Commit 7b6dda83, authored Dec 17, 2020 by Corey Koval

    Somehow made things worse. Yolo

Parent: ca43226b

Showing 2 changed files with 215 additions and 132 deletions:
    df-aggregator.py    +213  -132
    static/style.css    +2    -0
df-aggregator.py
@@ -8,7 +8,6 @@ import sqlite3
 import threading
 import signal
 import json
-# import hashlib
 from colorsys import hsv_to_rgb
 from optparse import OptionParser
 from os import system, name, kill, getpid
@@ -18,7 +17,19 @@ from sklearn.preprocessing import StandardScaler, minmax_scale
 from geojson import Point, MultiPoint, Feature, FeatureCollection
 from czml3 import Packet, Document, Preamble
 from czml3.properties import Position, Polyline, PolylineOutlineMaterial, Color, Material
-from bottle import route, run, request, get, post, put, response, redirect, template, static_file
+import warnings
+with warnings.catch_warnings():
+    warnings.simplefilter("ignore")
+    from bottle import route, run, request, get, post, put, response, redirect, template, static_file
+# warnings.filterwarnings(action="ignore", message="unclosed", category=ResourceWarning)
+
+from multiprocessing import Process, Queue
+DBSCAN_Q = Queue()
+DATABASE_EDIT_Q = Queue()
+DATABASE_RETURN = Queue()
 import sys
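The three module-level queues added above are the plumbing for the rest of this commit: DBSCAN_Q carries clustering labels back from a worker process, while DATABASE_EDIT_Q and DATABASE_RETURN form a request/acknowledge pair between the request handlers and a single SQLite-writing process (database_writer, added further down in this diff). A minimal, self-contained sketch of that round trip follows; handle_one is illustrative only, and the queues are passed as arguments so the sketch also works under the "spawn" start method, whereas the script itself relies on module-level globals:

from multiprocessing import Process, Queue

def handle_one(edit_q, return_q):
    # Stand-in for database_writer(): take one (sql, params) request and acknowledge it.
    command, items = edit_q.get()
    print("would execute:", command, items)
    return_q.put(True)

if __name__ == "__main__":
    DATABASE_EDIT_Q = Queue()   # (sql, params) requests
    DATABASE_RETURN = Queue()   # acknowledgements
    writer = Process(target=handle_one, args=(DATABASE_EDIT_Q, DATABASE_RETURN))
    writer.start()
    DATABASE_EDIT_Q.put(("DELETE FROM intersects WHERE latitude=? AND longitude=?", (1.0, 2.0)))
    DATABASE_RETURN.get(timeout=1)   # block until the writer has picked the request up
    writer.join()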
@@ -191,28 +202,28 @@ def plot_intersects(lat_a, lon_a, doa_a, lat_b, lon_b, doa_b, max_distance = 100
     else:
         return None

+def do_dbscan(X):
+    # print("Starting process")
+    db = DBSCAN(eps=ms.eps, min_samples=ms.min_samp).fit(X)
+    DBSCAN_Q.put(db.labels_)
+    # print("Finished Process")
+
 ###############################################
 # Computes DBSCAN Alorithm is applicable,
 # finds the mean of a cluster of intersections.
 ###############################################
-def process_data(database_name, outfile):
+def process_data(database_name):
     conn = sqlite3.connect(database_name)
     c = conn.cursor()
     intersect_list = []
     try:
-        # c.execute("SELECT COUNT(*) FROM intersects")
-        # n_intersects = int(c.fetchone()[0])
-        # 200000 eats 22GB of RAM
-        # 187500 eats 13GB of RAM
-        # 175000 eats 1661 MB of RAM
-        # 150000 eats 976 MB of RAM
-        # 140000 eats 920 MB of RAM
-        c.execute("SELECT longitude, latitude, time FROM intersects LIMIT 120000")
+        c.execute("SELECT longitude, latitude, time FROM intersects ORDER BY confidence LIMIT 50000")
         intersect_array = np.array(c.fetchall())
     except sqlite3.OperationalError:
         n_intersects = 0
         intersect_array = np.array([])
+    conn.close()
     likely_location = []
     # weighted_location = []
     ellipsedata = []
@@ -221,7 +232,7 @@ def process_data(database_name, outfile):
     if intersect_array.size != 0:
         if ms.eps > 0:
-            X = StandardScaler().fit_transform(intersect_array[:,0:2:])
+            X = StandardScaler().fit_transform(intersect_array[:,0:2])
             # X = np.radians(intersect_array[:,0:2])
             n_points = len(X)
@@ -236,11 +247,15 @@ def process_data(database_name, outfile):
             starttime = time.time()
             # , algorithm='ball_tree', metric='haversine'
-            db = DBSCAN(eps=ms.eps, min_samples=ms.min_samp).fit(X)
+            db = Process(target=do_dbscan, args=(X,))
+            db.start()
+            labels = DBSCAN_Q.get()
+            db.join()
+            # db = DBSCAN(eps=ms.eps, min_samples=ms.min_samp).fit(X)
             stoptime = time.time()
             print(f"DBSCAN took {stoptime - starttime} seconds to compute the clusters.")
-            labels = db.labels_
+            # labels = db.labels_
+            # del db
             intersect_array = np.column_stack((intersect_array, labels))
             # Number of clusters in labels, ignoring noise if present.
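The change above runs the DBSCAN fit in a child process and pulls the labels back through DBSCAN_Q, so the memory the fit consumes is released when the child exits. A rough standalone version of the same idea; the eps and min_samples values are placeholders rather than the script's ms.eps / ms.min_samp settings, and the result queue is passed explicitly so the sketch also works under the "spawn" start method:

import numpy as np
from multiprocessing import Process, Queue
from sklearn.cluster import DBSCAN

def do_dbscan(X, out_q, eps=0.1, min_samples=5):
    # Fit in the child process and ship only the labels back to the parent.
    db = DBSCAN(eps=eps, min_samples=min_samples).fit(X)
    out_q.put(db.labels_)

if __name__ == "__main__":
    X = np.random.rand(1000, 2)
    result_q = Queue()
    worker = Process(target=do_dbscan, args=(X, result_q))
    worker.start()
    labels = result_q.get()   # get() before join(), as in the diff, so the child is never blocked on a full pipe
    worker.join()
    print("clusters found:", len(set(labels)) - (1 if -1 in labels else 0))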
@@ -267,10 +282,8 @@ def process_data(database_name, outfile):
                 pearson = b / np.sqrt(a * c)
                 ell_radius_x = np.sqrt(1 + pearson) * np.sqrt(a) * n_std
                 ell_radius_y = np.sqrt(1 - pearson) * np.sqrt(c) * n_std
-                axis_x = v.inverse(Reverse(clustermean.tolist()), (ell_radius_x + clustermean[1], clustermean[0]))[0]
+                axis_x = v.inverse(clustermean.tolist()[::-1], (ell_radius_x + clustermean[1], clustermean[0]))[0]
-                axis_y = v.inverse(Reverse(clustermean.tolist()), (clustermean[1], ell_radius_y + clustermean[0]))[0]
+                axis_y = v.inverse(clustermean.tolist()[::-1], (clustermean[1], ell_radius_y + clustermean[0]))[0]
-                # axis_x = v.inverse(clustermean.tolist(), (ell_radius_x + clustermean[1], clustermean[0]))[0]
-                # axis_y = v.inverse(clustermean.tolist(), (clustermean[1], ell_radius_y + clustermean[0]))[0]
                 if b == 0 and a >= c:
                     rotation = 0
@@ -282,10 +295,11 @@ def process_data(database_name, outfile):
                 ellipsedata.append([axis_x, axis_y, rotation, *clustermean.tolist()])
         for x in likely_location:
-            print(Reverse(x))
+            print(x[::-1])
-        # else:
-        #     likely_location = None
+    ##########################
+    # OPTOMIZE ME
+    ##########################
     for x in intersect_array:
         try:
             if x[-1] >= 0:
@@ -293,7 +307,6 @@ def process_data(database_name, outfile):
         except IndexError:
             intersect_list.append(x.tolist())
     else:
         print("No Intersections.")
         # return None
@@ -312,18 +325,22 @@ def purge_database(type, lat, lon, radius):
         intersect_list = c.fetchall()
     except sqlite3.OperationalError:
         intersect_list = []
+    conn.close()
     purge_count = 0
     for x in intersect_list:
         if type == "exclusion":
             distance = v.inverse(x, (lat, lon))[0]
             if distance < radius:
-                c.execute("DELETE FROM intersects WHERE latitude=? AND longitude=?", x)
+                # c.execute("DELETE FROM intersects WHERE latitude=? AND longitude=?", x)
+                command = "DELETE FROM intersects WHERE latitude=? AND longitude=?"
+                DATABASE_EDIT_Q.put((command, x))
+                DATABASE_RETURN.get(timeout=1)
                 purge_count += 1
         elif type == "aoi":
             pass
-    conn.commit()
-    conn.close()
+    # conn.commit()
+    DATABASE_EDIT_Q.put(("done", None))
     print(f"I purged {purge_count} intersects.")

 ###############################################
@@ -338,7 +355,7 @@ def write_geojson(best_point, all_the_points):
     if best_point != None:
         reversed_best_point = []
         for x in best_point:
-            reversed_best_point.append(Reverse(x))
+            reversed_best_point.append(x[::-1])
         best_point = Feature(properties=best_pt_style, geometry=MultiPoint(tuple(reversed_best_point)))
         file1.write(str(FeatureCollection([best_point, all_the_points])))
     else:
@@ -389,7 +406,8 @@ def write_czml(best_point, all_the_points, ellipsedata):
     scaled_time = minmax_scale(all_the_points[:,-1])
     all_the_points = np.column_stack((all_the_points, scaled_time))
     for x in all_the_points:
-        rgb = hsvtorgb(x[-1]/3, 0.9, 0.9)
+        # rgb = hsvtorgb(x[-1]/3, 0.9, 0.9)
+        rgb = map(lambda x: int(x*255), hsv_to_rgb(x[-1]/3, 0.9, 0.9))
         color_property = {"color":{"rgba": [*rgb, 255]}}
         all_point_packets.append(Packet(id=str(x[1]) + ", " + str(x[0]),
                                         point={**point_properties, **color_property},
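The new rgb line above inlines what the hsvtorgb() helper (deleted later in this diff) used to do: colorsys.hsv_to_rgb returns three floats in the 0-1 range, which get scaled to 0-255 for the CZML "rgba" property. A small illustrative sketch of that conversion, where scaled_age stands in for the normalized time value x[-1]:

from colorsys import hsv_to_rgb

def to_rgba(scaled_age, alpha=255):
    # Hue is squeezed into [0, 1/3] (red through green), matching the x[-1]/3 in the diff.
    rgb = [int(channel * 255) for channel in hsv_to_rgb(scaled_age / 3, 0.9, 0.9)]
    return rgb + [alpha]

print(to_rgba(0.0))  # hue 0   -> reddish:  [229, 22, 22, 255]
print(to_rgba(1.0))  # hue 1/3 -> greenish: [22, 229, 22, 255]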
@@ -400,7 +418,7 @@ def write_czml(best_point, all_the_points, ellipsedata):
         for x in best_point:
             best_point_packets.append(Packet(id=str(x[0]) + ", " + str(x[1]),
                                              point=best_point_properties,
-                                             position={"cartographicDegrees": [x[1], x[0], 15]}))
+                                             position={"cartographicDegrees": [x[0], x[1], 15]}))
     if len(ellipsedata) > 0:
         for x in ellipsedata:
@@ -528,36 +546,6 @@ def write_rx_czml():
     return output

 ###############################################
-# Converts HSV color values to RGB.
-###############################################
-def hsvtorgb(h, s, v):
-    rgb_out = []
-    rgb = hsv_to_rgb(h, s, v)
-    for x in rgb:
-        rgb_out.append(int(x*255))
-    return rgb_out
-
-###############################################
-# Thangs to do before closing the program.
-###############################################
-def finish():
-    clear(debugging)
-    print("Processing, please wait.")
-    ms.receiving = False
-    update_rx_table()
-    if geofile != None:
-        write_geojson(*process_data(database_name, geofile)[:2])
-    kill(getpid(), signal.SIGTERM)
-
-###############################################
-# Returns a reverse ordered list.
-# This should probably be replaced.
-###############################################
-def Reverse(lst):
-    lst.reverse()
-    return lst
-
-###############################################
 # CLears the screen if debugging is off.
 ###############################################
 def clear(debugging):
@@ -569,7 +557,6 @@ def clear(debugging):
     else:
         _ = system('clear')

 ###############################################
 # Serves static files such as CSS and JS to the
 # WebUI
@@ -647,7 +634,7 @@ def rx_params():
 ###############################################
 @get('/output.czml')
 def tx_czml_out():
-    output = write_czml(*process_data(database_name, geofile))
+    output = write_czml(*process_data(database_name))
     return str(output)

 ###############################################
@@ -724,11 +711,16 @@ def handle_interest_areas(action):
             radius = data['radius']
             add_aoi(aoi_type, lat, lon, radius)
         elif action == "del":
-            conn = sqlite3.connect(database_name)
-            c = conn.cursor()
-            c.execute("DELETE FROM interest_areas WHERE uid=?", [data['uid']])
-            conn.commit()
-            conn.close()
+            # conn = sqlite3.connect(database_name)
+            # c = conn.cursor()
+            # c.execute("DELETE FROM interest_areas WHERE uid=?", [data['uid']])
+            to_table = data['uid']
+            command = "DELETE FROM interest_areas WHERE uid=?"
+            DATABASE_EDIT_Q.put((command, to_table))
+            DATABASE_RETURN.get(timeout=1)
+            DATABASE_EDIT_Q.put(("done", None))
+            # conn.commit()
+            # conn.close()
         elif action == "purge":
             conn = sqlite3.connect(database_name)
             c = conn.cursor()
@@ -754,10 +746,10 @@ def run_receiver(receivers):
     conn = sqlite3.connect(database_name)
     c = conn.cursor()
-    c.execute('''CREATE TABLE IF NOT EXISTS intersects (time INTEGER, latitude REAL,
-        longitude REAL, num_parents INTEGER)''')
-    c.execute('''CREATE TABLE IF NOT EXISTS lobs (time INTEGER, station_id TEXT,
-        latitude REAL, longitude REAL, confidence INTEGER, lob REAL)''')
+    # c.execute('''CREATE TABLE IF NOT EXISTS intersects (time INTEGER, latitude REAL,
+    #     longitude REAL, num_parents INTEGER, confidence INTEGER, aoi_id INTEGER)''')
+    # c.execute('''CREATE TABLE IF NOT EXISTS lobs (time INTEGER, station_id TEXT,
+    #     latitude REAL, longitude REAL, confidence INTEGER, lob REAL)''')
     while ms.receiving:
         if not debugging:
@@ -765,7 +757,7 @@ def run_receiver(receivers):
             print("Press Control+C to process data and exit.")
         # Main loop to compute intersections between multiple receivers
-        intersect_list = np.array([]).reshape(0,3)
+        intersect_list = np.array([]).reshape(0,4)
         for x in range(len(receivers)):
             for y in range(x):
                 if x != y:
@@ -779,20 +771,24 @@ def run_receiver(receivers):
                         receivers[x].doa, receivers[y].latitude, receivers[y].longitude, receivers[y].doa)
                     print(intersection)
                     if intersection:
-                        if check_aoi(*intersection):
+                        keep, in_aoi = check_aoi(*intersection)
+                        if keep:
                             intersection = list(intersection)
                             avg_conf = np.mean([receivers[x].confidence, receivers[y].confidence])
                             intersection.append(avg_conf)
+                            intersection.append(in_aoi)
                             intersection = np.array([intersection])
                             if intersection.any() != None:
                                 intersect_list = np.concatenate((intersect_list, intersection), axis=0)
             if intersect_list.size != 0:
-                avg_coord = np.average(intersect_list[:,0:2], weights=intersect_list[:,-1], axis=0)
+                avg_coord = np.average(intersect_list[:,0:3], weights=intersect_list[:,2], axis=0)
-                to_table = [receivers[x].doa_time, avg_coord[0], avg_coord[1], len(intersect_list)]
+                to_table = [receivers[x].doa_time, avg_coord[0], avg_coord[1], len(intersect_list), avg_coord[2], intersect_list[:,3]]
-                c.execute("INSERT INTO intersects VALUES (?,?,?,?)", to_table)
+                # c.execute("INSERT INTO intersects VALUES (?,?,?,?,?,?)", to_table)
+                # conn.commit()
+                command = "INSERT INTO intersects VALUES (?,?,?,?,?,?)"
+                DATABASE_EDIT_Q.put((command, to_table))
+                DATABASE_RETURN.get(timeout=1)
         # Loop to compute intersections for a single receiver and update all receivers
         for rx in receivers:
@@ -821,16 +817,29 @@ def run_receiver(receivers):
                     min_diversity = 500
                     if (spacial_diversity > min_diversity and
                         abs(doa_rxa - doa_rxb) > 5):
-                        intersection = compute_single_intersections(lat_rxa, lon_rxa, doa_rxa, conf_rxa,
-                            lat_rxb, lon_rxb, doa_rxb, conf_rxb)
+                        intersection = plot_intersects(lat_rxa, lon_rxa,
+                            doa_rxa, lat_rxb, lon_rxb, doa_rxb)
                         if intersection:
-                            if check_aoi(*intersection[0:2]):
+                            intersection = list(intersection)
+                            avg_conf = np.mean([conf_rxa, conf_rxb])
+                            intersection.append(avg_conf)
+                        # if intersection:
+                            keep, in_aoi = check_aoi(*intersection[0:2])
+                            if keep:
                                 print(intersection)
-                                to_table = [current_time, intersection[0], intersection[1], 1]
+                                to_table = [current_time, intersection[0], intersection[1], 1, intersection[2], in_aoi]
-                                c.execute("INSERT INTO intersects VALUES (?,?,?,?)", to_table)
+                                # c.execute("INSERT INTO intersects VALUES (?,?,?,?,?,?)", to_table)
+                                command = "INSERT INTO intersects VALUES (?,?,?,?,?,?)"
+                                DATABASE_EDIT_Q.put((command, to_table))
+                                DATABASE_RETURN.get(timeout=1)
-                c.execute(f"INSERT INTO lobs VALUES (?,?,?,?,?,?)", current_doa)
-                conn.commit()
+                # c.execute(f"INSERT INTO lobs VALUES (?,?,?,?,?,?)", current_doa)
+                command = "INSERT INTO lobs VALUES (?,?,?,?,?,?)"
+                DATABASE_EDIT_Q.put((command, current_doa))
+                DATABASE_RETURN.get(timeout=1)
+                # conn.commit()
+                DATABASE_EDIT_Q.put(("done", None))
                 try:
                     if rx.isActive: rx.update()
                 except IOError:
@@ -851,6 +860,7 @@ def run_receiver(receivers):
 ###############################################
 def check_aoi(lat, lon):
     keep_list = []
+    in_aoi = None
     conn = sqlite3.connect(database_name)
     c = conn.cursor()
     try:
@@ -861,6 +871,7 @@ def check_aoi(lat, lon):
     conn.close()
     if n_aoi == 0:
         keep_list.append(True)
+        in_aoi = -1
     for x in fetch_aoi_data():
         aoi = {
             'uid': x[0],
@@ -877,27 +888,12 @@ def check_aoi(lat, lon):
         elif aoi['aoi_type'] == "aoi":
             if distance < aoi['radius']:
                 keep_list.append(True)
+                in_aoi = aoi['uid']
             else:
                 keep_list.append(False)
     keep = any(keep_list)
-    return keep
+    return keep, in_aoi
-
-#################################################
-# Compute the intersection of two LOBS from
-# a single mobile receiver
-#################################################
-def compute_single_intersections(lat_rxa, lon_rxa, doa_rxa, conf_rxa,
-    lat_rxb, lon_rxb, doa_rxb, conf_rxb):
-    intersection = plot_intersects(lat_rxa, lon_rxa, doa_rxa, lat_rxb, lon_rxb, doa_rxb)
-    # print(type(intersection))
-    if intersection:
-        intersection = list(intersection)
-        avg_conf = np.mean([conf_rxa, conf_rxb])
-        intersection.append(avg_conf)
-    return intersection

 ###############################################
 # Adds a new receiver to the program, saves it
@@ -906,25 +902,30 @@ def compute_single_intersections(lat_rxa, lon_rxa, doa_rxa, conf_rxa,
 def add_receiver(receiver_url):
     conn = sqlite3.connect(database_name)
     c = conn.cursor()
-    c.execute('''CREATE TABLE IF NOT EXISTS receivers (
-        station_id TEXT UNIQUE,
-        station_url TEXT,
-        isAuto INTEGER,
-        isMobile INTEGER,
-        isSingle INTEGER,
-        latitude REAL,
-        longitude REAL)
-        ''')
+    # c.execute('''CREATE TABLE IF NOT EXISTS receivers (
+    #     station_id TEXT UNIQUE,
+    #     station_url TEXT,
+    #     isAuto INTEGER,
+    #     isMobile INTEGER,
+    #     isSingle INTEGER,
+    #     latitude REAL,
+    #     longitude REAL)
+    #     ''')
     try:
         if any(x.station_url == receiver_url for x in receivers):
             print("Duplicate receiver, ignoring.")
         else:
             receivers.append(receiver(receiver_url))
+            print(f"Receivers list is now {len(receivers)} long.")
             new_rx = receivers[-1].receiver_dict()
             to_table = [new_rx['station_id'], new_rx['station_url'], new_rx['auto'],
                         new_rx['mobile'], new_rx['single'], new_rx['latitude'], new_rx['longitude']]
-            c.execute("INSERT OR IGNORE INTO receivers VALUES (?,?,?,?,?,?,?)", to_table)
-            conn.commit()
+            # c.execute("INSERT OR IGNORE INTO receivers VALUES (?,?,?,?,?,?,?)", to_table)
+            # conn.commit()
+            command = "INSERT OR IGNORE INTO receivers VALUES (?,?,?,?,?,?,?)"
+            DATABASE_EDIT_Q.put((command, to_table))
+            DATABASE_RETURN.get(timeout=1)
+            DATABASE_EDIT_Q.put(("done", None))
             mobile = c.execute("SELECT isMobile FROM receivers WHERE station_id = ?",
                                [new_rx['station_id']]).fetchone()[0]
             single = c.execute("SELECT isSingle FROM receivers WHERE station_id = ?",
@@ -959,31 +960,45 @@ def read_rx_table():
 # the receivers.
 ###############################################
 def update_rx_table():
-    conn = sqlite3.connect(database_name)
-    c = conn.cursor()
+    # conn = sqlite3.connect(database_name)
+    # c = conn.cursor()
     for item in receivers:
         rx = item.receiver_dict()
         to_table = [rx['auto'], rx['mobile'], rx['single'], rx['latitude'], rx['longitude'], rx['station_id']]
-        c.execute('''UPDATE receivers SET
+        # c.execute('''UPDATE receivers SET
+        # isAuto=?,
+        # isMobile=?,
+        # isSingle=?,
+        # latitude=?,
+        # longitude=?
+        # WHERE station_id = ?''', to_table)
+        command = '''UPDATE receivers SET
         isAuto=?,
         isMobile=?,
         isSingle=?,
         latitude=?,
         longitude=?
-        WHERE station_id = ?''', to_table)
-    conn.commit()
-    conn.close()
+        WHERE station_id = ?'''
+        DATABASE_EDIT_Q.put((command, to_table))
+        DATABASE_RETURN.get(timeout=1)
+    DATABASE_EDIT_Q.put(("done", None))
+    # conn.commit()
+    # conn.close()

 ###############################################
 # Removes a receiver from the program and
 # database upon request.
 ###############################################
 def del_receiver(del_rx):
-    conn = sqlite3.connect(database_name)
-    c = conn.cursor()
-    c.execute("DELETE FROM receivers WHERE station_id=?", [del_rx])
-    conn.commit()
-    conn.close()
+    # conn = sqlite3.connect(database_name)
+    # c = conn.cursor()
+    # c.execute("DELETE FROM receivers WHERE station_id=?", [del_rx])
+    command = "DELETE FROM receivers WHERE station_id=?"
+    DATABASE_EDIT_Q.put((command, [del_rx]))
+    DATABASE_RETURN.get(timeout=1)
+    DATABASE_EDIT_Q.put(("done", None))
+    # conn.commit()
+    # conn.close()

 ###############################################
 # Updates the database with new interest areas.
@@ -991,19 +1006,23 @@ def del_receiver(del_rx):
 def add_aoi(aoi_type, lat, lon, radius):
     conn = sqlite3.connect(database_name)
     c = conn.cursor()
-    c.execute('''CREATE TABLE IF NOT EXISTS interest_areas (
-        uid INTEGER,
-        aoi_type TEXT,
-        latitude REAL,
-        longitude REAL,
-        radius INTEGER)
-        ''')
+    # c.execute('''CREATE TABLE IF NOT EXISTS interest_areas (
+    #     uid INTEGER,
+    #     aoi_type TEXT,
+    #     latitude REAL,
+    #     longitude REAL,
+    #     radius INTEGER)
+    #     ''')
     prev_uid = c.execute('SELECT MAX(uid) from interest_areas').fetchone()[0]
+    conn.close()
     uid = (prev_uid + 1) if prev_uid != None else 0
     to_table = [uid, aoi_type, lat, lon, radius]
-    c.execute('INSERT INTO interest_areas VALUES (?,?,?,?,?)', to_table)
-    conn.commit()
-    conn.close()
+    # c.execute('INSERT INTO interest_areas VALUES (?,?,?,?,?)', to_table)
+    command = 'INSERT INTO interest_areas VALUES (?,?,?,?,?)'
+    DATABASE_EDIT_Q.put((command, to_table))
+    DATABASE_RETURN.get(timeout=1)
+    DATABASE_EDIT_Q.put(("done", None))
+    # conn.commit()

 #########################################
 # Read all the AOIs from the DB
@@ -1019,6 +1038,64 @@ def fetch_aoi_data():
     conn.close()
     return aoi_list

+def database_writer():
+    conn = sqlite3.connect(database_name)
+    c = conn.cursor()
+    c.execute('''CREATE TABLE IF NOT EXISTS receivers (
+        station_id TEXT UNIQUE,
+        station_url TEXT,
+        isAuto INTEGER,
+        isMobile INTEGER,
+        isSingle INTEGER,
+        latitude REAL,
+        longitude REAL)
+        ''')
+    c.execute('''CREATE TABLE IF NOT EXISTS interest_areas (
+        uid INTEGER,
+        aoi_type TEXT,
+        latitude REAL,
+        longitude REAL,
+        radius INTEGER)
+        ''')
+    c.execute('''CREATE TABLE IF NOT EXISTS intersects (
+        time INTEGER,
+        latitude REAL,
+        longitude REAL,
+        num_parents INTEGER,
+        confidence INTEGER,
+        aoi_id INTEGER)''')
+    c.execute('''CREATE TABLE IF NOT EXISTS lobs (time INTEGER,
+        station_id TEXT,
+        latitude REAL,
+        longitude REAL,
+        confidence INTEGER,
+        lob REAL)''')
+    conn.commit()
+    try:
+        while True:
+            command, items = DATABASE_EDIT_Q.get()
+            if command == "done":
+                conn.commit()
+            else:
+                c.execute(command, items)
+                DATABASE_RETURN.put(True)
+    except KeyboardInterrupt:
+        DATABASE_RETURN.put(True)
+        conn.commit()
+        conn.close()
+
+###############################################
+# Thangs to do before closing the program.
+###############################################
+def finish():
+    clear(debugging)
+    print("Processing, please wait.")
+    ms.receiving = False
+    update_rx_table()
+    if geofile != None:
+        write_geojson(*process_data(database_name)[:2])
+    kill(getpid(), signal.SIGTERM)
+
 if __name__ == '__main__':
     ###############################################
     # Help info printed when calling the program
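database_writer() above is the consumer for DATABASE_EDIT_Q: each (sql, params) request is executed and acknowledged on DATABASE_RETURN, and a ("done", None) message makes it commit (the callers in this diff do not wait for an acknowledgement after sending "done"). Roughly, a caller wraps the handshake like this; queued_execute and queued_commit are illustrative helper names, not functions from the diff:

def queued_execute(sql, params):
    # Hand one write to the dedicated writer process and wait for its acknowledgement.
    DATABASE_EDIT_Q.put((sql, params))
    DATABASE_RETURN.get(timeout=1)

def queued_commit():
    # "done" tells the writer to commit whatever it has executed so far.
    DATABASE_EDIT_Q.put(("done", None))

# Equivalent of a former c.execute(...) / conn.commit() pair:
queued_execute("DELETE FROM receivers WHERE station_id=?", ["my_station"])  # "my_station" is a placeholder
queued_commit()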
@@ -1068,6 +1145,10 @@ if __name__ == '__main__':
     web.daemon = True
     web.start()
+    dbwriter = Process(target=database_writer)
+    dbwriter.daemon = True
+    dbwriter.start()
     try:
         ###############################################
         # Reds receivers from the database first, then
static/style.css
@@ -152,6 +152,7 @@ span {
 }

 .slidespan {
+  max-width: 40vw;
   width: 550px; /* Width of the outside container */
   /* padding-top: 10px;
   padding-bottom: 10px;*/
@@ -272,6 +273,7 @@ input:checked + .switchslider:before {
   display: block;
   z-index: 1;
   opacity: 0.5;
+  max-width: fit-content;
 }

 .tooltip:hover {