Skip to content
GitLab
Projects
Groups
Snippets
Help
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Sign in
Toggle navigation
Open sidebar
energy
bmon
Commits
45258fad
Commit
45258fad
authored
Jan 31, 2015
by
Alan Mitchell
Browse files
Reorg'd scripts and converted to runscripts.
parent
34b2118d
Changes
12
Hide whitespace changes
Inline
Side-by-side
Showing
12 changed files
with
133 additions
and
153 deletions
+133
-153
bmsapp/__init__.py
bmsapp/__init__.py
+1
-1
bmsapp/logging_setup.py
bmsapp/logging_setup.py
+2
-13
bmsapp/readingdb/bmsdata.py
bmsapp/readingdb/bmsdata.py
+42
-1
bmsapp/readingdb/scripts/backup.py
bmsapp/readingdb/scripts/backup.py
+0
-55
bmsapp/readingdb/scripts/daily_status.py
bmsapp/readingdb/scripts/daily_status.py
+0
-29
bmsapp/scripts/add_sensors.py
bmsapp/scripts/add_sensors.py
+3
-3
bmsapp/scripts/backup_readingdb.py
bmsapp/scripts/backup_readingdb.py
+17
-0
bmsapp/scripts/calc_readings.py
bmsapp/scripts/calc_readings.py
+42
-49
bmsapp/scripts/daily_status.py
bmsapp/scripts/daily_status.py
+21
-0
bmsapp/scripts/files/New_Sensors.xlsx
bmsapp/scripts/files/New_Sensors.xlsx
+0
-0
bmsapp/scripts/files/new_sensors.txt
bmsapp/scripts/files/new_sensors.txt
+0
-0
bmsapp/views.py
bmsapp/views.py
+5
-2
No files found.
bmsapp/__init__.py
View file @
45258fad
import
logging
import
g
lo
bal_vars
# causes
code to run in that module
.
import
lo
gging_setup
# causes
logging setup code to run
.
logging
.
getLogger
(
'bms'
).
info
(
'BMS Application first accessed.'
)
...
...
bmsapp/
g
lo
bal_vars
.py
→
bmsapp/lo
gging_setup
.py
View file @
45258fad
'''
This file
holds settings for the application and
sets up logging.
This file sets up logging.
'''
from
os.path
import
dirname
,
join
,
realpath
from
glob
import
glob
import
logging
,
logging
.
handlers
APP_PATH
=
realpath
(
dirname
(
__file__
))
# Full path to the Django database holding project model data
# (building lists, sensor lists, etc.). Assume it is the first sqlite database
# in the directory above
dbs
=
glob
(
join
(
APP_PATH
,
'..'
,
'*.sqlite'
))
PROJ_DB_FILENAME
=
realpath
(
dbs
[
0
])
if
dbs
else
''
# ------- Set up logging for the application
import
logging
,
logging
.
handlers
# Log file for the application
LOG_FILE
=
join
(
APP_PATH
,
'logs'
,
'bms.log'
)
...
...
@@ -43,5 +34,3 @@ fh.setFormatter(formatter)
# add the handler to the logger
logger
.
addHandler
(
fh
)
# --------------------------
\ No newline at end of file
bmsapp/readingdb/bmsdata.py
View file @
45258fad
...
...
@@ -3,8 +3,12 @@
import
sqlite3
import
sys
import
calendar
import
os.path
import
time
import
shutil
import
subprocess
import
glob
import
calendar
import
pytz
from
dateutil
import
parser
...
...
@@ -21,6 +25,8 @@ class BMSdata:
it will be created.
"""
self
.
db_fname
=
fname
# save database filename.
self
.
conn
=
sqlite3
.
connect
(
fname
)
# use the SQLite Row row_factory for all Select queries
...
...
@@ -174,6 +180,41 @@ class BMSdata:
self
.
conn
.
commit
()
return
None
,
None
# no prior values
def
backup_db
(
self
,
days_to_retain
):
"""Backs up the database and compresses the backup. Deletes old backup
files that were created more than 'days_to_retain' ago.
"""
# make backup filename with current date time in 'bak' subdirectory
fname
=
os
.
path
.
join
(
os
.
path
.
dirname
(
self
.
db_fname
),
'bak'
,
time
.
strftime
(
'%Y-%m-%d-%H%M%S'
)
+
'.sqlite'
)
# Before copying the database file, need to force a lock on it so that no
# write operations occur during the copying process
# create a dummy table to write into.
try
:
self
.
cursor
.
execute
(
'CREATE TABLE _junk (x integer)'
)
except
:
# table already existed
pass
# write a value into the table to create a lock on the database
self
.
cursor
.
execute
(
'INSERT INTO _junk VALUES (1)'
)
# now copy database
shutil
.
copy
(
self
.
db_fname
,
fname
)
# Rollback the Insert as we don't really need it.
self
.
conn
.
rollback
()
# gzip the backup file
subprocess
.
call
([
'gzip'
,
fname
])
# delete any backup files more than 'days_to_retain' old.
cutoff_time
=
time
.
time
()
-
days_to_retain
*
24
*
3600.0
for
fn
in
glob
.
glob
(
os
.
path
.
join
(
os
.
path
.
dirname
(
self
.
db_fname
),
'bak'
,
'*.gz'
)):
if
os
.
path
.
getmtime
(
fn
)
<
cutoff_time
:
os
.
remove
(
fn
)
def
import_text_file
(
self
,
filename
,
tz_name
=
'US/Alaska'
):
"""Adds the sensor reading data present in the tab-delimited 'filename' to
the reading database. Date/time values in the file are assumed to be in the
...
...
bmsapp/readingdb/scripts/backup.py
deleted
100644 → 0
View file @
34b2118d
#!/usr/local/bin/python2.7
# Script to backup the BMS sensor reading database.
# The database is copied, gzipped, and placed in the bak directory.

import os
import sys
import time
import sqlite3
import shutil
import subprocess
import glob

# change into the directory of this script
os.chdir(os.path.dirname(sys.argv[0]))

# get parent directory into path so 'bmsdata' can be imported
sys.path.insert(0, '../')
import bmsdata

# path to reading database
db_path = bmsdata.DEFAULT_DB

# make backup filename with current date time in 'bak' subdirectory
fname = os.path.join(os.path.dirname(db_path), 'bak', time.strftime('%Y-%m-%d-%H%M%S') + '.sqlite')

# Before copying the database file, need to force a lock on it so that no
# write operations occur during the copying process.
conn = sqlite3.connect(db_path)
cur = conn.cursor()

# create a dummy table to write into.
try:
    cur.execute('CREATE TABLE _junk (x integer)')
except sqlite3.OperationalError:
    # table already existed from a prior backup run
    pass

# write a value into the table to create a lock on the database
cur.execute('INSERT INTO _junk VALUES (1)')

# now copy database
shutil.copy(db_path, fname)

# Rollback the Insert as we don't really need it.
conn.rollback()

# gzip the backup file (replaces 'fname' with 'fname.gz')
subprocess.call(['gzip', fname])

# delete any backup files more than 3 weeks old
cutoff_time = time.time() - 3 * 7 * 24 * 3600.0
for fn in glob.glob(os.path.join(os.path.dirname(db_path), 'bak', '*.gz')):
    if os.path.getmtime(fn) < cutoff_time:
        os.remove(fn)
bmsapp/readingdb/scripts/daily_status.py
deleted
100644 → 0
View file @
34b2118d
#!/usr/local/bin/python2.7
# Script that logs a daily summary: readings inserted in the last day and
# total readings in the sensor reading database.

import os, sys, logging, time

# change into this directory
os.chdir(os.path.dirname(os.path.abspath(sys.argv[0])))
sys.path.insert(0, '../../')   # add the parent/parent directory to the Python path
sys.path.insert(0, '../')      # add the parent directory to the Python path

import global_vars   # needed to set up logging
import bmsdata

# make a logger object and set time zone so log readings are stamped with
# Alaska time.  Did this because Django sets time to AK time.
os.environ['TZ'] = 'US/Alaska'
try:
    time.tzset()
except AttributeError:
    # time.tzset() does not exist on Windows.  Need to come up with another
    # solution if running on Windows is necessary.
    pass

logger = logging.getLogger('bms.daily_status')

# get a BMSdata object for the sensor reading database.
reading_db = bmsdata.BMSdata()
logger.info('{:,} readings inserted in last day. {:,} total readings.'.format(
    reading_db.readingCount(time.time() - 3600 * 24),
    reading_db.readingCount()))
reading_db.close()
bmsapp/scripts/add_sensors.py
View file @
45258fad
...
...
@@ -2,9 +2,9 @@
a text file.
The sensor information must be stored in a text file with the name
'new_sensors.txt' stored in the s
ame
directory
as
this script. The
first line
of that file is a header line. The file must have the format shown
in the
New_Sensors.xlsx spreadsheet stored in th
is
directory.
'new_sensors.txt' stored in the s
ub
directory
'files' beneath
this script. The
first line
of that file is a header line. The file must have the format shown
in the
New_Sensors.xlsx spreadsheet stored in th
e 'files' sub
directory.
All SensorGroup and Unit objects must be present before running this script.
Building objects will be created as needed. If a sensor with the same
...
...
bmsapp/scripts/backup_readingdb.py
0 → 100644
View file @
45258fad
"""Script to backup the BMS sensor reading database.
The database is copied, gzipped, and placed in the bak directory.
This script is run via django-extensions runscript facility:
manage.py runscript backup_readingdb
"""
import
bmsapp.readingdb.bmsdata
DAYS_TO_RETAIN
=
21
# days of old backup files to retain
def run():
    '''Method called by the django-extensions 'runscript' facility.

    Backs up the sensor reading database (copy + gzip into the 'bak'
    directory), retaining DAYS_TO_RETAIN days of old backup files.
    '''
    db = bmsapp.readingdb.bmsdata.BMSdata()
    try:
        db.backup_db(DAYS_TO_RETAIN)
    finally:
        # close the database connection even if the backup raises
        db.close()
bmsapp/scripts/calc_readings.py
View file @
45258fad
#!/usr/local/bin/python2.7
# Script that determines and inserts calculated sensor readings into the
# sensor reading database.

import os, sys, sqlite3, logging, time

# change into this directory
os.chdir(os.path.dirname(os.path.abspath(sys.argv[0])))
sys.path.insert(0, '../')   # add the parent directory to the Python path

import global_vars
from readingdb import bmsdata
from calcs import calcreadings, calcfuncs01

# make a logger object and set time zone so log readings are stamped with
# Alaska time.  Did this because Django sets time to AK time.
os.environ['TZ'] = 'US/Alaska'
try:
    time.tzset()
except AttributeError:
    # time.tzset() does not exist on Windows.  Need to come up with another
    # solution if running on Windows is necessary.
    pass

logger = logging.getLogger('bms.calc_readings')

# get a BMSdata object for the sensor reading database and then make a Calculate
# Readings object. Other calculated reading classes in addition to CalcReadingFuncs_01
# can be added to the list and they will be search for matching function names.
# Only allow calculated readings within the last 7 days.
reading_db = bmsdata.BMSdata()
calc = calcreadings.CalculateReadings([calcfuncs01.CalcReadingFuncs_01, ], reading_db, 60 * 24 * 7)

# get a database connection and cursor to the Django project database that has the sensor
# list.
conn = sqlite3.connect(global_vars.PROJ_DB_FILENAME)
cursor = conn.cursor()

# get all the calculated readings in calculation order
cursor.execute('SELECT sensor_id, tran_calc_function, function_parameters FROM bmsapp_sensor WHERE is_calculated = 1 ORDER BY calculation_order')
for row in cursor.fetchall():
    try:
        rec_count = calc.processCalc(row[0], row[1], row[2])
        logger.debug('%s %s readings calculated and inserted' % (rec_count, row[0]))
    except Exception:
        # log the full traceback but continue with the remaining sensors
        logger.exception('Error calculating %s readings' % row[0])

reading_db.close()
'''Determines and inserts the calculated sensor values into the sensor
reading database. This script is usually run via a cron job every half
hour.
This script is set up to run through use of the django-extensions runscript
feature, in order that the script has easy access to the Django model data
for this application. The script is run by:
manage.py runscript calc_readings
'''
import
logging
from
bmsapp.readingdb
import
bmsdata
from
bmsapp.calcs
import
calcreadings
,
calcfuncs01
import
bmsapp.models
def run():
    '''This method is called by the 'runscript' command.

    Determines and inserts the calculated sensor values into the sensor
    reading database, processing calculated sensors in calculation order.
    '''
    # make a logger object
    logger = logging.getLogger('bms.calc_readings')

    # get a BMSdata object for the sensor reading database and then make a Calculate
    # Readings object. Other calculated reading classes in addition to CalcReadingFuncs_01
    # can be added to the list and they will be search for matching function names.
    # Only allow calculated readings within the last 7 days.
    reading_db = bmsdata.BMSdata()
    calc = calcreadings.CalculateReadings([calcfuncs01.CalcReadingFuncs_01, ], reading_db, 60 * 24 * 7)

    try:
        # Loop through the calculated sensor readings in the proper calculation order,
        # inserting the calculated values in the database.
        for calc_sensor in bmsapp.models.Sensor.objects.filter(is_calculated=1).order_by('calculation_order'):
            try:
                rec_count = calc.processCalc(calc_sensor.sensor_id,
                                             calc_sensor.tran_calc_function,
                                             calc_sensor.function_parameters)
                logger.debug('%s %s readings calculated and inserted' % (rec_count, calc_sensor.sensor_id))
            except Exception:
                # log the full traceback but continue with the remaining sensors
                logger.exception('Error calculating %s readings' % calc_sensor.sensor_id)
    finally:
        # close the database connection even if the loop raises
        reading_db.close()
bmsapp/scripts/daily_status.py
0 → 100644
View file @
45258fad
'''Script to insert some summary info about database size and insertions.
This script is run through the django-extensions runscript facility. To run:
manage.py runscript daily_status
'''
import
logging
import
time
import
bmsapp.readingdb.bmsdata
def run():
    '''This method is called by the 'runscript' command.

    Logs a summary line: number of readings inserted in the last day and
    the total reading count in the sensor reading database.
    '''
    # get an appropriate logger to use
    logger = logging.getLogger('bms.daily_status')

    # get a BMSdata object for the sensor reading database.
    reading_db = bmsapp.readingdb.bmsdata.BMSdata()
    try:
        logger.info('{:,} readings inserted in last day. {:,} total readings.'.format(
            reading_db.readingCount(time.time() - 3600 * 24),
            reading_db.readingCount()))
    finally:
        # close the database connection even if the counts raise
        reading_db.close()
bmsapp/scripts/New_Sensors.xlsx
→
bmsapp/scripts/
files/
New_Sensors.xlsx
View file @
45258fad
File moved
bmsapp/scripts/new_sensors.txt
→
bmsapp/scripts/
files/
new_sensors.txt
View file @
45258fad
File moved
bmsapp/views.py
View file @
45258fad
...
...
@@ -8,7 +8,10 @@ from django.views.decorators.csrf import csrf_exempt
from
django.core.urlresolvers
import
reverse
from
django.conf
import
settings
import
models
,
global_vars
,
view_util
,
storereads
import
models
import
logging_setup
import
view_util
import
storereads
from
reports
import
basechart
from
readingdb
import
bmsdata
...
...
def show_log(request):
    '''
    Returns the application's log file, without formatting.
    '''
    # 'with' ensures the log file handle is closed after reading
    with open(logging_setup.LOG_FILE) as f:
        return HttpResponse('<pre>%s</pre>' % f.read())
def
show_video
(
request
,
filename
,
width
,
height
):
'''
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment