Commit 45258fad authored by Alan Mitchell's avatar Alan Mitchell
Browse files

Reorg'd scripts and converted to runscripts.

parent 34b2118d
import logging
import global_vars # causes code to run in that module.
import logging_setup # causes logging setup code to run.
logging.getLogger('bms').info('BMS Application first accessed.')
......
'''
This file holds settings for the application and sets up logging.
This file sets up logging.
'''
from os.path import dirname, join, realpath
from glob import glob
import logging, logging.handlers
APP_PATH = realpath(dirname(__file__))
# Full path to the Django database holding project model data
# (building lists, sensor lists, etc.). Assume it is the first sqlite database
# in the directory above
dbs = glob(join(APP_PATH, '..', '*.sqlite'))
PROJ_DB_FILENAME = realpath(dbs[0]) if dbs else ''
# ------- Set up logging for the application
import logging, logging.handlers
# Log file for the application
LOG_FILE = join(APP_PATH, 'logs', 'bms.log')
......@@ -43,5 +34,3 @@ fh.setFormatter(formatter)
# add the handler to the logger
logger.addHandler(fh)
# --------------------------
\ No newline at end of file
......@@ -3,8 +3,12 @@
import sqlite3
import sys
import calendar
import os.path
import time
import shutil
import subprocess
import glob
import calendar
import pytz
from dateutil import parser
......@@ -21,6 +25,8 @@ class BMSdata:
it will be created.
"""
self.db_fname = fname # save database filename.
self.conn = sqlite3.connect(fname)
# use the SQLite Row row_factory for all Select queries
......@@ -174,6 +180,41 @@ class BMSdata:
self.conn.commit()
return None, None # no prior values
def backup_db(self, days_to_retain):
    """Backs up the sensor reading database and compresses the backup.
    Also deletes compressed backup files that were created more than
    'days_to_retain' days ago.

    'days_to_retain': number of days of old backup files to keep.
    """
    # Backups live in a 'bak' subdirectory next to the database file;
    # create it if it is not already present so shutil.copy cannot fail
    # on a fresh install.
    bak_dir = os.path.join(os.path.dirname(self.db_fname), 'bak')
    if not os.path.exists(bak_dir):
        os.makedirs(bak_dir)

    # backup filename is stamped with the current date/time
    fname = os.path.join(bak_dir, time.strftime('%Y-%m-%d-%H%M%S') + '.sqlite')

    # Before copying the database file, force a lock on it so that no
    # write operations occur during the copying process.  Do this by
    # starting a write transaction against a dummy table.
    try:
        self.cursor.execute('CREATE TABLE _junk (x integer)')
    except sqlite3.OperationalError:
        # the dummy table already exists from a prior backup run
        pass

    # write a value into the table to acquire the write lock
    self.cursor.execute('INSERT INTO _junk VALUES (1)')

    # copy the database file while it is locked against writers
    shutil.copy(self.db_fname, fname)

    # Roll back the INSERT as the value is not actually needed.
    self.conn.rollback()

    # gzip the backup file in place (replaces fname with fname + '.gz')
    subprocess.call(['gzip', fname])

    # delete any backup files created more than 'days_to_retain' days ago
    cutoff_time = time.time() - days_to_retain * 24 * 3600.0
    for fn in glob.glob(os.path.join(bak_dir, '*.gz')):
        if os.path.getmtime(fn) < cutoff_time:
            os.remove(fn)
def import_text_file(self, filename, tz_name='US/Alaska'):
"""Adds the sensor reading data present in the tab-delimited 'filename' to
the reading database. Date/time values in the file are assumed to be in the
......
#!/usr/local/bin/python2.7
"""Script to backup the BMS sensor reading database.
The database is copied, gzipped, and placed in the 'bak' directory.
Backup files more than 3 weeks old are deleted.
"""
import os
import sys
import time
import sqlite3
import shutil
import subprocess
import glob

# change into the directory of this script so relative paths resolve
os.chdir(os.path.dirname(sys.argv[0]))

# get parent directory into the Python path so 'bmsdata' can be imported
sys.path.insert(0, '../')
import bmsdata

# path to the sensor reading database
db_path = bmsdata.DEFAULT_DB

# make backup filename with current date/time in 'bak' subdirectory
fname = os.path.join(os.path.dirname(db_path), 'bak', time.strftime('%Y-%m-%d-%H%M%S') + '.sqlite')

# Before copying the database file, need to force a lock on it so that no
# write operations occur during the copying process.
conn = sqlite3.connect(db_path)
cur = conn.cursor()

# create a dummy table to write into.
try:
    cur.execute('CREATE TABLE _junk (x integer)')
except sqlite3.OperationalError:
    # the dummy table already existed from a prior backup run
    pass

# write a value into the table to create a lock on the database
cur.execute('INSERT INTO _junk VALUES (1)')

# now copy the database while it is locked against writers
shutil.copy(db_path, fname)

# Rollback the Insert as we don't really need it, then release the lock.
conn.rollback()
conn.close()

# gzip the backup file (replaces fname with fname + '.gz')
subprocess.call(['gzip', fname])

# delete any backup files more than 3 weeks old
cutoff_time = time.time() - 3 * 7 * 24 * 3600.0
for fn in glob.glob(os.path.join(os.path.dirname(db_path), 'bak', '*.gz')):
    if os.path.getmtime(fn) < cutoff_time:
        os.remove(fn)
#!/usr/local/bin/python2.7
"""Logs a daily summary of sensor reading database activity: the number
of readings inserted in the last day and the total reading count.
"""
import os, sys, logging, time

# change into this script's directory so relative paths resolve
os.chdir(os.path.dirname(os.path.abspath(sys.argv[0])))
sys.path.insert(0, '../../')   # add the parent/parent directory to the Python path
sys.path.insert(0, '../')      # add the parent directory to the Python path
import global_vars     # needed to set up logging
import bmsdata

# Set time zone so log readings are stamped with Alaska time.
# Did this because Django sets time to AK time.
os.environ['TZ'] = 'US/Alaska'
try:
    time.tzset()
except AttributeError:
    # time.tzset() is not available on Windows; log timestamps will use
    # the system default time zone there.
    pass

logger = logging.getLogger('bms.daily_status')

# get a BMSdata object for the sensor reading database and log the counts.
reading_db = bmsdata.BMSdata()
logger.info('{:,} readings inserted in last day. {:,} total readings.'.format(
    reading_db.readingCount(time.time() - 3600*24), reading_db.readingCount()))
reading_db.close()
......@@ -2,9 +2,9 @@
a text file.
The sensor information must be stored in a text file with the name
'new_sensors.txt' stored in the same directory as this script. The first line
of that file is a header line. The file must have the format shown in the
New_Sensors.xlsx spreadsheet stored in this directory.
'new_sensors.txt' stored in the subdirectory 'files' beneath this script. The
first line of that file is a header line. The file must have the format shown
in the New_Sensors.xlsx spreadsheet stored in the 'files' subdirectory.
All SensorGroup and Unit objects must be present before running this script.
Building objects will be created as needed. If a sensor with the same
......
"""Script to backup the BMS sensor reading database.
The database is copied, gzipped, and placed in the bak directory.
This script is run via django-extensions runscript facility:
manage.py runscript backup_readingdb
"""
import bmsapp.readingdb.bmsdata
DAYS_TO_RETAIN = 21 # days of old backup files to retain
def run():
'''Method called by runscript.
'''
db = bmsapp.readingdb.bmsdata.BMSdata()
db.backup_db(DAYS_TO_RETAIN)
db.close()
#!/usr/local/bin/python2.7
"""Determines and inserts calculated sensor values into the sensor
reading database.  Usually run via a cron job.
"""
import os, sys, sqlite3, logging, time

# change into this script's directory so relative paths resolve
os.chdir(os.path.dirname(os.path.abspath(sys.argv[0])))
sys.path.insert(0, '../')   # add the parent directory to the Python path
import global_vars
from readingdb import bmsdata
from calcs import calcreadings, calcfuncs01

# Set time zone so log readings are stamped with Alaska time.
# Did this because Django sets time to AK time.
os.environ['TZ'] = 'US/Alaska'
try:
    time.tzset()
except AttributeError:
    # time.tzset() is not available on Windows; log timestamps will use
    # the system default time zone there.
    pass

logger = logging.getLogger('bms.calc_readings')

# Get a BMSdata object for the sensor reading database and then make a
# CalculateReadings object.  Other calculated reading classes in addition to
# CalcReadingFuncs_01 can be added to the list and they will be searched for
# matching function names.  Only allow calculated readings within the last
# 7 days (60 * 24 * 7 minutes).
reading_db = bmsdata.BMSdata()
calc = calcreadings.CalculateReadings([calcfuncs01.CalcReadingFuncs_01, ], reading_db, 60*24*7)

# Get a database connection and cursor to the Django project database that
# has the sensor list.
conn = sqlite3.connect(global_vars.PROJ_DB_FILENAME)
cursor = conn.cursor()

# get all the calculated readings in calculation order
cursor.execute('SELECT sensor_id, tran_calc_function, function_parameters FROM bmsapp_sensor WHERE is_calculated = 1 ORDER BY calculation_order')
for row in cursor.fetchall():
    try:
        rec_count = calc.processCalc(row[0], row[1], row[2])
        logger.debug('%s %s readings calculated and inserted' % (rec_count, row[0]))
    except Exception:
        # log the traceback but continue with the remaining sensors;
        # a bare except here would also swallow KeyboardInterrupt.
        logger.exception('Error calculating %s readings' % row[0])

# release both databases
conn.close()
reading_db.close()
'''Determines and inserts the calculated sensor values into the sensor
reading database. This script is usually run via a cron job every half
hour.

This script is set up to run through use of the django-extensions runscript
feature, in order that the script has easy access to the Django model data
for this application. The script is run by:

    manage.py runscript calc_readings
'''
import logging

from bmsapp.readingdb import bmsdata
from bmsapp.calcs import calcreadings, calcfuncs01
import bmsapp.models


def run():
    '''This method is called by the 'runscript' command.
    '''
    # make a logger object
    logger = logging.getLogger('bms.calc_readings')

    # Get a BMSdata object for the sensor reading database and then make a
    # CalculateReadings object.  Other calculated reading classes in addition
    # to CalcReadingFuncs_01 can be added to the list and they will be
    # searched for matching function names.  Only allow calculated readings
    # within the last 7 days (60 * 24 * 7 minutes).
    reading_db = bmsdata.BMSdata()
    calc = calcreadings.CalculateReadings([calcfuncs01.CalcReadingFuncs_01, ], reading_db, 60*24*7)

    try:
        # Loop through the calculated sensor readings in the proper
        # calculation order, inserting the calculated values in the database.
        for calc_sensor in bmsapp.models.Sensor.objects.filter(is_calculated=1).order_by('calculation_order'):
            try:
                rec_count = calc.processCalc(calc_sensor.sensor_id, calc_sensor.tran_calc_function, calc_sensor.function_parameters)
                logger.debug('%s %s readings calculated and inserted' % (rec_count, calc_sensor.sensor_id))
            except Exception:
                # Log the error but continue with the remaining sensors; a
                # bare except here would also swallow KeyboardInterrupt.
                logger.exception('Error calculating %s readings' % calc_sensor.sensor_id)
    finally:
        # release the reading database even if the sensor query fails
        reading_db.close()
'''Script to insert some summary info about database size and insertions.
This script is run through the django-extensions runscript facility. To run:

    manage.py runscript daily_status
'''
import logging
import time

import bmsapp.readingdb.bmsdata


def run():
    '''Called by the 'runscript' command.  Logs the number of sensor
    readings inserted during the last day and the total reading count.
    '''
    # logger configured by the application's logging setup
    logger = logging.getLogger('bms.daily_status')

    # open the sensor reading database and gather the two counts
    reading_db = bmsapp.readingdb.bmsdata.BMSdata()
    one_day_ago = time.time() - 3600 * 24
    recent_count = reading_db.readingCount(one_day_ago)
    total_count = reading_db.readingCount()

    logger.info('{:,} readings inserted in last day. {:,} total readings.'.format(recent_count, total_count))
    reading_db.close()
......@@ -8,7 +8,10 @@ from django.views.decorators.csrf import csrf_exempt
from django.core.urlresolvers import reverse
from django.conf import settings
import models, global_vars, view_util, storereads
import models
import logging_setup
import view_util
import storereads
from reports import basechart
from readingdb import bmsdata
......@@ -245,7 +248,7 @@ def show_log(request):
'''
Returns the application's log file, without formatting.
'''
return HttpResponse('<pre>%s</pre>' % open(global_vars.LOG_FILE).read())
return HttpResponse('<pre>%s</pre>' % open(logging_setup.LOG_FILE).read())
def show_video(request, filename, width, height):
'''
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment