Compare commits


5 Commits

Author SHA1 Message Date
Ciaran Gultnieks f73a35b13c Prevent int to string conversion error in error report 2012-05-25 09:47:55 +01:00
Raymond Wagner a4e2592971 Add ability to correct for bad files from broken hash command. 2012-04-16 13:51:09 -04:00
Raymond Wagner 1d366ee08b find_orphans additions
This adds a non-interactive mode that prints out the list and exits,
triggered by running the script outside of a terminal (see the sketch
after the commit list).

This also adds a new type, 'misplaced', indicating recordings that were
found, but on a backend other than the one listed in the database. This
is informative only, and does not support any actions being performed on
the recordings in question.
2012-01-04 03:00:09 -05:00
Raymond Wagner 7b021b489a Alternate status information script for perl script of same name 2012-01-01 03:41:05 -05:00
Raymond Wagner cf084628eb Handles issue with 'forgotten' recordings
This adds another type of recording for find_orphans.py to handle.  If a
recording is in the process of deletion when the backend is terminated,
the file and database entry will still exist, but MythTV will ignore any
pre-existing entries with the 'deletepending' flag set. This now deletes
any such recordings over a certain age (sketched after the commit list).
2012-01-01 03:34:48 -05:00
4 changed files with 327 additions and 127 deletions
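The non-interactive mode added in 1d366ee08b hinges on a TTY check against stdin, mirroring the change to main() in the find_orphans.py diff below; a minimal sketch of the idea, using the script's populate helper:

import sys

if not sys.stdin.isatty():
    # started from cron or a pipe rather than an interactive terminal:
    # print the report once and exit instead of offering the menu
    populate().print_results()
    sys.exit(0)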

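The 'forgotten' recordings from cf084628eb are detected by combining the deletepending flag with the age of the database entry, as the doubleorphan property in the diff below does. A standalone sketch of that test, assuming rec is a Recorded entry whose lastmodified is a plain datetime (max_age_days is an illustrative parameter; the script hard-codes one day):

from datetime import datetime

def is_forgotten(rec, max_age_days=1):
    # deletion was started but never finished: the entry is still flagged
    # 'deletepending' and has not been touched for over max_age_days
    return bool(rec.deletepending) and \
           (datetime.now() - rec.lastmodified).days > max_age_days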

@@ -1,6 +1,7 @@
#!/usr/bin/env python
from MythTV import MythDB, MythBE, Recorded
from MythTV import MythDB, MythBE, MythLog, Recorded as _Recorded
from MythTV.utility import datetime
from socket import timeout
import os
@@ -14,132 +15,207 @@ def human_size(s):
o += 1
return str(round(s,1))+('B ','KB','MB','GB')[o]
class Singleton(type):
def __call__(self, *args, **kwargs):
if not hasattr(self, '_instance'):
self._instance = super(Singleton, self).__call__(*args, **kwargs)
# print 'call: %s' % type(self)
# if self.__instance is None:
# self.__instance = super(Singleton, self).__call__(*args, **kwargs)
if callable(self._instance):
return self._instance()
return self._instance
class File( str ):
#Utility class to allow deletion and terminal printing of files.
def __new__(self, host, group, path, name, size):
def __new__(self, host, group, path, name, size, db):
return str.__new__(self, name)
def __init__(self, host, group, path, name, size):
self.host = host
def __init__(self, host, group, path, name, size, db):
self.hosts = [host]
self.group = group
self.path = path
self.size = int(size)
self.db = db
def pprint(self):
name = '%s: %s' % (self.host, os.path.join(self.path, self))
name = '%s: %s' % (self.hosts[0], os.path.join(self.path, self))
print u' {0:<90}{1:>8}'.format(name, human_size(self.size))
def delete(self):
be = MythBE(self.host, db=DB)
be = MythBE(self.hosts[0], db=self.db)
be.deleteFile(self, self.group)
def add_host(self, host):
self.hosts.append(host)
class MyRecorded( Recorded ):
class Recorded( _Recorded ):
#Utility class to allow deletion and terminal printing of orphaned recording entries.
_table = 'recorded'
def pprint(self):
name = u'{0.hostname}: {0.title}'.format(self)
if self.subtitle:
name += ' - '+self.subtitle
print u' {0:<70}{1:>28}'.format(name,self.basename)
def delete(self, force=False, rerecord=False):
if self.doubleorphan:
# self.update(deletepending=0)
rerecord = False
super(MyRecorded, self).delete(force, rerecord)
@property
def doubleorphan(self):
return self.deletepending and ((datetime.now() - self.lastmodified).days > 1)
def printrecs(title, recs):
# print out all recordings in list, followed by a count
print title
for rec in sorted(recs, key=lambda x: x.title):
rec.pprint()
print u'{0:>88}{1:>12}'.format('Count:',len(recs))
if len(recs):
print title
for rec in sorted(recs, key=lambda x: x.title):
rec.pprint()
print u'{0:>88}{1:>12}'.format('Count:',len(recs))
def printfiles(title, files):
# print out all files in list, followed by a total size
print title
for f in sorted(files, key=lambda x: x.path):
f.pprint()
size = sum([f.size for f in files])
print u'{0:>88}{1:>12}'.format('Total:',human_size(size))
if len(files):
print title
for f in sorted(files, key=lambda x: x.path):
f.pprint()
size = sum([f.size for f in files])
print u'{0:>88}{1:>12}'.format('Total:',human_size(size))
def populate(host=None):
# scan through all accessible backends to generate a new list of orphaned content
unfiltered = []
kwargs = {'livetv':True}
if host:
# if the host was defined on the command line, check to make sure such a
# host is defined in the database
with DB as c:
c.execute("""SELECT count(1) FROM settings
WHERE hostname=%s AND value=%s""",
(host, 'BackendServerIP'))
if c.fetchone()[0] == 0:
raise Exception('Invalid hostname specified on command line.')
hosts = [host]
kwargs['hostname'] = host
else:
# else, pull a list of all defined backends from the database
with DB as c:
class populate( object ):
__metaclass__ = Singleton
def __init__(self, host=None):
self.db = MythDB()
self.db.searchRecorded.handler = Recorded
self.be = MythBE(db=self.db)
self.log = MythLog(db=self.db)
self.set_host(host)
self.load_backends()
self.load_storagegroups()
def set_host(self, host):
self.host = host
if host:
# if the host was defined on the command line, check
# to make sure such a host is defined in the database
with self.db as c:
c.execute("""SELECT count(1) FROM settings
WHERE hostname=? AND value=?""",
(host, 'BackendServerIP'))
if c.fetchone()[0] == 0:
raise Exception('Invalid hostname specified for backend.')
def load_backends(self):
with self.db as c:
c.execute("""SELECT hostname FROM settings
WHERE value='BackendServerIP'""")
hosts = [r[0] for r in c.fetchall()]
for host in hosts:
for sg in DB.getStorageGroup():
# skip special storage groups intended for MythVideo
# this list will need to be added to as additional plugins
# start using their own storage groups
if sg.groupname in ('Videos','Banners','Coverart',\
'Fanart','Screenshots','Trailers'):
continue
self.hosts = []
for host in hosts:
# try to access all defined hosts, and
# store the ones currently accessible
try:
dirs,files,sizes = BE.getSGList(host, sg.groupname, sg.dirname)
for f,s in zip(files,sizes):
newfile = File(host, sg.groupname, sg.dirname, f, s)
# each filename should be unique among all storage directories
# defined on all backends
# store one copy of a file, ignoring where the file actually exists
if newfile not in unfiltered:
unfiltered.append(newfile)
MythBE(backend=host)
self.hosts.append(host)
except:
pass
recs = list(DB.searchRecorded(**kwargs))
def load_storagegroups(self):
self.storagegroups = \
[sg for sg in self.db.getStorageGroup() \
if sg.groupname not in ('Videos','Banners','Coverart',\
'Fanart','Screenshots','Trailers')]
zerorecs = []
orphvids = []
for rec in list(recs):
# run through list of recordings, matching recording basenames with
# found files, and removing from both lists
if rec.basename in unfiltered:
recs.remove(rec)
i = unfiltered.index(rec.basename)
f = unfiltered.pop(i)
if f.size < 1024:
zerorecs.append(rec)
# remove any file with the same basename, these could be snapshots, failed
# transcode temporary files, or anything else relating to a non-orphaned
# recording
name = rec.basename.rsplit('.',1)[0]
for f in list(unfiltered):
if name in f:
unfiltered.remove(f)
def flush(self):
self.misplaced = []
self.zerorecs = []
self.pendrecs = []
self.orphrecs = []
self.orphvids = []
self.orphimgs = []
self.dbbackup = []
self.unfiltered = []
# filter remaining files for those with recording extensions
for f in list(unfiltered):
if not (f.endswith('.mpg') or f.endswith('.nuv')):
continue
orphvids.append(f)
unfiltered.remove(f)
def __call__(self):
self.refresh_content()
return self
# filter remaining files for those with image extensions
orphimgs = []
for f in list(unfiltered):
if not f.endswith('.png'):
continue
orphimgs.append(f)
unfiltered.remove(f)
def refresh_content(self):
# scan through all accessible backends to
# generate a new list of orphaned content
self.flush()
# filter remaining files for those that look like database backups
dbbackup = []
for f in list(unfiltered):
if 'sql' not in f:
continue
dbbackup.append(f)
unfiltered.remove(f)
unfiltered = {}
for host in self.hosts:
for sg in self.storagegroups:
try:
dirs,files,sizes = self.be.getSGList(host, sg.groupname, sg.dirname)
for f,s in zip(files, sizes):
newfile = File(host, sg.groupname, sg.dirname, f, s, self.db)
# each filename should be unique among all storage directories
# defined on all backends, but may exist in the same directory
# on multiple backends if they are shared
if newfile not in unfiltered:
# add a new file to the list
unfiltered[str(newfile)] = newfile
else:
# add a reference to the host on which it was found
unfiltered[str(newfile)].add_host(host)
except:
self.log(MythLog.GENERAL, MythLog.INFO,
'Could not access {0.groupname}@{1}{0.dirname}'.format(sg, host))
return (recs, zerorecs, orphvids, orphimgs, dbbackup, unfiltered)
for rec in self.db.searchRecorded(livetv=True):
if rec.hostname not in self.hosts:
# recording is on an offline backend, ignore it
name = rec.basename.rsplit('.',1)[0]
for n in unfiltered.keys():
if name in n:
# and anything related to it
del unfiltered[n]
elif rec.basename in unfiltered:
# run through list of recordings, matching basenames
# with found files, and removing file from list
f = unfiltered[rec.basename]
del unfiltered[rec.basename]
if f.size < 1024:
# file is too small to be of any worth
self.zerorecs.append(rec)
elif rec.doubleorphan:
# file is marked for deletion, but has been forgotten by the backend
self.pendrecs.append(rec)
elif rec.hostname not in f.hosts:
# recording is in the database, but not where it should be
self.misplaced.append(rec)
name = rec.basename.rsplit('.',1)[0]
for f in unfiltered.keys():
if name in f:
# file is related to a valid recording, ignore it
del unfiltered[f]
else:
# recording has been orphaned
self.orphrecs.append(rec)
for n,f in unfiltered.iteritems():
if n.endswith('.mpg') or n.endswith('.nuv'):
# filter files with recording extensions
self.orphvids.append(f)
elif n.endswith('.png'):
# filter files with image extensions
self.orphimgs.append(f)
elif 'sql' in n:
# filter for database backups
self.dbbackup.append(f)
else:
self.unfiltered.append(f)
def print_results(self):
printrecs("Recordings found on the wrong host", self.misplaced)
printrecs("Recordings with missing files", self.orphrecs)
printrecs("Zero byte recordings", self.zerorecs)
printrecs("Forgotten pending deletions", self.pendrecs)
printfiles("Orphaned video files", self.orphvids)
printfiles("Orphaned snapshots", self.orphimgs)
printfiles("Database backups", self.dbbackup)
printfiles("Other files", self.unfiltered)
def delete_recs(recs):
printrecs('The following recordings will be deleted', recs)
@@ -185,34 +261,23 @@ def delete_files(files):
sys.exit(0)
def main(host=None):
while True:
recs, zerorecs, orphvids, orphimgs, dbbackup, unfiltered = populate(host)
if not sys.stdin.isatty():
populate().print_results()
sys.exit(0)
if len(recs):
printrecs("Recordings with missing files", recs)
if len(zerorecs):
printrecs("Zero byte recordings", zerorecs)
if len(orphvids):
printfiles("Orphaned video files", orphvids)
if len(orphimgs):
printfiles("Orphaned snapshots", orphimgs)
if len(dbbackup):
printfiles("Database backups", dbbackup)
if len(unfiltered):
printfiles("Other files", unfiltered)
while True:
results = populate(host)
results.print_results()
opts = []
if len(recs):
opts.append(['Delete orphaned recording entries', delete_recs, recs])
if len(zerorecs):
opts.append(['Delete zero byte recordings', delete_recs, zerorecs])
if len(orphvids):
opts.append(['Delete orphaned video files', delete_files, orphvids])
if len(orphimgs):
opts.append(['Delete orphaned snapshots', delete_files, orphimgs])
if len(unfiltered):
opts.append(['Delete other files', delete_files, unfiltered])
opts.append(['Refresh list', None, None])
opts = [opt for opt in (
('Delete orphaned recording entries', delete_recs, results.orphrecs),
('Delete zero byte recordings', delete_recs, results.zerorecs),
('Forgotten pending deletion recordings', delete_recs, results.pendrecs),
('Delete orphaned video files', delete_files, results.orphvids),
('Delete orphaned snapshots', delete_files, results.orphimgs),
('Delete other files', delete_files, results.unfiltered),
('Refresh list', None, None))
if (opt[2] is None) or len(opt[2])]
print 'Please select from the following'
for i, opt in enumerate(opts):
print u' {0}. {1}'.format(i+1, opt[0])
@@ -241,10 +306,6 @@ def main(host=None):
except EOFError:
sys.exit(0)
DB = MythDB()
BE = MythBE(db=DB)
DB.searchRecorded.handler = MyRecorded
if __name__ == '__main__':
if len(sys.argv) == 2:
main(sys.argv[1])

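Taken together, the Singleton metaclass and the callable populate class above mean that every populate(...) call after the first reuses one connected instance and simply rescans: the metaclass returns self._instance(), and the instance's __call__ runs refresh_content() and returns itself. A rough usage sketch of the resulting behaviour, using names from the diff:

results = populate(host)        # first call: open DB, probe backends, scan
results.print_results()

results = populate()            # later calls: same instance, fresh scan
if results.zerorecs:
    delete_recs(results.zerorecs)   # act on whichever lists are non-empty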

@@ -11,14 +11,17 @@
from MythTV import Video
QUIET = True
def print_aligned(left, right):
indent = 100 - len(left)
print left, indent*' ', right
for vid in Video.getAllEntries():
if vid.hash != 'NULL':
if vid.hash in ('NULL', '', 'UNKNOWN_COMMAND'):
vid.hash = vid.getHash()
vid.update()
print_aligned(vid.filename, vid.hash)
elif not QUIET:
print_aligned(vid.filename, 'skipped')
continue
vid.hash = vid.getHash()
vid.update()
print_aligned(vid.filename, vid.hash)

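With the old and new lines interleaved above, the updated loop is easier to read in one piece: it now recomputes a hash whenever the stored value is NULL, empty, or the 'UNKNOWN_COMMAND' marker left behind by a broken hash command. A sketch of how the loop presumably reads after this commit:

for vid in Video.getAllEntries():
    if vid.hash in ('NULL', '', 'UNKNOWN_COMMAND'):
        # stored hash is missing or came from a broken hash command:
        # recompute it and write it back to the database
        vid.hash = vid.getHash()
        vid.update()
        print_aligned(vid.filename, vid.hash)
    elif not QUIET:
        print_aligned(vid.filename, 'skipped')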

@@ -0,0 +1,136 @@
#!/usr/bin/env python
from datetime import timedelta, datetime
from optparse import OptionParser
from MythTV import MythBE, Program
class MyProgram( Program ):
@property
def is_scheduled(self):
return self.recstatus in (self.rsWillRecord,
self.rsRecording,
self.rsRecorded)
@property
def is_duplicate(self):
return self.recstatus in (self.rsRepeat,
self.rsPreviousRecording,
self.rsCurrentRecording)
@property
def is_deactivated(self):
return self.recstatus in (self.rsInactive,
self.rsTooManyRecordings,
self.rsCancelled,
self.rsAborted,
self.rsNotListed,
self.rsDontRecord,
self.rsLowDiskSpace,
self.rsTunerBusy,
self.rsNeverRecord,
self.rsEarlierShowing,
self.rsLaterShowing)
@property
def is_conflict(self):
return self.recstatus == self.rsConflict
def main(opts):
MythBE.getPendingRecordings.handler = MyProgram
be = MythBE()
now = datetime.now()
if not opts.plaintext:
print '<h3>Upcoming Recordings:</h3>'
print '<div class="schedule">'
count = 0
for rec in be.getPendingRecordings():
if not ((opts.filter&2**0 and rec.is_scheduled) or
(opts.filter&2**1 and rec.is_duplicate) or
(opts.filter&2**2 and rec.is_deactivated) or
(opts.filter&2**3 and rec.is_conflict)):
continue
if opts.time and (opts.time < rec.recstartts):
continue
if now > rec.recendts:
continue
if opts.count and (opts.count <= count):
break
count += 1
if opts.plaintext:
print '{0} - {1}'.format(rec.starttime.strftime('%m/%d, %I:%M %p'),
rec.callsign)
if rec.subtitle:
print '{0.title} - {0.subtitle}'.format(rec)
else:
print '{0.title}'.format(rec)
print rec.description
print ''
else:
print '<a href="#">{0} - {1} - {2}'.format(rec.starttime.strftime('%m/%d, %I:%M %p'),
rec.callsign,
rec.title),
if rec.subtitle:
print rec.subtitle,
print '<br /><span><strong>{0.title}</strong>'.format(rec),
print rec.starttime.strftime('%m/%d, %I:%M %p'),
print '<br /><em>{0.description}<br/></span></a><hr />'.format(rec)
if not opts.plaintext:
print '</div>'
if __name__ == '__main__':
parser = OptionParser(usage="usage: %prog [options]")
parser.add_option('-n', "--count", action="store", type="int",
default=0, dest="count",
help="Outputs information on the next <count> shows to be recorded.")
parser.add_option("--hours", action="store", type="int",
default=-1, dest="hours",
help="Outputs information for recordings starting within the next "+\
"specified hours.")
parser.add_option("--minutes", action="store", type="int",
default=-1, dest="minutes",
help="Outputs information for recordings starting within the next "+\
"specified minutes.")
parser.add_option("--show-scheduled", action="store_true", default=False,
dest="scheduled",
help="Outputs information about recordings MythTV plans to actually "+\
"record.")
parser.add_option("--show-duplicates", action="store_true", default=False,
dest="duplicate",
help="Outputs information about recordings MythTV will not record "+\
"because of the specified duplicate matching policy for that rule")
parser.add_option("--show-deactivated", action="store_true", default=False,
dest="deactivated",
help="Outputs information on shows that are deactivated and will not "+\
"be recorded by MythTV. This may be due to the schedule being "+\
"inactive, being set to never record, because the show will be "+\
"recorded at an earlier or later date, because there are too many "+\
"recordings on that rule, because there is insufficient disk space, "+\
"or because the show is not in the time slot listed by the rule.")
parser.add_option("--show-conflicts", action="store_true", default=False,
dest="conflicts",
help="Outputs information on shows that will not be recorded due to "+\
"higher priority conflicts.")
parser.add_option("--plain-text", action="store_true", default=False,
dest="plaintext", help="Output data in plain text format.")
opts, args = parser.parse_args()
if opts.scheduled or opts.duplicate or opts.deactivated or opts.conflicts:
opts.filter = opts.scheduled*2**0 | \
opts.duplicate*2**1 | \
opts.deactivated*2**2 | \
opts.conflicts*2**3
else:
opts.filter = 2**0 or 2**3
if (opts.hours >= 0) or (opts.minutes >= 0):
opts.time = datetime.now() + timedelta(
hours=opts.hours if (opts.hours >= 0) else 0,
minutes=opts.minutes if (opts.minutes >= 0) else 0)
else:
opts.time = None
main(opts)

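The --show-* switches above are folded into a small bitmask that each pending recording is tested against. Note that the fallback opts.filter = 2**0 or 2**3 is a boolean expression: Python's or returns its first truthy operand, so it evaluates to 1 and the default output lists scheduled recordings only, whereas a bitwise 2**0 | 2**3 would also include conflicts. A minimal sketch of the mask logic with illustrative flag names:

# illustrative names matching the script's bit layout
SCHEDULED, DUPLICATE, DEACTIVATED, CONFLICT = 2**0, 2**1, 2**2, 2**3

def wanted(rec, mask):
    # keep a pending recording only if one of its enabled categories matches
    return bool((mask & SCHEDULED   and rec.is_scheduled) or
                (mask & DUPLICATE   and rec.is_duplicate) or
                (mask & DEACTIVATED and rec.is_deactivated) or
                (mask & CONFLICT    and rec.is_conflict))

mask = SCHEDULED | CONFLICT   # bitwise OR combines flags; boolean 'or' does not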

@@ -406,7 +406,7 @@ def main():
export = VIDEO(opts,int(args[0]))
except Exception, e:
Job(int(args[0])).update({'status':Job.ERRORED,
'comment':'ERROR: '+e.args[0]})
'comment':'ERROR: '+str(e.args[0])})
MythLog(module='mythvidexport.py').logTB(MythLog.GENERAL)
sys.exit(1)
else:
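The one-line change in this last file is the fix from commit f73a35b13c: when e.args[0] is an integer (an exit code or errno, for instance), 'ERROR: ' + e.args[0] raises a TypeError inside the error handler itself, so the Job comment is never updated. Wrapping the value in str() avoids that; a minimal illustration:

err_value = 13                          # e.args[0] may be an int, not a str
# comment = 'ERROR: ' + err_value       # TypeError: cannot concatenate str and int
comment = 'ERROR: ' + str(err_value)    # 'ERROR: 13'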