Working in Python 2.7.4 and Python 3.4.0; however, the Console isn't exiting correctly.
parent 55a5e41b00
commit 0dcd14a73b
@@ -77,6 +77,13 @@ def match_in_ignore_list( path, ignore_list ):
             return True
     return False
 
+use_bytearray_str_conversion = type( b"str" ) is not str
+def get_str_from_process_stdout( line ):
+    if use_bytearray_str_conversion:
+        return ''.join( map( chr, line ) )
+    else:
+        return line
+
 # Keep these in mind if you have issues:
 # https://stackoverflow.com/questions/16557908/getting-output-of-a-process-at-runtime
 # https://stackoverflow.com/questions/4417546/constantly-print-subprocess-output-while-process-is-running
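Note on the shim added above: when stdout is a pipe, Python 3 iterates proc.stdout as bytes while Python 2 yields str, which is exactly what the type( b"str" ) is not str check distinguishes. A minimal sketch of the same idea using bytes.decode instead of mapping chr over each byte (illustration only, not code from this commit; the "p4 clients" command is just a placeholder):

    import subprocess

    use_bytearray_str_conversion = type( b"str" ) is not str  # True on Python 3, False on Python 2

    proc = subprocess.Popen( [ "p4", "clients" ], stdout=subprocess.PIPE )  # placeholder command
    for raw_line in proc.stdout:
        # Python 3 pipes yield bytes, so decode; Python 2 already gives str.
        line = raw_line.decode( "utf-8", errors="replace" ) if use_bytearray_str_conversion else raw_line
        print( line.rstrip( ) )
    proc.wait( )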
@@ -89,6 +96,8 @@ def get_client_set( path ):
 
     proc = subprocess.Popen( command.split( ), stdout=subprocess.PIPE, stderr=None, cwd=path )
     for line in proc.stdout:
+        line = get_str_from_process_stdout( line )
+
         clientFile_tag = "... clientFile "
         if not line.startswith( clientFile_tag ):
             continue
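For context, the "... clientFile " prefix matches Perforce's tagged output format, where each field is printed as "... <name> <value>". A rough sketch of how such lines can be collected into a set (illustration only; the actual command and the rest of get_client_set are not shown in this hunk, so "p4 fstat ..." and the helper name below are assumptions):

    import subprocess

    def get_client_file_set( path ):  # hypothetical helper mirroring get_client_set
        command = "p4 fstat ..."  # assumed; the real command is outside this hunk
        clientFile_tag = "... clientFile "
        result = set( )

        proc = subprocess.Popen( command.split( ), stdout=subprocess.PIPE, stderr=None, cwd=path )
        for line in proc.stdout:
            # The commit routes this through get_str_from_process_stdout( ); decoding inline keeps the sketch self-contained.
            line = line.decode( "utf-8", errors="replace" ) if isinstance( line, bytes ) else line
            if not line.startswith( clientFile_tag ):
                continue
            result.add( line[ len( clientFile_tag ) : ].strip( ) )
        proc.wait( )
        return result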
@@ -112,6 +121,7 @@ class PDict( dict ):
         dict.__init__( self, args )
         self.mutex = multiprocessing.Semaphore( )
 
 
+# TODO: Create a child thread for triggering autoflush events
 class Console( threading.Thread ):
     MSG = enum('WRITE', 'FLUSH', 'SHUTDOWN', 'CLEAR' )
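PDict above is just a dict carrying a multiprocessing.Semaphore as a mutex; the Worker code removed further down used it as "with self.files_to_ignore.mutex:". A minimal sketch of that pattern (illustration only; the constructor signature is a guess based on the dict.__init__( self, args ) line):

    import multiprocessing

    class PDict( dict ):
        def __init__( self, *args ):
            dict.__init__( self, args )
            self.mutex = multiprocessing.Semaphore( )

    files_to_ignore = PDict( )

    # Semaphore( ) defaults to a count of 1, so `with` gives lock-style exclusion.
    with files_to_ignore.mutex:
        files_to_ignore.setdefault( "/some/dir", [ ] ).append( "example pattern" )  # hypothetical data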
@@ -124,6 +134,7 @@ class Console( threading.Thread ):
         self.queue = multiprocessing.JoinableQueue( )
         self.auto_flush_num = auto_flush_num if auto_flush_num is not None else -1
         self.auto_flush_time = auto_flush_time * 1000 if auto_flush_time is not None else -1
+        self.shutting_down = False
 
     def write( self, data, pid = None ):
         self.queue.put( ( Console.MSG.WRITE, pid if pid is not None else os.getpid(), data ) )
@@ -158,7 +169,11 @@ class Console( threading.Thread ):
                     for line in buffer:
                         print( line )
                 self.buffers.clear( )
+                self.buffer_write_times.clear( )
                 self.queue.task_done( )
+
+                print(self.queue.qsize())
+                print(self.queue.empty())
                 break
 
             elif event == Console.MSG.WRITE:
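The qsize( )/empty( ) prints added before the break look like debugging for the exit problem mentioned in the commit message. A frequent cause with a JoinableQueue is a get( ) that is not matched by exactly one task_done( ), which leaves queue.join( ) blocked even after the consumer hits break. A minimal sketch of the sentinel-style shutdown pattern (illustration only, not the Console class; None works as a sentinel because it comes back from the queue's pickling as the same singleton):

    import threading
    import multiprocessing

    class PrinterSketch( threading.Thread ):  # hypothetical stand-in for Console
        def __init__( self ):
            threading.Thread.__init__( self )
            self.queue = multiprocessing.JoinableQueue( )

        def write( self, data ):
            self.queue.put( data )

        def shutdown( self ):
            self.queue.put( None )  # sentinel

        def run( self ):
            while True:
                item = self.queue.get( )
                try:
                    if item is None:
                        break
                    print( item )
                finally:
                    # Exactly one task_done( ) per get( ); missing one makes queue.join( ) hang.
                    self.queue.task_done( )

    p = PrinterSketch( )
    p.start( )
    p.write( "hello" )
    p.shutdown( )
    p.queue.join( )  # returns only once every get( ) has been matched by task_done( )
    p.join( )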
@@ -174,6 +189,7 @@ class Console( threading.Thread ):
                     self.flush( pid )
                 elif self.auto_flush_time >= 0 and ( datetime.datetime.now( ) - self.buffer_write_times[ pid ] ).microseconds >= self.auto_flush_time:
                     self.flush( pid )
+                # TODO: if buffer is not empty and we don't auto flush on write, sleep until a time then auto flush according to auto_flush_time
             elif event == Console.MSG.FLUSH:
                 pid = data[ 1 ]
                 if pid in self.buffers:
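One way the auto-flush TODO above could be handled without a dedicated child thread is to poll the queue with a timeout and flush any buffer whose last write is too old. A sketch under that assumption (not this commit's code; the drain name and the console parameter are illustrative). It also avoids relying on timedelta.microseconds, which only holds the sub-second component, by using total_seconds( ):

    import datetime

    try:
        from Queue import Empty   # Python 2
    except ImportError:
        from queue import Empty   # Python 3

    # Shaped like the body of Console.run( ); `console` stands in for self.
    def drain( console ):
        while True:
            try:
                event = console.queue.get( timeout=0.1 )  # wake periodically even if nothing is written
            except Empty:
                event = None

            now = datetime.datetime.now( )
            for pid, last_write in list( console.buffer_write_times.items( ) ):
                elapsed_us = ( now - last_write ).total_seconds( ) * 1000000
                if console.auto_flush_time >= 0 and elapsed_us >= console.auto_flush_time:
                    console.flush( pid )

            if event is None:
                continue
            # ... handle WRITE / FLUSH / SHUTDOWN exactly as before, including task_done( ) ...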
@@ -188,128 +204,6 @@ class Console( threading.Thread ):
 
                 self.queue.task_done( )
 
-class Worker( threading.Thread ):
-    def __init__( self, console, queue, files_to_ignore ):
-        threading.Thread.__init__( self )
-
-        self.console = console
-        self.queue = queue
-        self.files_to_ignore = files_to_ignore
-
-    def run( self ):
-        while True:
-            ( cmd, data ) = self.queue.get( )
-
-            if cmd == MSG.SHUTDOWN:
-                self.queue.task_done( )
-                self.console.flush( )
-                break
-
-            if cmd != MSG.PARSE_DIRECTORY or data is None:
-                self.console.flush( )
-                self.queue.task_done( )
-                continue
-
-            directory = data
-
-            self.console.write( "Working on " + directory )
-
-            try:
-                dir_contents = os.listdir( directory )
-            except OSError as ex:
-                self.console.write( "| " + type( ex ).__name__ )
-                # args can be anything, can't guarantee they'll convert to a string
-                #self.console.write( "| " + ' '.join( [ str( arg ) for arg in ex.args ] ) )
-                self.console.write( "| " + repr( ex ) )
-                self.console.write( "|ERROR." )
-                self.console.flush( )
-                self.queue.task_done( )
-                continue
-
-            if p4_ignore in dir_contents:
-                file_regexes = []
-                # Should automatically ignore .p4ignore even if it's not specified, otherwise it'll be deleted.
-                path = join( directory, p4_ignore )
-                with open( path ) as f:
-                    for line in f:
-                        new_line = remove_comment( line.strip( ) )
-                        if len( new_line ) > 0:
-                            # doesn't look quite right, fix it:
-                            file_regexes.append( re.compile( join( re.escape( directory + os.sep ), new_line ) ) )
-
-                self.console.write( "| Appending ignores from " + path )
-                with self.files_to_ignore.mutex:
-                    if directory not in self.files_to_ignore:
-                        self.files_to_ignore[ directory ] = []
-                    self.files_to_ignore[ directory ].extend( file_regexes )
-
-
-            ignore_list = get_ignore_list( directory, self.files_to_ignore )
-
-
-            files = []
-            command = "p4 fstat *"
-
-            try:
-                proc = subprocess.Popen( command.split( ), stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=directory )
-                (out, err) = proc.communicate()
-            except Exception as ex:
-                self.console.write( "| " + type( ex ).__name__ )
-                # args can be anything, can't guarantee they'll convert to a string
-                #self.console.write( "| " + ' '.join( [ str( arg ) for arg in ex.args ] ) )
-                self.console.write( "| " + repr( ex ) )
-                self.console.write( "|ERROR." )
-                self.console.flush( )
-                self.queue.task_done( )
-                continue
-
-            for line in err.decode('utf-8').split( os.linesep ):
-                if len( line ) == 0:
-                    continue
-
-                # # dirty hack that grabs the filename from the ends of the printed out (not err) "depo_path - local_path"
-                # # I could use regex to verify the expected string, but that will just slow us down.
-                # base = basename( line )
-                i = line.rfind( ' - ')
-                if i >= 0:
-                    base = line[ : i ]
-                    if base == "*" or len(base) == 0:
-                        # Directory is empty, we could delete it now
-                        continue
-                    path = join( directory, base )
-
-                    if not os.path.isdir( path ):
-                        files.append( base )
-
-            for content in dir_contents:
-                path = join( directory, content )
-                if os.path.isdir( path ):
-                    if match_in_ignore_list( path, ignore_list ):
-                        self.console.write( "| Ignoring " + content )
-                    else:
-                        self.queue.put( ( MSG.PARSE_DIRECTORY, path ) )
-
-            for file in files:
-                path = join( directory, file )
-
-                if match_in_ignore_list( path, ignore_list ):
-                    self.console.write( "| Ignoring " + path )
-                    continue
-
-                self.console.write( "| " + file + " is unversioned, removing it." )
-                try:
-                    os.chmod( path, stat.S_IWRITE )
-                    os.remove( path )
-                except OSError as ex:
-                    self.console.write( "| " + type( ex ).__name__ )
-                    self.console.write( "| " + repr( ex ) )
-                    self.console.write( "|ERROR." )
-
-            self.console.write( "|Done." )
-            self.console.flush( )
-
-            self.queue.task_done( )
-
 def main( args ):
     # check requirements
     if os.system( 'p4 > Nul' ) != 0:
@@ -334,7 +228,7 @@ def main( args ):
 
         c.writeflush( "Checking " + directory)
         for root, dirs, files in os.walk( directory ):
-            ignore_list = PDict()#get_ignore_list( root, files_to_ignore )
+            ignore_list = {}#get_ignore_list( root, files_to_ignore )
 
             c.write( "|Checking " + root )
 
@@ -359,6 +253,7 @@ def main( args ):
             # c.writeflush( "| " + type( ex ).__name__ )
             # c.writeflush( "| " + repr( ex ) )
             # c.writeflush( "|ERROR." )
+
             c.write( "|Done." )
 
 if __name__ == "__main__":