I added dry-run to p4SyncMissing some time ago; I don't remember whether I finished it, but I don't have time to test it and don't want to lose the work, so I'm submitting it. Wow, is this a rough testing branch or what.
Add TODOs. Fix some directory bugs when running scripts with a cwd not in the p4 workspace. TODO: make sure all scripts and options work when run outside the p4 workspace. If the user isn't logged in, you can get weird errors later in the pipeline, and without extra manually added prints you wouldn't know you just need to log in. Added a TODO about detecting whether we need to do a p4 login. Some of my stuff seems to have stopped working with later versions of Python/p4; I had to update a string to a byte string, and no doubt more of these issues are hiding. I haven't tested on Python 2 in a while — do not consider these scripts working there.
This commit is contained in:
parent
3aa1373758
commit
85de0ec1ca
4 changed files with 172 additions and 50 deletions
69
p4GetCLDetails.py
Normal file
69
p4GetCLDetails.py
Normal file
|
|
@ -0,0 +1,69 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf8 -*-
|
||||
# author: Brian Ernst
|
||||
|
||||
"""
|
||||
This is an experimental script for fetching changelist details based on changelists
|
||||
containing a specific search string of your choice.
|
||||
A future version of this script would generate unified diffs of the desired output.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import pathlib
|
||||
import subprocess
|
||||
|
||||
# TODO: Use p4helper instead of adding this.
|
||||
# TODO: Consider updating helpers to use json output instead of parsing the standard p4 output
|
||||
def get_proc_output( args, path = None ):
    """
    Run a command and return its captured stdout as text.

    Args:
        args: Command to run — either a sequence of program arguments, or a
            plain string (split on whitespace so it also works without a
            shell on POSIX; a string command only works on Windows).
        path (str): Working directory for the child process, or None to
            inherit the current one.

    Returns:
        str: The command's stdout decoded as UTF-8. stderr is captured but
        discarded, matching the original behavior.
    """
    # Accept the plain-string commands built elsewhere in this script.
    if isinstance( args, str ):
        args = args.split( )
    proc = subprocess.Popen( args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=path, encoding='utf-8' )
    # Do NOT call proc.wait() before communicate(): with both pipes open,
    # wait() can deadlock if the child fills a pipe buffer. communicate()
    # drains both pipes and waits for the process to exit.
    data, err = proc.communicate( )
    return data
|
||||
|
||||
def output_cls_with_string(directory = None):
    """
    Scan submitted changelists for a hard-coded search string and append the
    full `p4 describe` output of each match to a log file in the cwd.

    Args:
        directory (str): This is the directory to limit scanning to, can be None.
            MUST use Perforce syntax; if you want to scan a directory it must
            end with `/...` to search the path.

    Returns:
        int: Number of matching changelists written to the log file.
    """

    cwd = os.getcwd()

    # -ztag -Mj makes p4 emit one JSON object per changelist.
    command = f"p4 -ztag -Mj changes -L {directory if directory else ''}"

    wrote = False
    count = 0
    # TODO: update to arg output
    with open(pathlib.Path(cwd) / 'TODO_UPDATE_SCRIPT.log', 'w') as log_file:
        # Pass the command as an argument list so it works without a shell on
        # POSIX (a plain command string only works on Windows).
        proc = subprocess.Popen( command.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd )
        for line in proc.stdout:
            j = json.loads(line)

            # TODO: update to arg search term
            if 'TODO_SEARCH_TERMS' not in j['desc'].lower():
                continue

            print(j)
            print()

            command2 = f"p4 describe {j['change']}"
            output = get_proc_output(command2, path=cwd)
            # Separate entries with blank lines after the first write.
            if wrote:
                log_file.write('\n\n\n\n')
            wrote = True
            log_file.write(output)
            # Flush per entry so a partial run still leaves a usable log.
            log_file.flush()

            count += 1
        # Reap the child so we don't leave a zombie process behind.
        proc.wait()
    return count
|
||||
|
||||
if __name__ == "__main__":
    # Command-line entry point: forward the optional directory filter on to
    # the changelist scan.
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('-d', '--directory')
    parsed = arg_parser.parse_args()
    output_cls_with_string(directory = parsed.directory)
|
||||
24
p4Helper.py
24
p4Helper.py
|
|
@ -146,6 +146,9 @@ def fail_if_no_p4():
|
|||
print( 'Perforce Command-line Client(p4) is required for this script.' )
|
||||
sys.exit( 1 )
|
||||
|
||||
# TODO: Do an operation that would trigger login error like P4PASSWD missing.
|
||||
# See if we need to trigger a p4 login or can avoid it.
|
||||
|
||||
# Keep these in mind if you have issues:
|
||||
# https://stackoverflow.com/questions/16557908/getting-output-of-a-process-at-runtime
|
||||
# https://stackoverflow.com/questions/4417546/constantly-print-subprocess-output-while-process-is-running
|
||||
|
|
@ -174,13 +177,13 @@ def get_client_set( path ):
|
|||
proc.wait( )
|
||||
|
||||
for line in proc.stderr:
|
||||
if "no such file" in line:
|
||||
if b"no such file" in line:
|
||||
continue
|
||||
raise Exception(line)
|
||||
|
||||
return files
|
||||
|
||||
def get_client_root( ):
|
||||
def get_p4_info(info_tag):
|
||||
"""
|
||||
|
||||
:rtype : string
|
||||
|
|
@ -191,16 +194,23 @@ def get_client_root( ):
|
|||
for line in proc.stdout:
|
||||
line = get_str_from_process_stdout( line )
|
||||
|
||||
clientFile_tag = "Client root: "
|
||||
if not line.startswith( clientFile_tag ):
|
||||
if not line.startswith( info_tag ):
|
||||
continue
|
||||
|
||||
local_path = normpath( line[ len( clientFile_tag ) : ].strip( ) )
|
||||
local_path = normpath( line[ len( info_tag ) : ].strip( ) )
|
||||
if local_path == "null":
|
||||
local_path = None
|
||||
return local_path
|
||||
return None
|
||||
|
||||
def get_client_stream( ):
    # Thin wrapper: look up the "Client stream" field from `p4 info`.
    return get_p4_info("Client stream: ")
|
||||
|
||||
def get_client_root( ):
    # Thin wrapper: look up the "Client root" field from `p4 info`.
    return get_p4_info("Client root: ")
|
||||
|
||||
class P4Workspace:
|
||||
"""
|
||||
Use this class when working in a workspace.
|
||||
|
|
@ -218,7 +228,7 @@ class P4Workspace:
|
|||
def __enter__( self ):
|
||||
# get user
|
||||
#print("\nChecking p4 info...")
|
||||
result = get_p4_py_results('info')
|
||||
result = get_p4_py_results('info', self.directory)
|
||||
if len(result) == 0 or b'userName' not in result[0].keys():
|
||||
print("Can't find perforce info, is it even setup? Possibly can't connect to server.")
|
||||
sys.exit(1)
|
||||
|
|
@ -248,7 +258,7 @@ class P4Workspace:
|
|||
oldworkspace_name = parse_info_from_command('p4 info', 'Client name: ')
|
||||
|
||||
# get user workspaces
|
||||
results = get_p4_py_results('workspaces -u ' + username)
|
||||
results = get_p4_py_results('workspaces -u ' + username, self.directory)
|
||||
workspaces = []
|
||||
for r in results:
|
||||
whost = get_str_from_process_stdout(r[b'Host'])
|
||||
|
|
|
|||
|
|
@ -23,6 +23,7 @@ def main( args ):
|
|||
#http://docs.python.org/library/optparse.html
|
||||
parser = optparse.OptionParser( )
|
||||
|
||||
# TODO: Add dry-run option.
|
||||
parser.add_option( "-d", "--dir", dest="directory", help="Desired directory to crawl.", default=None )
|
||||
parser.add_option( "-t", "--threads", dest="thread_count", help="Number of threads to crawl your drive and poll p4.", default=100 )
|
||||
parser.add_option( "-q", "--quiet", action="store_true", dest="quiet", help="This overrides verbose", default=False )
|
||||
|
|
|
|||
|
|
@ -25,6 +25,7 @@ class P4SyncMissing:
|
|||
parser.add_option( "-d", "--dir", dest="directory", help="Desired directory to crawl.", default=None )
|
||||
parser.add_option( "-t", "--threads", dest="thread_count", help="Number of threads to crawl your drive and poll p4.", default=12 )
|
||||
parser.add_option( "-q", "--quiet", action="store_true", dest="quiet", help="This overrides verbose", default=False )
|
||||
parser.add_option( "--dry-run", action="store_true", dest="dry_run", help="Whether the script should not make any changes but note what files it would try to sync.", default=False )
|
||||
parser.add_option( "-v", "--verbose", action="store_true", dest="verbose", default=False )
|
||||
|
||||
( options, args ) = parser.parse_args( args )
|
||||
|
|
@ -34,7 +35,7 @@ class P4SyncMissing:
|
|||
with Console( auto_flush_time=1 ) as c:
|
||||
with P4Workspace( directory ):
|
||||
if not options.quiet:
|
||||
c.writeflush( "Retreiving missing files..." )
|
||||
c.writeflush( "Setting up to retreive missing files..." )
|
||||
c.writeflush( " Setting up threads..." )
|
||||
|
||||
# Setup threading
|
||||
|
|
@ -83,6 +84,9 @@ class P4SyncMissing:
|
|||
threads = [ ]
|
||||
thread_count = options.thread_count if options.thread_count > 0 else multiprocessing.cpu_count( ) + options.thread_count
|
||||
|
||||
if not options.quiet:
|
||||
c.writeflush( f" Setting up {thread_count} threads..." )
|
||||
|
||||
count = 0
|
||||
total = 0
|
||||
self.queue = multiprocessing.JoinableQueue( )
|
||||
|
|
@ -101,7 +105,7 @@ class P4SyncMissing:
|
|||
command = "p4 fstat ..."
|
||||
|
||||
if not options.quiet:
|
||||
c.writeflush( " Checking files in depot, this may take some time for large depots..." )
|
||||
c.writeflush( f" Checking files in workspace (on stream `{get_client_stream()}`), this may take some time for large depots..." )
|
||||
|
||||
proc = subprocess.Popen( command.split( ), stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=directory )
|
||||
|
||||
|
|
@ -141,16 +145,28 @@ class P4SyncMissing:
|
|||
return
|
||||
if options.verbose:
|
||||
for f in bucket.queue:
|
||||
c.write( " Checking " + os.path.relpath( f, directory ) )
|
||||
c.write( " Queued " + os.path.relpath( f, directory ) )
|
||||
self.queue.put( ( WRK.SYNC, bucket.queue ) )
|
||||
bucket.queue = []
|
||||
bucket.queue_size = 0
|
||||
|
||||
# Wrap with a try block for catching an early termination so we can clear the queue so threads don't keep spinning.
|
||||
try:
|
||||
# From here on out we have to scrape a bunch of lines just for
|
||||
# one file. We have to build up a picture from the metadata about
|
||||
# the state of the file.
|
||||
#
|
||||
# This means we're iterating a few times until we have client_file
|
||||
# and file_action. Yes it's bad we wait to check client_file and
|
||||
# file_action as long as there's pending lines, we should check
|
||||
# that outside of the loop, not within.
|
||||
for line in proc.stdout:
|
||||
line = get_str_from_process_stdout( line )
|
||||
|
||||
#push work when finding out type
|
||||
# Push work when finding out type and that it needs to be synced.
|
||||
if client_file and file_action is not None and line.startswith( headType_tag ):
|
||||
if not options.dry_run:
|
||||
# Add file to queue to be synced.
|
||||
|
||||
file_type = normpath( line[ len( headType_tag ) : ].strip( ) )
|
||||
if file_type == file_type_text:
|
||||
|
|
@ -159,17 +175,31 @@ class P4SyncMissing:
|
|||
self.buckets[file_type_binary].append(client_file)
|
||||
count += 1
|
||||
|
||||
#check sizes and push
|
||||
# We wait to kick off a sync until we consider it to
|
||||
# be a minimum sufficient size, then once it is, we
|
||||
# kick it for syncing.
|
||||
for b in self.buckets.values():
|
||||
if b.is_full():
|
||||
push_queued(b)
|
||||
else:
|
||||
c.write( f" {os.path.relpath( client_file, directory )}")
|
||||
|
||||
elif client_file and line.startswith( headAction_tag ):
|
||||
# Even if we're ignoring the file, count it as being processed.
|
||||
total += 1
|
||||
|
||||
file_action = normpath( line[ len( headAction_tag ) : ].strip( ) )
|
||||
|
||||
if any(file_action == a for a in rejected_actions):
|
||||
# If the headAction indicates we shouldn't waste time
|
||||
# syncing, don't indicate a valid action for our
|
||||
# purposes. Yes we're hijacking the file_action, not
|
||||
# the best way to do this.
|
||||
file_action = None
|
||||
else:
|
||||
total += 1
|
||||
# If file exists, we don't need to sync it, don't
|
||||
# indicate valid action for our purposes. Yes we're
|
||||
# hijacking the file_action, not the best way to do this.
|
||||
if os.path.exists( client_file ):
|
||||
file_action = None
|
||||
|
||||
|
|
@ -196,9 +226,21 @@ class P4SyncMissing:
|
|||
c.writeflush( " Done. Checked " + str(total) + " file(s)." )
|
||||
c.writeflush( " Queued " + str(count) + " file(s), now waiting for threads..." )
|
||||
|
||||
#for i in range( thread_count ):
|
||||
#self.queue.put( ( WRK.SHUTDOWN, None ) )
|
||||
|
||||
#for t in threads:
|
||||
#t.join( )
|
||||
|
||||
self.queue.join()
|
||||
|
||||
except KeyboardInterrupt:
|
||||
# Don't leave threads to keep working.
|
||||
self.queue.empty()
|
||||
|
||||
# Wait for threads to finish logging everything.
|
||||
for i in range( thread_count ):
|
||||
self.queue.put( ( WRK.SHUTDOWN, None ) )
|
||||
|
||||
for t in threads:
|
||||
t.join( )
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue