The --changes-block-size handling was intended to help when
a user has a limited maxscanrows (see 'p4 group'). It used
"p4 changes -m $maxchanges" to limit the number of results.
Unfortunately, it turns out that the maxscanrows and maxresults
limits are actually applied *before* the -m maxchanges parameter
is considered (this was determined experimentally).
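For example (an illustrative command; the depot path is made up), a
user whose group has a small maxscanrows cannot list even a single
change once the path spans more history than the limit allows:

    p4 changes -m 1 //depot/some/large/path/...

fails for such a user, which is exactly what the 'Default user cannot
fetch changes' test below relies on.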
Fix the block-size handling so that it gets blocks of changes
limited by revision number ($Start..$Start+$N, etc). This limits
the number of results early enough that both sets of tests pass.
If the --changes-block-size option is not in use, then the code
naturally falls back to the original scheme and gets as many changes
as possible.
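Concretely (an illustrative sketch, not tool output; the path and
numbers are made up), with --changes-block-size=500 the changes are
now requested in bounded revision ranges, roughly:

    p4 changes //depot/project/...@1,@501
    p4 changes //depot/project/...@502,@1002
    ...

so each query spans only block_size+1 changelist numbers, which bounds
the number of results early enough for the limited client in the tests,
instead of relying on -m to trim the results after the fact.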
Unfortunately, it also turns out that p4 print can fail on
files with more changes than maxscanrows. This fix is unable to
work around this problem, although in the real world this shouldn't
normally happen.
Signed-off-by: Luke Diamand <l...@diamand.org>
---
git-p4.py | 48 +++-
t/t9818-git-p4-block.sh | 12 ++--
2 files changed, 41 insertions(+), 19 deletions(-)
diff --git a/git-p4.py b/git-p4.py
index 26ad4bc..0e29b75 100755
--- a/git-p4.py
+++ b/git-p4.py
@@ -744,41 +744,63 @@ def originP4BranchesExist():
def p4ChangesForPaths(depotPaths, changeRange, block_size):
assert depotPaths
-assert block_size
# Parse the change range into start and end
if changeRange is None or changeRange == '':
-changeStart = '@1'
-changeEnd = '#head'
+changeStart = 1
+changeEnd = None
else:
parts = changeRange.split(',')
assert len(parts) == 2
-changeStart = parts[0]
-changeEnd = parts[1]
+changeStart = int(parts[0][1:])
+if parts[1] == '#head':
+changeEnd = None
+else:
+changeEnd = int(parts[1])
# Accumulate change numbers in a dictionary to avoid duplicates
changes = {}
+# We need the most recent change list number if we're operating in
+# batch mode. For whatever reason, clients with limited MaxResults
+# can get this for the entire depot, but not for individual bits of
+# the depot.
+if block_size:
+results = p4CmdList(["changes", "-m", "1"])
+mostRecentCommit = int(results[0]['change'])
+
for p in depotPaths:
# Retrieve changes a block at a time, to prevent running
# into a MaxScanRows error from the server.
start = changeStart
-end = changeEnd
get_another_block = True
while get_another_block:
new_changes = []
cmd = ['changes']
-cmd += ['-m', str(block_size)]
-cmd += ["%s...%s,%s" % (p, start, end)]
+
+if block_size:
+end = changeStart + block_size # only fetch a few at a time
+else:
+end = changeEnd # fetch as many as possible
+
+if end:
+endStr = str(end)
+else:
+endStr = '#head'
+
+cmd += ["%s...@%d,%s" % (p, changeStart, endStr)]
for line in p4_read_pipe_lines(cmd):
changeNum = int(line.split(" ")[1])
new_changes.append(changeNum)
changes[changeNum] = True
-if len(new_changes) == block_size:
-get_another_block = True
-end = '@' + str(min(new_changes))
-else:
+
+if not block_size:
+# Not batched, so nothing more to do
get_another_block = False
+elif end >= mostRecentCommit:
+get_another_block = False
+else:
+changeStart = end + 1
changelist = changes.keys()
changelist.sort()
@@ -1974,7 +1996,7 @@ class P4Sync(Command, P4UserMap):
self.syncWithOrigin = True
self.importIntoRemotes = True
self.maxChanges = ""
-self.changes_block_size = 500
+self.changes_block_size = None
self.keepRepoPath = False
self.depotPaths = None
self.p4BranchesInGit = []
diff --git a/t/t9818-git-p4-block.sh b/t/t9818-git-p4-block.sh
index aae1121..3b3ae1f 100755
--- a/t/t9818-git-p4-block.sh
+++ b/t/t9818-git-p4-block.sh
@@ -49,11 +49,11 @@ test_expect_success 'Default user cannot fetch changes' '
! p4 changes -m 1 //depot/...
'
-test_expect_failure 'Clone the repo' '
+test_expect_success 'Clone the repo' '
git p4 clone --dest="$git" --changes-block-size=7 --verbose //depot/included@all
'
-test_expect_failure 'All files are present' '
+test_expect_success 'All files are present' '
echo file.txt >expected &&
test_write_lines outer0.txt outer1.txt outer2.txt outer3.txt outer4.txt >>expected &&
test_write_lines outer5.txt >>expected &&
@@ -61,18 +61,18 @@ test_expect_failure 'All files are present' '
test_cmp expected current
'
-test_expect_failure 'file.txt is