bug 432332: improve handling of duplicate chunks in url classifier client request, patch by Dave Camp <dcamp@mozilla.com>, r=tony, a=beltzner

git-svn-id: svn://10.0.0.236/trunk@251295 18797224-902f-48f8-a5cc-f745e15eee43
gavin%gavinsharp.com 2008-05-07 01:51:13 +00:00
parent 980acad646
commit 702471cda7
4 changed files with 72 additions and 11 deletions


@@ -128,7 +128,7 @@ static const PRLogModuleInfo *gUrlClassifierDbServiceLog = nsnull;
 // want to change schema, or to recover from updating bugs. When an
 // implementation version change is detected, the database is scrapped
 // and we start over.
-#define IMPLEMENTATION_VERSION 4
+#define IMPLEMENTATION_VERSION 5

 #define MAX_HOST_COMPONENTS 5
 #define MAX_PATH_COMPONENTS 4
@@ -1078,6 +1078,10 @@ private:
   // Flush the cached add/subtract lists to the database.
   nsresult FlushChunkLists();

+  // Inserts a chunk id into the list, sorted. Returns TRUE if the
+  // number was successfully added, FALSE if the chunk already exists.
+  PRBool InsertChunkId(nsTArray<PRUint32>& chunks, PRUint32 chunkNum);
+
   // Add a list of entries to the database, merging with
   // existing entries as necessary
   nsresult AddChunk(PRUint32 tableId, PRUint32 chunkNum,
@@ -2330,6 +2334,25 @@ nsUrlClassifierDBServiceWorker::ClearCachedChunkLists()
   mHaveCachedSubChunks = PR_FALSE;
 }

+PRBool
+nsUrlClassifierDBServiceWorker::InsertChunkId(nsTArray<PRUint32> &chunks,
+                                              PRUint32 chunkNum)
+{
+  PRUint32 low = 0, high = chunks.Length();
+  while (high > low) {
+    PRUint32 mid = (high + low) >> 1;
+    if (chunks[mid] == chunkNum)
+      return PR_FALSE;
+    if (chunks[mid] < chunkNum)
+      low = mid + 1;
+    else
+      high = mid;
+  }
+
+  PRUint32 *item = chunks.InsertElementAt(low, chunkNum);
+  return (item != nsnull);
+}
+
 nsresult
 nsUrlClassifierDBServiceWorker::AddChunk(PRUint32 tableId,
                                          PRUint32 chunkNum,
@@ -2342,11 +2365,15 @@ nsUrlClassifierDBServiceWorker::AddChunk(PRUint32 tableId,
   }
 #endif

-  LOG(("Adding %d entries to chunk %d in table %d", entries.Length(), chunkNum, tableId));
-
   nsresult rv = CacheChunkLists(tableId, PR_TRUE, PR_FALSE);
   NS_ENSURE_SUCCESS(rv, rv);

-  mCachedAddChunks.AppendElement(chunkNum);
+  if (!InsertChunkId(mCachedAddChunks, chunkNum)) {
+    LOG(("Ignoring duplicate add chunk %d in table %d", chunkNum, tableId));
+    return NS_OK;
+  }
+
+  LOG(("Adding %d entries to chunk %d in table %d", entries.Length(), chunkNum, tableId));

   nsTArray<PRUint32> entryIDs;
@@ -2433,7 +2460,13 @@ nsUrlClassifierDBServiceWorker::SubChunk(PRUint32 tableId,
                                          nsTArray<nsUrlClassifierEntry>& entries)
 {
   nsresult rv = CacheChunkLists(tableId, PR_FALSE, PR_TRUE);
-  mCachedSubChunks.AppendElement(chunkNum);
+
+  if (!InsertChunkId(mCachedSubChunks, chunkNum)) {
+    LOG(("Ignoring duplicate sub chunk %d in table %d", chunkNum, tableId));
+    return NS_OK;
+  }
+
+  LOG(("Subbing %d entries in chunk %d in table %d", entries.Length(), chunkNum, tableId));

   nsAutoTArray<nsUrlClassifierEntry, 5> existingEntries;
   nsUrlClassifierDomainHash lastKey;
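
The InsertChunkId helper added above keeps each cached chunk-id list sorted and binary-searches it before inserting, so a chunk number that has already been seen is rejected instead of being appended a second time. Below is a minimal standalone sketch of the same idea, with std::vector and bool standing in for nsTArray and PRBool; the name InsertChunkIdSketch is illustrative and not part of the patch.

  #include <cstddef>
  #include <cstdint>
  #include <vector>

  // Keep `chunks` sorted ascending. Returns false if chunkNum is already
  // present (a duplicate chunk), true after inserting it at its sorted position.
  static bool InsertChunkIdSketch(std::vector<uint32_t>& chunks, uint32_t chunkNum)
  {
    std::size_t low = 0, high = chunks.size();
    while (high > low) {
      std::size_t mid = (high + low) >> 1;
      if (chunks[mid] == chunkNum)
        return false;                 // duplicate: the caller ignores this chunk
      if (chunks[mid] < chunkNum)
        low = mid + 1;
      else
        high = mid;
    }
    chunks.insert(chunks.begin() + low, chunkNum);
    return true;
  }

The sorted list is what makes the duplicate check cheap: each new chunk number costs one binary search rather than a linear scan of every cached id.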


@@ -20,6 +20,7 @@ if (!profileDir) {
 // It will simply return the current directory.
 var provider = {
   getFile: function(prop, persistent) {
+    dump("getting file " + prop + "\n");
     persistent.value = true;
     if (prop == NS_APP_USER_PROFILE_50_DIR ||
         prop == NS_APP_USER_PROFILE_LOCAL_50_DIR) {


@@ -335,6 +335,28 @@ function testExpireLists() {
   doTest([addUpdate, subUpdate, expireUpdate], assertions);
 }

+// Test a duplicate add chunk.
+function testDuplicateAddChunks() {
+  var addUrls1 = [ "foo.com/a" ];
+  var addUrls2 = [ "bar.com/b" ];
+  var update = buildPhishingUpdate(
+    [
+      { "chunkNum" : 1,
+        "urls" : addUrls1
+      },
+      { "chunkNum" : 1,
+        "urls" : addUrls2
+      }]);
+
+  var assertions = {
+    "tableData" : "test-phish-simple;a:1",
+    "urlsExist" : addUrls1,
+    "urlsDontExist" : addUrls2
+  };
+
+  doTest([update], assertions);
+}
+
 function run_test()
 {
   runTests([
@@ -350,6 +372,7 @@ function run_test()
     testSubPartiallyMatches2,
     testSubsDifferentChunks,
     testExpireLists,
+    testDuplicateAddChunks
   ]);
 }
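
testDuplicateAddChunks sends two add chunks that both claim chunk number 1 and expects only the first batch of URLs to be applied. A hypothetical driver in the same spirit, using std::lower_bound as an equivalent stand-in for the binary search in the patch, shows how the second batch ends up dropped; the variable names and chunk numbers here are illustrative only.

  #include <algorithm>
  #include <cstdint>
  #include <cstdio>
  #include <vector>

  int main()
  {
    std::vector<uint32_t> cachedAddChunks;   // kept sorted, as in the worker
    const uint32_t incoming[] = { 1, 1 };    // two add chunks with the same number

    for (uint32_t chunkNum : incoming) {
      auto pos = std::lower_bound(cachedAddChunks.begin(), cachedAddChunks.end(), chunkNum);
      if (pos != cachedAddChunks.end() && *pos == chunkNum) {
        // Second chunk 1: its URLs (addUrls2 in the test) are never applied.
        std::printf("ignoring duplicate add chunk %u\n", chunkNum);
        continue;
      }
      cachedAddChunks.insert(pos, chunkNum);
      // First chunk 1: its URLs (addUrls1 in the test) are applied.
      std::printf("applying add chunk %u\n", chunkNum);
    }
    return 0;
  }

This mirrors the test's assertions: urlsExist for addUrls1, urlsDontExist for addUrls2, and tableData reporting a single add chunk ("a:1").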


@@ -100,12 +100,14 @@ function testInvalidUrlForward() {
                 "urls" : add1Urls }]);
   update += "u:asdf://blah/blah\n"; // invalid URL scheme

+  // The first part of the update should have succeeded.
+
   var assertions = {
-    "tableData" : "",
-    "urlsDontExist" : add1Urls
+    "tableData" : "test-phish-simple;a:1",
+    "urlsExist" : add1Urls
   };

-  doTest([update], assertions, true);
+  doTest([update], assertions, false);
 }

 // A failed network request causes the update to fail.
@@ -117,12 +119,14 @@ function testErrorUrlForward() {
                 "urls" : add1Urls }]);
   update += "u:http://test.invalid/asdf/asdf\n"; // this forward fails with a network error

+  // The first part of the update should have succeeded
+
   var assertions = {
-    "tableData" : "",
-    "urlsDontExist" : add1Urls
+    "tableData" : "test-phish-simple;a:1",
+    "urlsExist" : add1Urls
   };

-  doTest([update], assertions, true);
+  doTest([update], assertions, false);
 }

 function testMultipleTables() {
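
The revised assertions in both of these tests encode the same expectation: when a later forwarded URL in a multi-part update cannot be fetched, the chunks that were already applied stay applied, which is why tableData becomes "test-phish-simple;a:1" and the first chunk's URLs now exist. A rough, hedged sketch of that control flow follows; the UpdatePart struct and FetchForward helper are made-up stand-ins, not the real stream-updater code.

  #include <cstdio>
  #include <string>
  #include <vector>

  // Hypothetical stand-ins for the update machinery; names are illustrative.
  struct UpdatePart {
    std::string chunkData;    // e.g. the "a:1" add chunk
    std::string forwardUrl;   // e.g. "asdf://blah/blah" or "http://test.invalid/asdf/asdf"
  };

  static bool FetchForward(const std::string& url)
  {
    // Pretend every forwarded fetch fails, as it does in both tests.
    std::printf("fetch of %s failed\n", url.c_str());
    return false;
  }

  int main()
  {
    std::vector<std::string> committed;        // chunks already applied to the database
    const std::vector<UpdatePart> parts = {
      { "a:1", "" },                           // first part: applied and kept
      { "", "http://test.invalid/asdf/asdf" }  // second part: the forward fails
    };

    for (const UpdatePart& part : parts) {
      if (!part.chunkData.empty())
        committed.push_back(part.chunkData);   // earlier chunks stay committed
      if (!part.forwardUrl.empty() && !FetchForward(part.forwardUrl))
        break;                                 // the rest of the update is abandoned
    }

    // committed still holds "a:1", matching the revised tableData/urlsExist assertions.
    std::printf("committed chunks: %zu\n", committed.size());
    return 0;
  }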