Cap the numbers of cookies per domain and cookies total, respectively.

This is implemented in HashStore#cleanup(), which is automatically
called by #add() after every 150 updates.
This commit is contained in:
Akinori MUSHA 2013-03-15 04:09:08 +09:00
parent d004408296
commit 8d8f01fa81
3 changed files with 107 additions and 0 deletions

View file

@ -11,6 +11,10 @@ end
class HTTP::Cookie
# Maximum number of bytes per cookie (RFC 6265 6.1 requires 4096 at least)
MAX_LENGTH = 4096
# Maximum number of cookies per domain (RFC 6265 6.1 requires 50 at least)
MAX_COOKIES_PER_DOMAIN = 50
# Maximum number of cookies total (RFC 6265 6.1 requires 3000 at least)
MAX_COOKIES_TOTAL = 3000
UNIX_EPOCH = Time.at(0)

View file

@ -8,6 +8,8 @@ end
class HTTP::CookieJar
class HashStore < AbstractStore
GC_THRESHOLD = HTTP::Cookie::MAX_COOKIES_TOTAL / 20
def default_options
{}
end
@ -41,6 +43,7 @@ class HTTP::CookieJar
path_cookies.delete(cookie.name)
else
path_cookies[cookie.name] = cookie
cleanup if (@gc_index += 1) >= GC_THRESHOLD
end
self
@ -90,5 +93,49 @@ class HTTP::CookieJar
def empty?
@jar.empty?
end
def cleanup(session = false)
all_cookies = []
@jar.each { |domain, paths|
domain_cookies = []
paths.each { |path, hash|
hash.delete_if { |name, cookie|
if cookie.expired? || (session && cookie.session?)
true
else
domain_cookies << cookie
false
end
}
}
if (debt = domain_cookies.size - HTTP::Cookie::MAX_COOKIES_PER_DOMAIN) > 0
domain_cookies.sort_by!(&:created_at)
domain_cookies.slice!(0, debt).each { |cookie|
add(cookie.expire)
}
end
all_cookies.concat(domain_cookies)
}
if (debt = all_cookies.size - HTTP::Cookie::MAX_COOKIES_TOTAL) > 0
all_cookies.sort_by!(&:created_at)
all_cookies.slice!(0, debt).each { |cookie|
add(cookie.expire)
}
end
@jar.delete_if { |domain, paths|
paths.delete_if { |path, hash|
hash.empty?
}
paths.empty?
}
@gc_index = 0
end
end
end

View file

@ -510,4 +510,60 @@ class TestHTTPCookieJar < Test::Unit::TestCase
assert_equal('Foo1', @jar.cookies(nurl).map { |c| c.name }.sort.join(' ') )
assert_equal('Foo1 Foo2', @jar.cookies(surl).map { |c| c.name }.sort.join(' ') )
end
def test_max_cookies
jar = HTTP::CookieJar.new
limit_per_domain = HTTP::Cookie::MAX_COOKIES_PER_DOMAIN
uri = URI('http://www.example.org/')
date = Time.at(Time.now.to_i + 86400)
(1..(limit_per_domain + 1)).each { |i|
jar << HTTP::Cookie.new(cookie_values(
:name => 'Foo%d' % i,
:value => 'Bar%d' % i,
:domain => uri.host,
:for_domain => true,
:path => '/dir%d/' % (i / 2),
:origin => uri,
)).tap { |cookie|
cookie.created_at = i == 42 ? date - i : date
}
}
assert_equal limit_per_domain + 1, jar.to_a.size
jar.cleanup
count = jar.to_a.size
assert_equal limit_per_domain, count
assert_equal [*1..41] + [*43..(limit_per_domain + 1)], jar.map { |cookie|
cookie.name[/(?<=^Foo)(\d+)$/].to_i
}
hlimit = HTTP::Cookie::MAX_COOKIES_TOTAL
slimit = hlimit + HTTP::CookieJar::HashStore::GC_THRESHOLD
n = hlimit / limit_per_domain * 2
(1..n).each { |i|
(1..(limit_per_domain + 1)).each { |j|
uri = URI('http://www%d.example.jp/' % i)
jar << HTTP::Cookie.new(cookie_values(
:name => 'Baz%d' % j,
:value => 'www%d.example.jp' % j,
:domain => uri.host,
:for_domain => true,
:path => '/dir%d/' % (i / 2),
:origin => uri,
)).tap { |cookie|
cookie.created_at = i == j ? date - i : date
}
count += 1
}
}
assert_equal true, count > slimit
assert_equal true, jar.to_a.size <= slimit
jar.cleanup
assert_equal hlimit, jar.to_a.size
assert_equal false, jar.any? { |cookie|
cookie.domain == cookie.value
}
end
end