Skip to content

Commit 033088d

Browse files
committed
Remove prefer_cached parameter
With proper freshness handling of stored responses, the prefer_cached argument becomes obsolete. It was only used by the readme client, which is used by packagecontrol.io. Hence it hasn't been of any importance for quite a while in PC4.
1 parent e102b62 commit 033088d

10 files changed

Lines changed: 24 additions & 57 deletions

package_control/clients/bitbucket_client.py

Lines changed: 2 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -341,7 +341,7 @@ def _api_url(self, user_repo, suffix=''):
341341

342342
return 'https://api.bitbucket.org/2.0/repositories/%s%s' % (user_repo, suffix)
343343

344-
def _readme_url(self, user_repo, branch, prefer_cached=False):
344+
def _readme_url(self, user_repo, branch):
345345
"""
346346
Parse the root directory listing for the repo and return the URL
347347
to any file that looks like a readme
@@ -352,9 +352,6 @@ def _readme_url(self, user_repo, branch, prefer_cached=False):
352352
:param branch:
353353
The branch to fetch the readme from
354354
355-
:param prefer_cached:
356-
If a cached directory listing should be used instead of a new HTTP request
357-
358355
:raises:
359356
DownloaderException: when there is an error downloading
360357
ClientException: when there is an error parsing the response
@@ -367,7 +364,7 @@ def _readme_url(self, user_repo, branch, prefer_cached=False):
367364

368365
try:
369366
while listing_url:
370-
root_dir_info = self.fetch_json(listing_url, prefer_cached)
367+
root_dir_info = self.fetch_json(listing_url)
371368

372369
for entry in root_dir_info['values']:
373370
if entry['path'].lower() in _readme_filenames:

package_control/clients/github_client.py

Lines changed: 2 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -530,7 +530,7 @@ def _api_url(self, user_repo, suffix=''):
530530

531531
return 'https://api.github.com/repos/%s%s' % (user_repo, suffix)
532532

533-
def _readme_url(self, user_repo, branch, prefer_cached=False):
533+
def _readme_url(self, user_repo, branch):
534534
"""
535535
Fetches the raw GitHub API information about a readme
536536
@@ -540,9 +540,6 @@ def _readme_url(self, user_repo, branch, prefer_cached=False):
540540
:param branch:
541541
The branch to pull the readme from
542542
543-
:param prefer_cached:
544-
If a cached version of the info should be returned instead of making a new HTTP request
545-
546543
:raises:
547544
DownloaderException: when there is an error downloading
548545
ClientException: when there is an error parsing the response
@@ -555,7 +552,7 @@ def _readme_url(self, user_repo, branch, prefer_cached=False):
555552
readme_url = self._api_url(user_repo, '/readme?%s' % query_string)
556553

557554
try:
558-
readme_file = self.fetch_json(readme_url, prefer_cached).get('path')
555+
readme_file = self.fetch_json(readme_url).get('path')
559556
if readme_file:
560557
return 'https://raw.githubusercontent.com/%s/%s/%s' % (user_repo, branch, readme_file)
561558

package_control/clients/json_api_client.py

Lines changed: 4 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -10,16 +10,13 @@ class JSONApiClient:
1010
def __init__(self, settings):
1111
self.settings = settings
1212

13-
def fetch(self, url, prefer_cached=False):
13+
def fetch(self, url):
1414
"""
1515
Retrieves the contents of a URL
1616
1717
:param url:
1818
The URL to download the content from
1919
20-
:param prefer_cached:
21-
If a cached copy of the content is preferred
22-
2320
:raises:
2421
DownloaderException: when there is an error downloading
2522
@@ -35,26 +32,23 @@ def fetch(self, url, prefer_cached=False):
3532
joiner = '?%s' if url.find('?') == -1 else '&%s'
3633
url += joiner % params
3734

38-
return http_get(url, self.settings, 'Error downloading repository.', prefer_cached)
35+
return http_get(url, self.settings, 'Error downloading repository.')
3936

40-
def fetch_json(self, url, prefer_cached=False):
37+
def fetch_json(self, url):
4138
"""
4239
Retrieves and parses the JSON from a URL
4340
4441
:param url:
4542
The URL to download the JSON from
4643
47-
:param prefer_cached:
48-
If a cached copy of the JSON is preferred
49-
5044
:raises:
5145
ClientException: when there is an error parsing the response
5246
5347
:return:
5448
A dict or list from the JSON
5549
"""
5650

57-
repository_json = self.fetch(url, prefer_cached)
51+
repository_json = self.fetch(url)
5852

5953
try:
6054
return json.loads(repository_json.decode('utf-8'))

package_control/clients/readme_client.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,7 @@ def readme_info(self, url):
5555
query_string = urlencode({'ref': branch})
5656
readme_json_url = 'https://api.github.com/repos/%s/readme?%s' % (user_repo, query_string)
5757
try:
58-
info = self.fetch_json(readme_json_url, prefer_cached=True)
58+
info = self.fetch_json(readme_json_url)
5959
contents = base64.b64decode(info['content'])
6060
except (ValueError):
6161
pass

package_control/download_manager.py

Lines changed: 4 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,7 @@
3333
"""A timer used to disconnect all managers after a period of no usage"""
3434

3535

36-
def http_get(url, settings, error_message='', prefer_cached=False):
36+
def http_get(url, settings, error_message=''):
3737
"""
3838
Performs a HTTP GET request using best matching downloader.
3939
@@ -58,9 +58,6 @@ def http_get(url, settings, error_message='', prefer_cached=False):
5858
:param error_message:
5959
The error message to include if the download fails
6060
61-
:param prefer_cached:
62-
If cached version of the URL content is preferred over a new request
63-
6461
:raises:
6562
DownloaderException: if there was an error downloading the URL
6663
@@ -80,7 +77,7 @@ def http_get(url, settings, error_message='', prefer_cached=False):
8077

8178
try:
8279
manager = _grab(url, settings)
83-
result = manager.fetch(url, error_message, prefer_cached)
80+
result = manager.fetch(url, error_message)
8481

8582
finally:
8683
if manager:
@@ -337,7 +334,7 @@ def close(self):
337334
self.downloader.close()
338335
self.downloader = None
339336

340-
def fetch(self, url, error_message, prefer_cached=False):
337+
def fetch(self, url, error_message):
341338
"""
342339
Downloads a URL and returns the contents
343340
@@ -347,9 +344,6 @@ def fetch(self, url, error_message, prefer_cached=False):
347344
:param error_message:
348345
The error message to include if the download fails
349346
350-
:param prefer_cached:
351-
If cached version of the URL content is preferred over a new request
352-
353347
:raises:
354348
DownloaderException: if there was an error downloading the URL
355349
@@ -496,7 +490,7 @@ def fetch(self, url, error_message, prefer_cached=False):
496490
raise exception
497491

498492
try:
499-
return self.downloader.download(url, error_message, timeout, 3, prefer_cached)
493+
return self.downloader.download(url, error_message, timeout, 3)
500494

501495
except (RateLimitException) as e:
502496
# rate limits are normally reset after an hour

package_control/downloaders/curl_downloader.py

Lines changed: 2 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@ def close(self):
3737

3838
pass
3939

40-
def download(self, url, error_message, timeout, tries, prefer_cached=False):
40+
def download(self, url, error_message, timeout, tries):
4141
"""
4242
Downloads a URL and returns the contents
4343
@@ -55,9 +55,6 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False):
5555
The int number of times to try and download the URL in the case of
5656
a timeout or HTTP 503 error
5757
58-
:param prefer_cached:
59-
If a cached version should be returned instead of trying a new request
60-
6158
:raises:
6259
RateLimitException: when a rate limit is hit
6360
DownloaderException: when any other download error occurs
@@ -66,7 +63,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False):
6663
The string contents of the URL
6764
"""
6865

69-
if prefer_cached or self.is_cache_fresh(url):
66+
if self.is_cache_fresh(url):
7067
cached = self.retrieve_cached(url)
7168
if cached:
7269
return cached

package_control/downloaders/oscrypto_downloader.py

Lines changed: 3 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -57,7 +57,7 @@ def close(self):
5757
self.socket = None
5858
self.using_proxy = False
5959

60-
def download(self, url, error_message, timeout, tries, prefer_cached=False):
60+
def download(self, url, error_message, timeout, tries):
6161
"""
6262
Downloads a URL and returns the contents
6363
@@ -79,9 +79,6 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False):
7979
The int number of times to try and download the URL in the case of
8080
a timeout or HTTP 503 error
8181
82-
:param prefer_cached:
83-
If a cached version should be returned instead of trying a new request
84-
8582
:raises:
8683
RateLimitException: when a rate limit is hit
8784
DownloaderException: when any other download error occurs
@@ -90,7 +87,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False):
9087
The string contents of the URL
9188
"""
9289

93-
if prefer_cached or self.is_cache_fresh(url):
90+
if self.is_cache_fresh(url):
9491
cached = self.retrieve_cached(url)
9592
if cached:
9693
return cached
@@ -157,7 +154,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False):
157154
if not location.startswith('/'):
158155
location = os.path.dirname(url_info.path) + location
159156
location = url_info.scheme + '://' + url_info.netloc + location
160-
return self.download(location, error_message, timeout, tried, prefer_cached)
157+
return self.download(location, error_message, timeout, tried)
161158

162159
# Make sure we obey Github's rate limiting headers
163160
self.handle_rate_limit(resp_headers, url)

package_control/downloaders/urllib_downloader.py

Lines changed: 2 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,7 @@ def close(self):
5050
handler.close()
5151
self.opener = None
5252

53-
def download(self, url, error_message, timeout, tries, prefer_cached=False):
53+
def download(self, url, error_message, timeout, tries):
5454
"""
5555
Downloads a URL and returns the contents
5656
@@ -72,9 +72,6 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False):
7272
The int number of times to try and download the URL in the case of
7373
a timeout or HTTP 503 error
7474
75-
:param prefer_cached:
76-
If a cached version should be returned instead of trying a new request
77-
7875
:raises:
7976
RateLimitException: when a rate limit is hit
8077
DownloaderException: when any other download error occurs
@@ -83,7 +80,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False):
8380
The string contents of the URL
8481
"""
8582

86-
if prefer_cached or self.is_cache_fresh(url):
83+
if self.is_cache_fresh(url):
8784
cached = self.retrieve_cached(url)
8885
if cached:
8986
return cached

package_control/downloaders/wget_downloader.py

Lines changed: 2 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,7 @@ def close(self):
4040

4141
pass
4242

43-
def download(self, url, error_message, timeout, tries, prefer_cached=False):
43+
def download(self, url, error_message, timeout, tries):
4444
"""
4545
Downloads a URL and returns the contents
4646
@@ -58,9 +58,6 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False):
5858
The int number of times to try and download the URL in the case of
5959
a timeout or HTTP 503 error
6060
61-
:param prefer_cached:
62-
If a cached version should be returned instead of trying a new request
63-
6461
:raises:
6562
RateLimitException: when a rate limit is hit
6663
DownloaderException: when any other download error occurs
@@ -69,7 +66,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False):
6966
The string contents of the URL
7067
"""
7168

72-
if prefer_cached or self.is_cache_fresh(url):
69+
if self.is_cache_fresh(url):
7370
cached = self.retrieve_cached(url)
7471
if cached:
7572
return cached

package_control/downloaders/wininet_downloader.py

Lines changed: 2 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -212,7 +212,7 @@ def close(self):
212212
self.use_count = 0
213213
self.was_offline = None
214214

215-
def download(self, url, error_message, timeout, tries, prefer_cached=False):
215+
def download(self, url, error_message, timeout, tries):
216216
"""
217217
Downloads a URL and returns the contents
218218
@@ -230,9 +230,6 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False):
230230
The int number of times to try and download the URL in the case of
231231
a timeout or HTTP 503 error
232232
233-
:param prefer_cached:
234-
If a cached version should be returned instead of trying a new request
235-
236233
:raises:
237234
RateLimitException: when a rate limit is hit
238235
DownloaderException: when any other download error occurs
@@ -242,7 +239,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False):
242239
The string contents of the URL
243240
"""
244241

245-
if prefer_cached or self.is_cache_fresh(url):
242+
if self.is_cache_fresh(url):
246243
cached = self.retrieve_cached(url)
247244
if cached:
248245
return cached

0 commit comments

Comments
 (0)