From c46df592ff54e13b54239ac2f4341542785339e1 Mon Sep 17 00:00:00 2001 From: Siddhesh Agarwal Date: Sun, 12 May 2024 17:43:03 +0530 Subject: [PATCH 1/9] exact imports --- wikipedia/__init__.py | 23 +++++++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/wikipedia/__init__.py b/wikipedia/__init__.py index 0a6ee25..b7d1485 100644 --- a/wikipedia/__init__.py +++ b/wikipedia/__init__.py @@ -1,4 +1,23 @@ -from .wikipedia import * -from .exceptions import * +from .wikipedia import ( + donate, + geosearch, + languages, + page, + random, + search, + set_lang, + set_user_agent, + set_rate_limiting, + suggest, + summary, + WikipediaPage, +) +from .exceptions import ( + DisambiguationError, + HTTPTimeoutError, + PageError, + RedirectError, + WikipediaException, +) __version__ = (1, 4, 0) From 7d8536cd871d798db1c9972ab0f89a2f1ec9ed96 Mon Sep 17 00:00:00 2001 From: Siddhesh Agarwal Date: Sun, 12 May 2024 17:43:20 +0530 Subject: [PATCH 2/9] updated configs --- .editorconfig | 11 ++++++++++ setup.py | 61 ++++++++++++++++++++++++++++----------------------- 2 files changed, 44 insertions(+), 28 deletions(-) create mode 100644 .editorconfig diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..9565cde --- /dev/null +++ b/.editorconfig @@ -0,0 +1,11 @@ +root = true + +[*] +charset = utf-8 +end_of_line = crlf +insert_final_newline = true + +[*.py] +indent_style = space +indent_size = 4 +trim_trailing_whitespace = true diff --git a/setup.py b/setup.py index 62a8f8c..3b4dc13 100644 --- a/setup.py +++ b/setup.py @@ -5,41 +5,46 @@ import setuptools -def local_file(file): - return codecs.open( - os.path.join(os.path.dirname(__file__), file), 'r', 'utf-8' - ) +def local_file(file: str): + return codecs.open(os.path.join(os.path.dirname(__file__), file), "r", "utf-8") + install_reqs = [ - line.strip() - for line in local_file('requirements.txt').readlines() - if line.strip() != '' + line.strip() + for line in 
local_file("requirements.txt").readlines() + if line.strip() != "" ] version = re.search( - "^__version__ = \((\d+), (\d+), (\d+)\)$", - local_file('wikipedia/__init__.py').read(), - re.MULTILINE + "^__version__ = \((\d+), (\d+), (\d+)\)$", + local_file("wikipedia/__init__.py").read(), + re.MULTILINE, ).groups() setuptools.setup( - name = "wikipedia", - version = '.'.join(version), - author = "Jonathan Goldsmith", - author_email = "jhghank@gmail.com", - description = "Wikipedia API for Python", - license = "MIT", - keywords = "python wikipedia API", - url = "https://github.com/goldsmith/Wikipedia", - install_requires = install_reqs, - packages = ['wikipedia'], - long_description = local_file('README.rst').read(), - classifiers = [ - 'Development Status :: 4 - Beta', - 'Topic :: Software Development :: Libraries', - 'License :: OSI Approved :: MIT License', - 'Programming Language :: Python', - 'Programming Language :: Python :: 3' - ] + name="wikipedia", + version=".".join(version), + author="Jonathan Goldsmith", + author_email="jhghank@gmail.com", + description="Wikipedia API for Python", + license="MIT", + keywords="python wikipedia API", + url="https://github.com/goldsmith/Wikipedia", + install_requires=install_reqs, + packages=["wikipedia"], + long_description=local_file("README.rst").read(), + classifiers=[ + "Development Status :: 4 - Beta", + "Topic :: Software Development :: Libraries", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + ], ) From e6b260a42851266481754e60a10a26e6f53eeb92 Mon Sep 17 00:00:00 2001 From: Siddhesh Agarwal Date: Sun, 12 May 2024 17:43:51 +0530 Subject: [PATCH 3/9] formatted and added type hinting --- 
wikipedia/exceptions.py | 87 ++- wikipedia/util.py | 52 +- wikipedia/wikipedia.py | 1360 ++++++++++++++++++++------------------- 3 files changed, 781 insertions(+), 718 deletions(-) diff --git a/wikipedia/exceptions.py b/wikipedia/exceptions.py index 0295b1c..a376ff6 100644 --- a/wikipedia/exceptions.py +++ b/wikipedia/exceptions.py @@ -2,79 +2,76 @@ Global wikipedia exception and warning classes. """ -import sys +from typing import List, Union -ODD_ERROR_MESSAGE = "This shouldn't happen. Please report on GitHub: github.com/goldsmith/Wikipedia" +ODD_ERROR_MESSAGE = ( + "This shouldn't happen. Please report on GitHub: github.com/goldsmith/Wikipedia" +) class WikipediaException(Exception): - """Base Wikipedia exception class.""" + """Base Wikipedia exception class.""" - def __init__(self, error): - self.error = error + def __init__(self, error: str): + self.error = error - def __unicode__(self): - return "An unknown error occured: \"{0}\". Please report it on GitHub!".format(self.error) + def __unicode__(self): + return f'An unknown error occured: "{self.error}". Please report it on GitHub!' - if sys.version_info > (3, 0): def __str__(self): - return self.__unicode__() - - else: - def __str__(self): - return self.__unicode__().encode('utf8') + return self.__unicode__() class PageError(WikipediaException): - """Exception raised when no Wikipedia matched a query.""" + """Exception raised when no Wikipedia matched a query.""" - def __init__(self, pageid=None, *args): - if pageid: - self.pageid = pageid - else: - self.title = args[0] + def __init__(self, pageid: Union[str, None] = None, *args): + if pageid: + self.pageid = pageid + else: + self.title = args[0] - def __unicode__(self): - if hasattr(self, 'title'): - return u"\"{0}\" does not match any pages. Try another query!".format(self.title) - else: - return u"Page id \"{0}\" does not match any pages. 
Try another id!".format(self.pageid) + def __unicode__(self): + if hasattr(self, "title"): + return f'"{self.title}" does not match any pages. Try another query!' + else: + return f'Page id "{self.pageid}" does not match any pages. Try another id!' class DisambiguationError(WikipediaException): - """ - Exception raised when a page resolves to a Disambiguation page. + """ + Exception raised when a page resolves to a Disambiguation page. - The `options` property contains a list of titles - of Wikipedia pages that the query may refer to. + The `options` property contains a list of titles + of Wikipedia pages that the query may refer to. - .. note:: `options` does not include titles that do not link to a valid Wikipedia page. - """ + .. note:: `options` does not include titles that do not link to a valid Wikipedia page. + """ - def __init__(self, title, may_refer_to): - self.title = title - self.options = may_refer_to + def __init__(self, title: str, may_refer_to: List[str]): + self.title = title + self.options = may_refer_to - def __unicode__(self): - return u"\"{0}\" may refer to: \n{1}".format(self.title, '\n'.join(self.options)) + def __unicode__(self): + return '"{0}" may refer to: \n{1}'.format(self.title, "\n".join(self.options)) class RedirectError(WikipediaException): - """Exception raised when a page title unexpectedly resolves to a redirect.""" + """Exception raised when a page title unexpectedly resolves to a redirect.""" - def __init__(self, title): - self.title = title + def __init__(self, title: str): + self.title = title - def __unicode__(self): - return u"\"{0}\" resulted in a redirect. Set the redirect property to True to allow automatic redirects.".format(self.title) + def __unicode__(self): + return f'"{self.title}" resulted in a redirect. Set the redirect property to True to allow automatic redirects.' 
class HTTPTimeoutError(WikipediaException): - """Exception raised when a request to the Mediawiki servers times out.""" + """Exception raised when a request to the Mediawiki servers times out.""" - def __init__(self, query): - self.query = query + def __init__(self, query: str): + self.query = query - def __unicode__(self): - return u"Searching for \"{0}\" resulted in a timeout. Try again in a few seconds, and make sure you have rate limiting set to True.".format(self.query) + def __unicode__(self): + return f'Searching for "{self.query}" resulted in a timeout. Try again in a few seconds, and make sure you have rate limiting set to True.' diff --git a/wikipedia/util.py b/wikipedia/util.py index e0eb092..18c23ca 100644 --- a/wikipedia/util.py +++ b/wikipedia/util.py @@ -1,41 +1,39 @@ -from __future__ import print_function, unicode_literals - import sys import functools + def debug(fn): - def wrapper(*args, **kwargs): - print(fn.__name__, 'called!') - print(sorted(args), tuple(sorted(kwargs.items()))) - res = fn(*args, **kwargs) - print(res) - return res - return wrapper + def wrapper(*args, **kwargs): + print(fn.__name__, "called!") + print(sorted(args), tuple(sorted(kwargs.items()))) + res = fn(*args, **kwargs) + print(res) + return res + + return wrapper class cache(object): - def __init__(self, fn): - self.fn = fn - self._cache = {} - functools.update_wrapper(self, fn) + def __init__(self, fn): + self.fn = fn + self._cache = {} + functools.update_wrapper(self, fn) - def __call__(self, *args, **kwargs): - key = str(args) + str(kwargs) - if key in self._cache: - ret = self._cache[key] - else: - ret = self._cache[key] = self.fn(*args, **kwargs) + def __call__(self, *args, **kwargs): + key = str(args) + str(kwargs) + if key in self._cache: + ret = self._cache[key] + else: + ret = self._cache[key] = self.fn(*args, **kwargs) - return ret + return ret - def clear_cache(self): - self._cache = {} + def clear_cache(self): + self._cache = {} # from 
http://stackoverflow.com/questions/3627793/best-output-type-and-encoding-practices-for-repr-functions -def stdout_encode(u, default='UTF8'): - encoding = sys.stdout.encoding or default - if sys.version_info > (3, 0): - return u.encode(encoding).decode(encoding) - return u.encode(encoding) +def stdout_encode(u: str, default: str = "UTF8"): + encoding = sys.stdout.encoding or default + return u.encode(encoding, errors="replace").decode(encoding) diff --git a/wikipedia/wikipedia.py b/wikipedia/wikipedia.py index 040901a..8532f81 100644 --- a/wikipedia/wikipedia.py +++ b/wikipedia/wikipedia.py @@ -1,4 +1,5 @@ from __future__ import unicode_literals +from typing import Any, Dict, Generator, List, Tuple, Union import requests import time @@ -7,737 +8,804 @@ from decimal import Decimal from .exceptions import ( - PageError, DisambiguationError, RedirectError, HTTPTimeoutError, - WikipediaException, ODD_ERROR_MESSAGE) -from .util import cache, stdout_encode, debug + PageError, + DisambiguationError, + RedirectError, + HTTPTimeoutError, + WikipediaException, + ODD_ERROR_MESSAGE, +) +from .util import cache, stdout_encode import re -API_URL = 'http://en.wikipedia.org/w/api.php' +API_URL = "http://en.wikipedia.org/w/api.php" RATE_LIMIT = False RATE_LIMIT_MIN_WAIT = None RATE_LIMIT_LAST_CALL = None -USER_AGENT = 'wikipedia (https://github.com/goldsmith/Wikipedia/)' +USER_AGENT = "wikipedia (https://github.com/goldsmith/Wikipedia/)" -def set_lang(prefix): - ''' - Change the language of the API being requested. - Set `prefix` to one of the two letter prefixes found on the `list of all Wikipedias `_. +def set_lang(prefix: str): + """ + Change the language of the API being requested. + Set `prefix` to one of the two letter prefixes found on the `list of all Wikipedias `_. - After setting the language, the cache for ``search``, ``suggest``, and ``summary`` will be cleared. + After setting the language, the cache for ``search``, ``suggest``, and ``summary`` will be cleared. - .. 
note:: Make sure you search for page titles in the language that you have set. - ''' - global API_URL - API_URL = 'http://' + prefix.lower() + '.wikipedia.org/w/api.php' + .. note:: Make sure you search for page titles in the language that you have set. + """ + global API_URL + API_URL = f"http://{prefix.lower()}.wikipedia.org/w/api.php" - for cached_func in (search, suggest, summary): - cached_func.clear_cache() + for cached_func in (search, suggest, summary): + cached_func.clear_cache() -def set_user_agent(user_agent_string): - ''' - Set the User-Agent string to be used for all requests. +def set_user_agent(user_agent_string: str): + """ + Set the User-Agent string to be used for all requests. - Arguments: + Arguments: - * user_agent_string - (string) a string specifying the User-Agent header - ''' - global USER_AGENT - USER_AGENT = user_agent_string + * user_agent_string - (string) a string specifying the User-Agent header + """ + global USER_AGENT + USER_AGENT = user_agent_string -def set_rate_limiting(rate_limit, min_wait=timedelta(milliseconds=50)): - ''' - Enable or disable rate limiting on requests to the Mediawiki servers. - If rate limiting is not enabled, under some circumstances (depending on - load on Wikipedia, the number of requests you and other `wikipedia` users - are making, and other factors), Wikipedia may return an HTTP timeout error. +def set_rate_limiting( + rate_limit: bool, + min_wait: timedelta = timedelta(milliseconds=50), +): + """ + Enable or disable rate limiting on requests to the Mediawiki servers. + If rate limiting is not enabled, under some circumstances (depending on + load on Wikipedia, the number of requests you and other `wikipedia` users + are making, and other factors), Wikipedia may return an HTTP timeout error. - Enabling rate limiting generally prevents that issue, but please note that - HTTPTimeoutError still might be raised. 
+ Enabling rate limiting generally prevents that issue, but please note that + HTTPTimeoutError still might be raised. - Arguments: + Arguments: - * rate_limit - (Boolean) whether to enable rate limiting or not + * rate_limit - (Boolean) whether to enable rate limiting or not - Keyword arguments: + Keyword arguments: - * min_wait - if rate limiting is enabled, `min_wait` is a timedelta describing the minimum time to wait before requests. - Defaults to timedelta(milliseconds=50) - ''' - global RATE_LIMIT - global RATE_LIMIT_MIN_WAIT - global RATE_LIMIT_LAST_CALL + * min_wait - if rate limiting is enabled, `min_wait` is a timedelta describing the minimum time to wait before requests. + Defaults to timedelta(milliseconds=50) + """ + global RATE_LIMIT + global RATE_LIMIT_MIN_WAIT + global RATE_LIMIT_LAST_CALL - RATE_LIMIT = rate_limit - if not rate_limit: - RATE_LIMIT_MIN_WAIT = None - else: - RATE_LIMIT_MIN_WAIT = min_wait - - RATE_LIMIT_LAST_CALL = None - - -@cache -def search(query, results=10, suggestion=False): - ''' - Do a Wikipedia search for `query`. 
- - Keyword arguments: - - * results - the maxmimum number of results returned - * suggestion - if True, return results and suggestion (if any) in a tuple - ''' - - search_params = { - 'list': 'search', - 'srprop': '', - 'srlimit': results, - 'limit': results, - 'srsearch': query - } - if suggestion: - search_params['srinfo'] = 'suggestion' - - raw_results = _wiki_request(search_params) - - if 'error' in raw_results: - if raw_results['error']['info'] in ('HTTP request timed out.', 'Pool queue is full'): - raise HTTPTimeoutError(query) - else: - raise WikipediaException(raw_results['error']['info']) - - search_results = (d['title'] for d in raw_results['query']['search']) - - if suggestion: - if raw_results['query'].get('searchinfo'): - return list(search_results), raw_results['query']['searchinfo']['suggestion'] + RATE_LIMIT = rate_limit + if not rate_limit: + RATE_LIMIT_MIN_WAIT = None else: - return list(search_results), None + RATE_LIMIT_MIN_WAIT = min_wait - return list(search_results) + RATE_LIMIT_LAST_CALL = None @cache -def geosearch(latitude, longitude, title=None, results=10, radius=1000): - ''' - Do a wikipedia geo search for `latitude` and `longitude` - using HTTP API described in http://www.mediawiki.org/wiki/Extension:GeoData - - Arguments: - - * latitude (float or decimal.Decimal) - * longitude (float or decimal.Decimal) - - Keyword arguments: - - * title - The title of an article to search for - * results - the maximum number of results returned - * radius - Search radius in meters. The value must be between 10 and 10000 - ''' +def search( + query: str, results: int = 10, suggestion: bool = False +) -> Tuple[List[Generator[Any, None, None]], Union[str, None]]: + """ + Do a Wikipedia search for `query`. 
+
+    Keyword arguments:
+
+    * results - the maximum number of results returned
+    * suggestion - if True, return results and suggestion (if any) in a tuple
+    """
+
+    search_params = {
+        "list": "search",
+        "srprop": "",
+        "srlimit": results,
+        "limit": results,
+        "srsearch": query,
+    }
+    if suggestion:
+        search_params["srinfo"] = "suggestion"
 
-    search_params = {
-        'list': 'geosearch',
-        'gsradius': radius,
-        'gscoord': '{0}|{1}'.format(latitude, longitude),
-        'gslimit': results
-    }
-    if title:
-        search_params['titles'] = title
+    raw_results = _wiki_request(search_params)
 
-    raw_results = _wiki_request(search_params)
+    if "error" in raw_results:
+        if raw_results["error"]["info"] in (
+            "HTTP request timed out.",
+            "Pool queue is full",
+        ):
+            raise HTTPTimeoutError(query)
+        else:
+            raise WikipediaException(raw_results["error"]["info"])
 
-    if 'error' in raw_results:
-        if raw_results['error']['info'] in ('HTTP request timed out.', 'Pool queue is full'):
-            raise HTTPTimeoutError('{0}|{1}'.format(latitude, longitude))
-        else:
-            raise WikipediaException(raw_results['error']['info'])
+    search_results = (d["title"] for d in raw_results["query"]["search"])
 
-    search_pages = raw_results['query'].get('pages', None)
-    if search_pages:
-        search_results = (v['title'] for k, v in search_pages.items() if k != '-1')
-    else:
-        search_results = (d['title'] for d in raw_results['query']['geosearch'])
+    if suggestion:
+        if raw_results["query"].get("searchinfo"):
+            return (
+                list(search_results),
+                raw_results["query"]["searchinfo"]["suggestion"],
+            )
+        else:
+            return list(search_results), None
 
-    return list(search_results)
+    return list(search_results), None


 @cache
-def suggest(query):
-    '''
-    Get a Wikipedia search suggestion for `query`.
-    Returns a string or None if no suggestion was found.
- ''' - - search_params = { - 'list': 'search', - 'srinfo': 'suggestion', - 'srprop': '', - } - search_params['srsearch'] = query - - raw_result = _wiki_request(search_params) - - if raw_result['query'].get('searchinfo'): - return raw_result['query']['searchinfo']['suggestion'] +def geosearch( + latitude: Union[float, Decimal], + longitude: Union[float, Decimal], + title: Union[str, None] = None, + results: int = 10, + radius: int = 1000, +): + """ + Do a wikipedia geo search for `latitude` and `longitude` + using HTTP API described in http://www.mediawiki.org/wiki/Extension:GeoData + + Arguments: + + * latitude (float or decimal.Decimal) + * longitude (float or decimal.Decimal) + + Keyword arguments: + + * title - The title of an article to search for + * results - the maximum number of results returned + * radius - Search radius in meters. The value must be between 10 and 10000 + """ + + search_params = { + "list": "geosearch", + "gsradius": radius, + "gscoord": f"{latitude}|{longitude}", + "gslimit": results, + } + if title: + search_params["titles"] = title - return None + raw_results = _wiki_request(search_params) + if "error" in raw_results: + if raw_results["error"]["info"] in ( + "HTTP request timed out.", + "Pool queue is full", + ): + raise HTTPTimeoutError(f"{latitude}|{longitude}") + else: + raise WikipediaException(raw_results["error"]["info"]) -def random(pages=1): - ''' - Get a list of random Wikipedia article titles. + search_pages = raw_results["query"].get("pages", None) + if search_pages: + search_results = (v["title"] for k, v in search_pages.items() if k != "-1") + else: + search_results = (d["title"] for d in raw_results["query"]["geosearch"]) - .. note:: Random only gets articles from namespace 0, meaning no Category, User talk, or other meta-Wikipedia pages. 
+ return list(search_results) - Keyword arguments: - * pages - the number of random pages returned (max of 10) - ''' - #http://en.wikipedia.org/w/api.php?action=query&list=random&rnlimit=5000&format=jsonfm - query_params = { - 'list': 'random', - 'rnnamespace': 0, - 'rnlimit': pages, - } +@cache +def suggest(query: str): + """ + Get a Wikipedia search suggestion for `query`. + Returns a string or None if no suggestion was found. + """ + + search_params: Dict[str, Any] = { + "list": "search", + "srinfo": "suggestion", + "srprop": "", + } + search_params["srsearch"] = query - request = _wiki_request(query_params) - titles = [page['title'] for page in request['query']['random']] + raw_result = _wiki_request(search_params) - if len(titles) == 1: - return titles[0] + if raw_result["query"].get("searchinfo"): + return raw_result["query"]["searchinfo"]["suggestion"] - return titles + return None -@cache -def summary(title, sentences=0, chars=0, auto_suggest=True, redirect=True): - ''' - Plain text summary of the page. - - .. note:: This is a convenience wrapper - auto_suggest and redirect are enabled by default - - Keyword arguments: - - * sentences - if set, return the first `sentences` sentences (can be no greater than 10). - * chars - if set, return only the first `chars` characters (actual text returned may be slightly longer). 
- * auto_suggest - let Wikipedia find a valid page title for the query - * redirect - allow redirection without raising RedirectError - ''' - - # use auto_suggest and redirect to get the correct article - # also, use page's error checking to raise DisambiguationError if necessary - page_info = page(title, auto_suggest=auto_suggest, redirect=redirect) - title = page_info.title - pageid = page_info.pageid - - query_params = { - 'prop': 'extracts', - 'explaintext': '', - 'titles': title - } - - if sentences: - query_params['exsentences'] = sentences - elif chars: - query_params['exchars'] = chars - else: - query_params['exintro'] = '' - - request = _wiki_request(query_params) - summary = request['query']['pages'][pageid]['extract'] - - return summary - - -def page(title=None, pageid=None, auto_suggest=True, redirect=True, preload=False): - ''' - Get a WikipediaPage object for the page with title `title` or the pageid - `pageid` (mutually exclusive). - - Keyword arguments: - - * title - the title of the page to load - * pageid - the numeric pageid of the page to load - * auto_suggest - let Wikipedia find a valid page title for the query - * redirect - allow redirection without raising RedirectError - * preload - load content, summary, images, references, and links during initialization - ''' - - if title is not None: - if auto_suggest: - results, suggestion = search(title, results=1, suggestion=True) - try: - title = suggestion or results[0] - except IndexError: - # if there is no suggestion or search results, the page doesn't exist - raise PageError(title) - return WikipediaPage(title, redirect=redirect, preload=preload) - elif pageid is not None: - return WikipediaPage(pageid=pageid, preload=preload) - else: - raise ValueError("Either a title or a pageid must be specified") +def random(pages: int = 1): + """ + Get a list of random Wikipedia article titles. + .. 
note:: Random only gets articles from namespace 0, meaning no Category, User talk, or other meta-Wikipedia pages. + Keyword arguments: -class WikipediaPage(object): - ''' - Contains data from a Wikipedia page. - Uses property methods to filter data from the raw HTML. - ''' - - def __init__(self, title=None, pageid=None, redirect=True, preload=False, original_title=''): - if title is not None: - self.title = title - self.original_title = original_title or title - elif pageid is not None: - self.pageid = pageid - else: - raise ValueError("Either a title or a pageid must be specified") - - self.__load(redirect=redirect, preload=preload) - - if preload: - for prop in ('content', 'summary', 'images', 'references', 'links', 'sections'): - getattr(self, prop) - - def __repr__(self): - return stdout_encode(u''.format(self.title)) - - def __eq__(self, other): - try: - return ( - self.pageid == other.pageid - and self.title == other.title - and self.url == other.url - ) - except: - return False - - def __load(self, redirect=True, preload=False): - ''' - Load basic information from Wikipedia. - Confirm that page exists and is not a disambiguation/redirect. - - Does not need to be called manually, should be called automatically during __init__. 
- ''' - query_params = { - 'prop': 'info|pageprops', - 'inprop': 'url', - 'ppprop': 'disambiguation', - 'redirects': '', + * pages - the number of random pages returned (max of 10) + """ + # http://en.wikipedia.org/w/api.php?action=query&list=random&rnlimit=5000&format=jsonfm + query_params: Dict[str, Any] = { + "list": "random", + "rnnamespace": 0, + "rnlimit": pages, } - if not getattr(self, 'pageid', None): - query_params['titles'] = self.title - else: - query_params['pageids'] = self.pageid request = _wiki_request(query_params) + titles = [page["title"] for page in request["query"]["random"]] - query = request['query'] - pageid = list(query['pages'].keys())[0] - page = query['pages'][pageid] - - # missing is present if the page is missing - if 'missing' in page: - if hasattr(self, 'title'): - raise PageError(self.title) - else: - raise PageError(pageid=self.pageid) + if len(titles) == 1: + return titles[0] - # same thing for redirect, except it shows up in query instead of page for - # whatever silly reason - elif 'redirects' in query: - if redirect: - redirects = query['redirects'][0] + return titles - if 'normalized' in query: - normalized = query['normalized'][0] - assert normalized['from'] == self.title, ODD_ERROR_MESSAGE - from_title = normalized['to'] - - else: - from_title = self.title - - assert redirects['from'] == from_title, ODD_ERROR_MESSAGE - - # change the title and reload the whole object - self.__init__(redirects['to'], redirect=redirect, preload=preload) - - else: - raise RedirectError(getattr(self, 'title', page['title'])) - - # since we only asked for disambiguation in ppprop, - # if a pageprop is returned, - # then the page must be a disambiguation page - elif 'pageprops' in page: - query_params = { - 'prop': 'revisions', - 'rvprop': 'content', - 'rvparse': '', - 'rvlimit': 1 - } - if hasattr(self, 'pageid'): - query_params['pageids'] = self.pageid - else: - query_params['titles'] = self.title - request = _wiki_request(query_params) - html = 
request['query']['pages'][pageid]['revisions'][0]['*'] - - lis = BeautifulSoup(html, 'html.parser').find_all('li') - filtered_lis = [li for li in lis if not 'tocsection' in ''.join(li.get('class', []))] - may_refer_to = [li.a.get_text() for li in filtered_lis if li.a] - - raise DisambiguationError(getattr(self, 'title', page['title']), may_refer_to) +@cache +def summary( + title: str, + sentences: int = 0, + chars: int = 0, + auto_suggest: bool = True, + redirect: bool = True, +): + """ + Plain text summary of the page. - else: - self.pageid = pageid - self.title = page['title'] - self.url = page['fullurl'] + .. note:: This is a convenience wrapper - auto_suggest and redirect are enabled by default - def __continued_query(self, query_params): - ''' - Based on https://www.mediawiki.org/wiki/API:Query#Continuing_queries - ''' - query_params.update(self.__title_query_param) + Keyword arguments: - last_continue = {} - prop = query_params.get('prop', None) + * sentences - if set, return the first `sentences` sentences (can be no greater than 10). + * chars - if set, return only the first `chars` characters (actual text returned may be slightly longer). 
+ * auto_suggest - let Wikipedia find a valid page title for the query + * redirect - allow redirection without raising RedirectError + """ - while True: - params = query_params.copy() - params.update(last_continue) + # use auto_suggest and redirect to get the correct article + # also, use page's error checking to raise DisambiguationError if necessary + page_info = page(title, auto_suggest=auto_suggest, redirect=redirect) + title = page_info.title + pageid = page_info.pageid - request = _wiki_request(params) + query_params: Dict[str, Any] = { + "prop": "extracts", + "explaintext": "", + "titles": title, + } - if 'query' not in request: - break + if sentences: + query_params["exsentences"] = sentences + elif chars: + query_params["exchars"] = chars + else: + query_params["exintro"] = "" - pages = request['query']['pages'] - if 'generator' in query_params: - for datum in pages.values(): # in python 3.3+: "yield from pages.values()" - yield datum - else: - for datum in pages[self.pageid][prop]: - yield datum + request = _wiki_request(query_params) + summary = request["query"]["pages"][pageid]["extract"] + + return summary + + +def page( + title: Union[str, None] = None, + pageid: Union[int, None] = None, + auto_suggest: bool = True, + redirect: bool = True, + preload: bool = False, +): + """ + Get a WikipediaPage object for the page with title `title` or the pageid + `pageid` (mutually exclusive). 
+ + Keyword arguments: + + * title - the title of the page to load + * pageid - the numeric pageid of the page to load + * auto_suggest - let Wikipedia find a valid page title for the query + * redirect - allow redirection without raising RedirectError + * preload - load content, summary, images, references, and links during initialization + """ + + if title: + if auto_suggest: + results, suggestion = search(title, results=1, suggestion=True) + try: + title = suggestion or results[0] + except IndexError as e: + # if there is no suggestion or search results, the page doesn't exist + raise PageError(title) from e + return WikipediaPage(title, redirect=redirect, preload=preload) + elif pageid is not None: + return WikipediaPage(pageid=pageid, preload=preload) + else: + raise ValueError("Either a title or a pageid must be specified") - if 'continue' not in request: - break - last_continue = request['continue'] +class WikipediaPage(object): + """ + Contains data from a Wikipedia page. + Uses property methods to filter data from the raw HTML. 
+ """ + + def __init__( + self, + title: Union[str, None] = None, + pageid: Union[int, None] = None, + redirect: bool = True, + preload: bool = False, + original_title: str = "", + ): + if title is not None: + self.title = title + self.original_title = original_title or title + elif pageid is not None: + self.pageid = pageid + else: + raise ValueError("Either a title or a pageid must be specified") + + self.__load(redirect=redirect, preload=preload) + + if preload: + for prop in ( + "content", + "summary", + "images", + "references", + "links", + "sections", + ): + getattr(self, prop) + + def __repr__(self): + return stdout_encode(f"") + + def __eq__(self, other: Any) -> bool: + if isinstance(other, self.__class__): + return all( + [ + self.pageid == other.pageid, + self.title == other.title, + self.url == other.url, + ] + ) + return False + + def __load(self, redirect: bool = True, preload: bool = False): + """ + Load basic information from Wikipedia. + Confirm that page exists and is not a disambiguation/redirect. + + Does not need to be called manually, should be called automatically during __init__. 
+ """ + query_params: Dict[str, Any] = { + "prop": "info|pageprops", + "inprop": "url", + "ppprop": "disambiguation", + "redirects": "", + } + if not getattr(self, "pageid", None): + query_params["titles"] = self.title + else: + query_params["pageids"] = self.pageid + + request = _wiki_request(query_params) + + query = request["query"] + pageid = list(query["pages"].keys())[0] + page = query["pages"][pageid] + + # missing is present if the page is missing + if "missing" in page: + if hasattr(self, "title"): + raise PageError(self.title) + else: + raise PageError(pageid=self.pageid) + + # same thing for redirect, except it shows up in query instead of page for + # whatever silly reason + elif "redirects" in query: + if redirect: + redirects = query["redirects"][0] + + if "normalized" in query: + normalized = query["normalized"][0] + assert normalized["from"] == self.title, ODD_ERROR_MESSAGE + + from_title = normalized["to"] + + else: + from_title = self.title + + assert redirects["from"] == from_title, ODD_ERROR_MESSAGE + + # change the title and reload the whole object + self.__init__(redirects["to"], redirect=redirect, preload=preload) + + else: + raise RedirectError(getattr(self, "title", page["title"])) + + # since we only asked for disambiguation in ppprop, + # if a pageprop is returned, + # then the page must be a disambiguation page + elif "pageprops" in page: + query_params = { + "prop": "revisions", + "rvprop": "content", + "rvparse": "", + "rvlimit": 1, + } + if hasattr(self, "pageid"): + query_params["pageids"] = self.pageid + else: + query_params["titles"] = self.title + request = _wiki_request(query_params) + html = request["query"]["pages"][pageid]["revisions"][0]["*"] + + lis = BeautifulSoup(html, "html.parser").find_all("li") + filtered_lis = [ + li for li in lis if "tocsection" not in "".join(li.get("class", [])) + ] + may_refer_to = [li.a.get_text() for li in filtered_lis if li.a] + + raise DisambiguationError( + getattr(self, "title", 
page["title"]), may_refer_to + ) - @property - def __title_query_param(self): - if getattr(self, 'title', None) is not None: - return {'titles': self.title} - else: - return {'pageids': self.pageid} - - def html(self): - ''' - Get full page HTML. - - .. warning:: This can get pretty slow on long pages. - ''' - - if not getattr(self, '_html', False): - query_params = { - 'prop': 'revisions', - 'rvprop': 'content', - 'rvlimit': 1, - 'rvparse': '', - 'titles': self.title - } - - request = _wiki_request(query_params) - self._html = request['query']['pages'][self.pageid]['revisions'][0]['*'] - - return self._html - - @property - def content(self): - ''' - Plain text content of the page, excluding images, tables, and other data. - ''' - - if not getattr(self, '_content', False): - query_params = { - 'prop': 'extracts|revisions', - 'explaintext': '', - 'rvprop': 'ids' - } - if not getattr(self, 'title', None) is None: - query_params['titles'] = self.title - else: - query_params['pageids'] = self.pageid - request = _wiki_request(query_params) - self._content = request['query']['pages'][self.pageid]['extract'] - self._revision_id = request['query']['pages'][self.pageid]['revisions'][0]['revid'] - self._parent_id = request['query']['pages'][self.pageid]['revisions'][0]['parentid'] - - return self._content - - @property - def revision_id(self): - ''' - Revision ID of the page. - - The revision ID is a number that uniquely identifies the current - version of the page. It can be used to create the permalink or for - other direct API calls. See `Help:Page history - `_ for more - information. - ''' - - if not getattr(self, '_revid', False): - # fetch the content (side effect is loading the revid) - self.content - - return self._revision_id - - @property - def parent_id(self): - ''' - Revision ID of the parent version of the current revision of this - page. See ``revision_id`` for more information. 
- ''' - - if not getattr(self, '_parentid', False): - # fetch the content (side effect is loading the revid) - self.content - - return self._parent_id - - @property - def summary(self): - ''' - Plain text summary of the page. - ''' - - if not getattr(self, '_summary', False): - query_params = { - 'prop': 'extracts', - 'explaintext': '', - 'exintro': '', - } - if not getattr(self, 'title', None) is None: - query_params['titles'] = self.title - else: - query_params['pageids'] = self.pageid - - request = _wiki_request(query_params) - self._summary = request['query']['pages'][self.pageid]['extract'] - - return self._summary - - @property - def images(self): - ''' - List of URLs of images on the page. - ''' - - if not getattr(self, '_images', False): - self._images = [ - page['imageinfo'][0]['url'] - for page in self.__continued_query({ - 'generator': 'images', - 'gimlimit': 'max', - 'prop': 'imageinfo', - 'iiprop': 'url', - }) - if 'imageinfo' in page - ] - - return self._images - - @property - def coordinates(self): - ''' - Tuple of Decimals in the form of (lat, lon) or None - ''' - if not getattr(self, '_coordinates', False): - query_params = { - 'prop': 'coordinates', - 'colimit': 'max', - 'titles': self.title, - } - - request = _wiki_request(query_params) - - if 'query' in request: - coordinates = request['query']['pages'][self.pageid]['coordinates'] - self._coordinates = (Decimal(coordinates[0]['lat']), Decimal(coordinates[0]['lon'])) - else: - self._coordinates = None - - return self._coordinates - - @property - def references(self): - ''' - List of URLs of external links on a page. - May include external links within page that aren't technically cited anywhere. 
- ''' - - if not getattr(self, '_references', False): - def add_protocol(url): - return url if url.startswith('http') else 'http:' + url - - self._references = [ - add_protocol(link['*']) - for link in self.__continued_query({ - 'prop': 'extlinks', - 'ellimit': 'max' - }) - ] - - return self._references - - @property - def links(self): - ''' - List of titles of Wikipedia page links on a page. - - .. note:: Only includes articles from namespace 0, meaning no Category, User talk, or other meta-Wikipedia pages. - ''' - - if not getattr(self, '_links', False): - self._links = [ - link['title'] - for link in self.__continued_query({ - 'prop': 'links', - 'plnamespace': 0, - 'pllimit': 'max' - }) - ] - - return self._links - - @property - def categories(self): - ''' - List of categories of a page. - ''' - - if not getattr(self, '_categories', False): - self._categories = [re.sub(r'^Category:', '', x) for x in - [link['title'] - for link in self.__continued_query({ - 'prop': 'categories', - 'cllimit': 'max' - }) - ]] - - return self._categories - - @property - def sections(self): - ''' - List of section titles from the table of contents on the page. - ''' - - if not getattr(self, '_sections', False): - query_params = { - 'action': 'parse', - 'prop': 'sections', - } - if not getattr(self, 'title', None) is None: - query_params["page"] = self.title - - request = _wiki_request(query_params) - self._sections = [section['line'] for section in request['parse']['sections']] - - return self._sections - - def section(self, section_title): - ''' - Get the plain text content of a section from `self.sections`. - Returns None if `section_title` isn't found, otherwise returns a whitespace stripped string. - - This is a convenience method that wraps self.content. - - .. warning:: Calling `section` on a section that has subheadings will NOT return - the full text of all of the subsections. It only gets the text between - `section_title` and the next subheading, which is often empty. 
- ''' - - section = u"== {} ==".format(section_title) - try: - index = self.content.index(section) + len(section) - except ValueError: - return None - - try: - next_index = self.content.index("==", index) - except ValueError: - next_index = len(self.content) - - return self.content[index:next_index].lstrip("=").strip() + else: + self.pageid = pageid + self.title = page["title"] + self.url = page["fullurl"] + + def __continued_query(self, query_params: Dict[str, Any]): + """ + Based on https://www.mediawiki.org/wiki/API:Query#Continuing_queries + """ + query_params.update(self.__title_query_param) + + last_continue = {} + prop = query_params.get("prop", None) + + while True: + params = query_params.copy() + params.update(last_continue) + + request = _wiki_request(params) + + if "query" not in request: + break + + pages = request["query"]["pages"] + if "generator" in query_params: + for ( + datum + ) in pages.values(): # in python 3.3+: "yield from pages.values()" + yield datum + else: + for datum in pages[self.pageid][prop]: + yield datum + + if "continue" not in request: + break + + last_continue = request["continue"] + + @property + def __title_query_param(self): + if getattr(self, "title", None) is not None: + return {"titles": self.title} + else: + return {"pageids": self.pageid} + + def html(self): + """ + Get full page HTML. + + .. warning:: This can get pretty slow on long pages. + """ + + if not getattr(self, "_html", False): + query_params = { + "prop": "revisions", + "rvprop": "content", + "rvlimit": 1, + "rvparse": "", + "titles": self.title, + } + + request = _wiki_request(query_params) + self._html = request["query"]["pages"][self.pageid]["revisions"][0]["*"] + + return self._html + + @property + def content(self): + """ + Plain text content of the page, excluding images, tables, and other data. 
+ """ + + if not getattr(self, "_content", False): + query_params = { + "prop": "extracts|revisions", + "explaintext": "", + "rvprop": "ids", + } + if not getattr(self, "title", None) is None: + query_params["titles"] = self.title + else: + query_params["pageids"] = self.pageid + request = _wiki_request(query_params) + self._content = request["query"]["pages"][self.pageid]["extract"] + self._revision_id = request["query"]["pages"][self.pageid]["revisions"][0][ + "revid" + ] + self._parent_id = request["query"]["pages"][self.pageid]["revisions"][0][ + "parentid" + ] + + return self._content + + @property + def revision_id(self): + """ + Revision ID of the page. + + The revision ID is a number that uniquely identifies the current + version of the page. It can be used to create the permalink or for + other direct API calls. See `Help:Page history + `_ for more + information. + """ + + if not getattr(self, "_revid", False): + # fetch the content (side effect is loading the revid) + self.content + + return self._revision_id + + @property + def parent_id(self): + """ + Revision ID of the parent version of the current revision of this + page. See ``revision_id`` for more information. + """ + + if not getattr(self, "_parentid", False): + # fetch the content (side effect is loading the revid) + self.content + + return self._parent_id + + @property + def summary(self): + """ + Plain text summary of the page. + """ + + if not getattr(self, "_summary", False): + query_params = { + "prop": "extracts", + "explaintext": "", + "exintro": "", + } + if not getattr(self, "title", None) is None: + query_params["titles"] = self.title + else: + query_params["pageids"] = self.pageid + + request = _wiki_request(query_params) + self._summary = request["query"]["pages"][self.pageid]["extract"] + + return self._summary + + @property + def images(self): + """ + List of URLs of images on the page. 
+ """ + + if not getattr(self, "_images", False): + self._images = [ + page["imageinfo"][0]["url"] + for page in self.__continued_query( + { + "generator": "images", + "gimlimit": "max", + "prop": "imageinfo", + "iiprop": "url", + } + ) + if "imageinfo" in page + ] + + return self._images + + @property + def coordinates(self): + """ + Tuple of Decimals in the form of (lat, lon) or None + """ + if not getattr(self, "_coordinates", False): + query_params = { + "prop": "coordinates", + "colimit": "max", + "titles": self.title, + } + + request = _wiki_request(query_params) + + if "query" in request: + coordinates = request["query"]["pages"][self.pageid]["coordinates"] + self._coordinates = ( + Decimal(coordinates[0]["lat"]), + Decimal(coordinates[0]["lon"]), + ) + else: + self._coordinates = None + + return self._coordinates + + @property + def references(self): + """ + List of URLs of external links on a page. + May include external links within page that aren't technically cited anywhere. + """ + + if not getattr(self, "_references", False): + + def add_protocol(url: str): + return url if url.startswith("http") else "http:" + url + + self._references = [ + add_protocol(link["*"]) + for link in self.__continued_query( + {"prop": "extlinks", "ellimit": "max"} + ) + ] + + return self._references + + @property + def links(self): + """ + List of titles of Wikipedia page links on a page. + + .. note:: Only includes articles from namespace 0, meaning no Category, User talk, or other meta-Wikipedia pages. + """ + + if not getattr(self, "_links", False): + self._links = [ + link["title"] + for link in self.__continued_query( + {"prop": "links", "plnamespace": 0, "pllimit": "max"} + ) + ] + + return self._links + + @property + def categories(self): + """ + List of categories of a page. 
+ """ + + if not getattr(self, "_categories", False): + self._categories = [ + re.sub(r"^Category:", "", x) + for x in [ + link["title"] + for link in self.__continued_query( + {"prop": "categories", "cllimit": "max"} + ) + ] + ] + + return self._categories + + @property + def sections(self): + """ + List of section titles from the table of contents on the page. + """ + + if not getattr(self, "_sections", False): + query_params = { + "action": "parse", + "prop": "sections", + } + if getattr(self, "title", None) is not None: + query_params["page"] = self.title + + request = _wiki_request(query_params) + self._sections = [ + section["line"] for section in request["parse"]["sections"] + ] + + return self._sections + + def section(self, section_title: str): + """ + Get the plain text content of a section from `self.sections`. + Returns None if `section_title` isn't found, otherwise returns a whitespace stripped string. + + This is a convenience method that wraps self.content. + + .. warning:: Calling `section` on a section that has subheadings will NOT return + the full text of all of the subsections. It only gets the text between + `section_title` and the next subheading, which is often empty. + """ + + section = f"== {section_title} ==" + try: + index = self.content.index(section) + len(section) + except ValueError: + return None + + try: + next_index = self.content.index("==", index) + except ValueError: + next_index = len(self.content) + + return self.content[index:next_index].lstrip("=").strip() @cache def languages(): - ''' - List all the currently supported language prefixes (usually ISO language code). + """ + List all the currently supported language prefixes (usually ISO language code). - Can be inputted to `set_lang` to change the Mediawiki that `wikipedia` requests - results from. + Can be inputted to `set_lang` to change the Mediawiki that `wikipedia` requests + results from. - Returns: dict of : pairs. 
To get just a list of prefixes, - use `wikipedia.languages().keys()`. - ''' - response = _wiki_request({ - 'meta': 'siteinfo', - 'siprop': 'languages' - }) + Returns: dict of : pairs. To get just a list of prefixes, + use `wikipedia.languages().keys()`. + """ + response = _wiki_request({"meta": "siteinfo", "siprop": "languages"}) - languages = response['query']['languages'] + languages = response["query"]["languages"] - return { - lang['code']: lang['*'] - for lang in languages - } + return {lang["code"]: lang["*"] for lang in languages} def donate(): - ''' - Open up the Wikimedia donate page in your favorite browser. - ''' - import webbrowser + """ + Open up the Wikimedia donate page in your favorite browser. + """ + import webbrowser - webbrowser.open('https://donate.wikimedia.org/w/index.php?title=Special:FundraiserLandingPage', new=2) + webbrowser.open( + "https://donate.wikimedia.org/w/index.php?title=Special:FundraiserLandingPage", + new=2, + ) -def _wiki_request(params): - ''' - Make a request to the Wikipedia API using the given search parameters. - Returns a parsed dict of the JSON response. - ''' - global RATE_LIMIT_LAST_CALL - global USER_AGENT +def _wiki_request(params: Dict[str, Any]): + """ + Make a request to the Wikipedia API using the given search parameters. + Returns a parsed dict of the JSON response. 
+ """ + global RATE_LIMIT_LAST_CALL + global USER_AGENT - params['format'] = 'json' - if not 'action' in params: - params['action'] = 'query' + params["format"] = "json" + if "action" not in params: + params["action"] = "query" - headers = { - 'User-Agent': USER_AGENT - } + headers = {"User-Agent": USER_AGENT} + timeout = 10 # temporary fix - if RATE_LIMIT and RATE_LIMIT_LAST_CALL and \ - RATE_LIMIT_LAST_CALL + RATE_LIMIT_MIN_WAIT > datetime.now(): + if ( + RATE_LIMIT + and RATE_LIMIT_LAST_CALL + and RATE_LIMIT_LAST_CALL + RATE_LIMIT_MIN_WAIT > datetime.now() + ): - # it hasn't been long enough since the last API call - # so wait until we're in the clear to make the request + # it hasn't been long enough since the last API call + # so wait until we're in the clear to make the request - wait_time = (RATE_LIMIT_LAST_CALL + RATE_LIMIT_MIN_WAIT) - datetime.now() - time.sleep(int(wait_time.total_seconds())) + wait_time = (RATE_LIMIT_LAST_CALL + RATE_LIMIT_MIN_WAIT) - datetime.now() + time.sleep(int(wait_time.total_seconds())) - r = requests.get(API_URL, params=params, headers=headers) + r = requests.get(API_URL, params=params, headers=headers, timeout=timeout) - if RATE_LIMIT: - RATE_LIMIT_LAST_CALL = datetime.now() + if RATE_LIMIT: + RATE_LIMIT_LAST_CALL = datetime.now() - return r.json() + return r.json() From e0037002791fa4a16f57aa04d024f391cf8c9393 Mon Sep 17 00:00:00 2001 From: Siddhesh Agarwal Date: Sun, 12 May 2024 18:22:28 +0530 Subject: [PATCH 4/9] isort + version moved to new file --- wikipedia/__init__.py | 23 +++++++++++------------ wikipedia/exceptions.py | 1 - wikipedia/util.py | 2 +- wikipedia/version.py | 1 + wikipedia/wikipedia.py | 21 +++++++++++---------- 5 files changed, 24 insertions(+), 24 deletions(-) create mode 100644 wikipedia/version.py diff --git a/wikipedia/__init__.py b/wikipedia/__init__.py index b7d1485..c781093 100644 --- a/wikipedia/__init__.py +++ b/wikipedia/__init__.py @@ -1,4 +1,13 @@ -from .wikipedia import ( +from 
wikipedia.exceptions import ( + DisambiguationError, + HTTPTimeoutError, + PageError, + RedirectError, + WikipediaException, +) +from wikipedia.version import __version__ +from wikipedia.wikipedia import ( + WikipediaPage, donate, geosearch, languages, @@ -6,18 +15,8 @@ random, search, set_lang, - set_user_agent, set_rate_limiting, + set_user_agent, suggest, summary, - WikipediaPage, -) -from .exceptions import ( - DisambiguationError, - HTTPTimeoutError, - PageError, - RedirectError, - WikipediaException, ) - -__version__ = (1, 4, 0) diff --git a/wikipedia/exceptions.py b/wikipedia/exceptions.py index a376ff6..c1a4227 100644 --- a/wikipedia/exceptions.py +++ b/wikipedia/exceptions.py @@ -4,7 +4,6 @@ from typing import List, Union - ODD_ERROR_MESSAGE = ( "This shouldn't happen. Please report on GitHub: github.com/goldsmith/Wikipedia" ) diff --git a/wikipedia/util.py b/wikipedia/util.py index 18c23ca..e2f5dfa 100644 --- a/wikipedia/util.py +++ b/wikipedia/util.py @@ -1,5 +1,5 @@ -import sys import functools +import sys def debug(fn): diff --git a/wikipedia/version.py b/wikipedia/version.py new file mode 100644 index 0000000..532acc3 --- /dev/null +++ b/wikipedia/version.py @@ -0,0 +1 @@ +__version__ = (1, 4, 0) diff --git a/wikipedia/wikipedia.py b/wikipedia/wikipedia.py index 8532f81..3e3a328 100644 --- a/wikipedia/wikipedia.py +++ b/wikipedia/wikipedia.py @@ -1,28 +1,29 @@ -from __future__ import unicode_literals +import re +import time +from datetime import datetime, timedelta +from decimal import Decimal from typing import Any, Dict, Generator, List, Tuple, Union import requests -import time from bs4 import BeautifulSoup -from datetime import datetime, timedelta -from decimal import Decimal + +from wikipedia.util import cache, stdout_encode +from wikipedia.version import __version__ from .exceptions import ( - PageError, + ODD_ERROR_MESSAGE, DisambiguationError, - RedirectError, HTTPTimeoutError, + PageError, + RedirectError, WikipediaException, - 
ODD_ERROR_MESSAGE, ) -from .util import cache, stdout_encode -import re API_URL = "http://en.wikipedia.org/w/api.php" RATE_LIMIT = False RATE_LIMIT_MIN_WAIT = None RATE_LIMIT_LAST_CALL = None -USER_AGENT = "wikipedia (https://github.com/goldsmith/Wikipedia/)" +USER_AGENT = f"wikipedia/{'.'.join(str(x) for x in __version__)} (https://github.com/goldsmith/Wikipedia/)" def set_lang(prefix: str): From 7b6ff82c812d730a87be40b4196bac3b577c79ab Mon Sep 17 00:00:00 2001 From: Siddhesh Agarwal Date: Sun, 12 May 2024 18:29:31 +0530 Subject: [PATCH 5/9] format doc --- README.rst | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/README.rst b/README.rst index 03b8107..0e05490 100644 --- a/README.rst +++ b/README.rst @@ -91,16 +91,16 @@ full details. Credits ------- -- `wiki-api `__ by - @richardasaurus for inspiration +- `wiki-api `__ by + @richardasaurus for inspiration - @nmoroze and @themichaelyang for feedback and suggestions -- The `Wikimedia - Foundation `__ for giving - the world free access to data +- The `Wikimedia + Foundation `__ for giving + the world free access to data .. 
image:: https://d2weczhvl823v0.cloudfront.net/goldsmith/wikipedia/trend.png - :alt: Bitdeli badge - :target: https://bitdeli.com/free + :alt: Bitdeli badge + :target: https://bitdeli.com/free From 32789fb439ea099b386936981042dbdcef1559da Mon Sep 17 00:00:00 2001 From: Siddhesh Agarwal Date: Sun, 12 May 2024 19:01:13 +0530 Subject: [PATCH 6/9] fixed type hinting --- wikipedia/util.py | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/wikipedia/util.py b/wikipedia/util.py index e2f5dfa..1525d0c 100644 --- a/wikipedia/util.py +++ b/wikipedia/util.py @@ -1,9 +1,12 @@ import functools import sys +from typing import Any, Callable, Dict, List -def debug(fn): - def wrapper(*args, **kwargs): +def debug(fn: Callable[[Any], Any]): + """Print the function call and return value""" + + def wrapper(*args: List[Any], **kwargs: Dict[Any, Any]): print(fn.__name__, "called!") print(sorted(args), tuple(sorted(kwargs.items()))) res = fn(*args, **kwargs) @@ -14,19 +17,17 @@ def wrapper(*args, **kwargs): class cache(object): + """Cache the results of a function call""" - def __init__(self, fn): + def __init__(self, fn: Callable[[Any], Any]): self.fn = fn - self._cache = {} + self._cache: Dict[Any, Any] = {} functools.update_wrapper(self, fn) - def __call__(self, *args, **kwargs): + def __call__(self, *args: List[Any], **kwargs: Dict[Any, Any]): key = str(args) + str(kwargs) - if key in self._cache: - ret = self._cache[key] - else: - ret = self._cache[key] = self.fn(*args, **kwargs) - + ret = self._cache.get(key, self.fn(*args, **kwargs)) + self._cache[key] = ret return ret def clear_cache(self): @@ -34,6 +35,6 @@ def clear_cache(self): # from http://stackoverflow.com/questions/3627793/best-output-type-and-encoding-practices-for-repr-functions -def stdout_encode(u: str, default: str = "UTF8"): +def stdout_encode(u: str, default: str = "utf-8") -> str: encoding = sys.stdout.encoding or default return u.encode(encoding, 
errors="replace").decode(encoding) From 410e75f77535a506c60c7aa9675e7bb71c71fb0d Mon Sep 17 00:00:00 2001 From: Siddhesh Agarwal Date: Sun, 12 May 2024 19:01:24 +0530 Subject: [PATCH 7/9] fixed type hinting --- wikipedia/exceptions.py | 2 +- wikipedia/version.py | 5 ++++- wikipedia/wikipedia.py | 9 +++------ 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/wikipedia/exceptions.py b/wikipedia/exceptions.py index c1a4227..58e6d30 100644 --- a/wikipedia/exceptions.py +++ b/wikipedia/exceptions.py @@ -25,7 +25,7 @@ def __str__(self): class PageError(WikipediaException): """Exception raised when no Wikipedia matched a query.""" - def __init__(self, pageid: Union[str, None] = None, *args): + def __init__(self, pageid: Union[int, None] = None, *args: List[str]): if pageid: self.pageid = pageid else: diff --git a/wikipedia/version.py b/wikipedia/version.py index 532acc3..eee2da1 100644 --- a/wikipedia/version.py +++ b/wikipedia/version.py @@ -1 +1,4 @@ -__version__ = (1, 4, 0) +from typing import Tuple + + +__version__: Tuple[int, int, int] = (1, 4, 0) diff --git a/wikipedia/wikipedia.py b/wikipedia/wikipedia.py index 3e3a328..1a41519 100644 --- a/wikipedia/wikipedia.py +++ b/wikipedia/wikipedia.py @@ -76,9 +76,7 @@ def set_rate_limiting( * min_wait - if rate limiting is enabled, `min_wait` is a timedelta describing the minimum time to wait before requests. Defaults to timedelta(milliseconds=50) """ - global RATE_LIMIT - global RATE_LIMIT_MIN_WAIT - global RATE_LIMIT_LAST_CALL + global RATE_LIMIT, RATE_LIMIT_MIN_WAIT, RATE_LIMIT_LAST_CALL RATE_LIMIT = rate_limit if not rate_limit: @@ -110,7 +108,7 @@ def search( "srsearch": query, } if suggestion: - sarch_params["srinfo"] = "suggestion" + search_params["srinfo"] = "suggestion" raw_results = _wiki_request(search_params) @@ -782,8 +780,7 @@ def _wiki_request(params: Dict[str, Any]): Make a request to the Wikipedia API using the given search parameters. Returns a parsed dict of the JSON response. 
""" - global RATE_LIMIT_LAST_CALL - global USER_AGENT + global RATE_LIMIT_LAST_CALL, USER_AGENT params["format"] = "json" if "action" not in params: From a5fdee13403f2fcfa46409c6b9ace6bc2b16071e Mon Sep 17 00:00:00 2001 From: Siddhesh Agarwal Date: Sun, 12 May 2024 20:56:44 +0530 Subject: [PATCH 8/9] small fixes --- wikipedia/wikipedia.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/wikipedia/wikipedia.py b/wikipedia/wikipedia.py index 1a41519..c1bccfe 100644 --- a/wikipedia/wikipedia.py +++ b/wikipedia/wikipedia.py @@ -520,12 +520,12 @@ def content(self): """ if not getattr(self, "_content", False): - query_params = { + query_params: Dict[str, Any] = { "prop": "extracts|revisions", "explaintext": "", "rvprop": "ids", } - if not getattr(self, "title", None) is None: + if getattr(self, "title", None) is not None: query_params["titles"] = self.title else: query_params["pageids"] = self.pageid @@ -578,12 +578,12 @@ def summary(self): """ if not getattr(self, "_summary", False): - query_params = { + query_params: Dict[str, Any] = { "prop": "extracts", "explaintext": "", "exintro": "", } - if not getattr(self, "title", None) is None: + if getattr(self, "title", None) is not None: query_params["titles"] = self.title else: query_params["pageids"] = self.pageid From e53ef655ebb7f8a9d80743e86f34c0276a313255 Mon Sep 17 00:00:00 2001 From: Siddhesh Agarwal Date: Wed, 6 Nov 2024 22:50:35 +0530 Subject: [PATCH 9/9] added typing, raised for status --- .travis.yml | 7 +- docs/source/conf.py | 137 ++-- setup.py | 1 + tests/geosearch_test.py | 81 +- tests/lang_test.py | 8 +- tests/page_test.py | 337 ++++---- tests/request_mock_data.py | 1584 +++++++++++++++++++++++++++++++----- tests/search_test.py | 61 +- wikipedia/__init__.py | 20 + wikipedia/exceptions.py | 3 +- wikipedia/version.py | 1 - wikipedia/wikipedia.py | 19 +- 12 files changed, 1784 insertions(+), 475 deletions(-) diff --git a/.travis.yml b/.travis.yml index 87b483a..9c517e8 100644 --- 
a/.travis.yml +++ b/.travis.yml @@ -1,8 +1,9 @@ language: python python: - - 2.7 - - 3.3 - - 3.4 + - 3.9 + - 3.10 + - 3.11 + - 3.12 env: - REQUESTS=2.0.0 - REQUESTS=2.1.0 diff --git a/docs/source/conf.py b/docs/source/conf.py index 09ca2d0..a8b9cf7 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -11,164 +11,165 @@ # All configuration values have a default; values that are commented out # serve to show the default. -import sys, os +import os +import sys # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath('../..')) -sys.path.append(os.path.abspath('_themes')) +sys.path.insert(0, os.path.abspath("../..")) +sys.path.append(os.path.abspath("_themes")) # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' +# needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.coverage', 'sphinx.ext.ifconfig'] +extensions = ["sphinx.ext.autodoc", "sphinx.ext.coverage", "sphinx.ext.ifconfig"] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix of source filenames. -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. -#source_encoding = 'utf-8-sig' +# source_encoding = 'utf-8-sig' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. 
-project = u'wikipedia' -copyright = u'2013, Jonathan Goldsmith' +project = "wikipedia" +copyright = "2013, Jonathan Goldsmith" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. -version = '0.9' +version = "0.9" # The full version, including alpha/beta/rc tags. -release = '0.9' +release = "0.9" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. -#language = None +# language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: -#today = '' +# today = '' # Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' +# today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = [] # The reST default role (used for this markup: `text`) to use for all documents. -#default_role = None +# default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +# add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). -#add_module_names = True +# add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. -#show_authors = False +# show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] +# modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. 
-#keep_warnings = False +# keep_warnings = False # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = 'flask_small' +html_theme = "flask_small" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. -#html_theme_options = {} +# html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. -html_theme_path = ['_themes'] +html_theme_path = ["_themes"] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". -#html_title = None +# html_title = None # A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None +# html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. -#html_logo = None +# html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. -#html_favicon = None +# html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +html_static_path = ["_static"] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' +# html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. -#html_use_smartypants = True +# html_use_smartypants = True # Custom sidebar templates, maps document names to template names. 
-#html_sidebars = {} +# html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. -#html_additional_pages = {} +# html_additional_pages = {} # If false, no module index is generated. -#html_domain_indices = True +# html_domain_indices = True # If false, no index is generated. -#html_use_index = True +# html_use_index = True # If true, the index is split into individual pages for each letter. -#html_split_index = False +# html_split_index = False # If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True +# html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True +# html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True +# html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. -#html_use_opensearch = '' +# html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None +# html_file_suffix = None # Output file base name for HTML help builder. -htmlhelp_basename = 'wikipediadoc' +htmlhelp_basename = "wikipediadoc" # -- Options for LaTeX output -------------------------------------------------- @@ -176,10 +177,8 @@ latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. # 'preamble': '', } @@ -187,29 +186,34 @@ # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). 
latex_documents = [ - ('index', 'wikipedia.tex', u'wikipedia Documentation', - u'Jonathan Goldsmith', 'manual'), + ( + "index", + "wikipedia.tex", + "wikipedia Documentation", + "Jonathan Goldsmith", + "manual", + ), ] # The name of an image file (relative to this directory) to place at the top of # the title page. -#latex_logo = None +# latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. -#latex_use_parts = False +# latex_use_parts = False # If true, show page references after internal links. -#latex_show_pagerefs = False +# latex_show_pagerefs = False # If true, show URL addresses after external links. -#latex_show_urls = False +# latex_show_urls = False # Documents to append as an appendix to all manuals. -#latex_appendices = [] +# latex_appendices = [] # If false, no module index is generated. -#latex_domain_indices = True +# latex_domain_indices = True # -- Options for manual page output -------------------------------------------- @@ -217,12 +221,11 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ - ('index', 'wikipedia', u'wikipedia Documentation', - [u'Jonathan Goldsmith'], 1) + ("index", "wikipedia", "wikipedia Documentation", ["Jonathan Goldsmith"], 1) ] # If true, show URL addresses after external links. -#man_show_urls = False +# man_show_urls = False # -- Options for Texinfo output ------------------------------------------------ @@ -231,19 +234,25 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - ('index', 'wikipedia', u'wikipedia Documentation', - u'Jonathan Goldsmith', 'wikipedia', 'One line description of project.', - 'Miscellaneous'), + ( + "index", + "wikipedia", + "wikipedia Documentation", + "Jonathan Goldsmith", + "wikipedia", + "One line description of project.", + "Miscellaneous", + ), ] # Documents to append as an appendix to all manuals. 
-#texinfo_appendices = [] +# texinfo_appendices = [] # If false, no module index is generated. -#texinfo_domain_indices = True +# texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' +# texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. -#texinfo_no_detailmenu = False +# texinfo_no_detailmenu = False diff --git a/setup.py b/setup.py index 3b4dc13..ded347e 100644 --- a/setup.py +++ b/setup.py @@ -2,6 +2,7 @@ import codecs import os import re + import setuptools diff --git a/tests/geosearch_test.py b/tests/geosearch_test.py index 0fc12f5..4c1ca4b 100644 --- a/tests/geosearch_test.py +++ b/tests/geosearch_test.py @@ -1,52 +1,63 @@ # -*- coding: utf-8 -*- import unittest - from collections import defaultdict from decimal import Decimal -from wikipedia import wikipedia from request_mock_data import mock_data +from wikipedia import wikipedia + # mock out _wiki_request class _wiki_request(object): - calls = defaultdict(int) + calls = defaultdict(int) + + @classmethod + def __call__(cls, params): + cls.calls[params.__str__()] += 1 + return mock_data["_wiki_request calls"][tuple(sorted(params.items()))] - @classmethod - def __call__(cls, params): - cls.calls[params.__str__()] += 1 - return mock_data["_wiki_request calls"][tuple(sorted(params.items()))] wikipedia._wiki_request = _wiki_request() class TestSearchLoc(unittest.TestCase): - """Test the functionality of wikipedia.geosearch.""" - - def test_geosearch(self): - """Test parsing a Wikipedia location request result.""" - self.assertEqual( - wikipedia.geosearch(Decimal('40.67693'), Decimal('117.23193')), - mock_data['data']["great_wall_of_china.geo_seach"] - ) - - def test_geosearch_with_radius(self): - """Test parsing a Wikipedia location request result.""" - self.assertEqual(wikipedia.geosearch( - Decimal('40.67693'), Decimal('117.23193'), radius=10000), - 
mock_data['data']["great_wall_of_china.geo_seach_with_radius"] - ) - - def test_geosearch_with_existing_title(self): - """Test parsing a Wikipedia location request result.""" - self.assertEqual(wikipedia.geosearch( - Decimal('40.67693'), Decimal('117.23193'), title='Great Wall of China'), - mock_data['data']["great_wall_of_china.geo_seach_with_existing_article_name"] - ) - - def test_geosearch_with_non_existing_title(self): - self.assertEqual(wikipedia.geosearch( - Decimal('40.67693'), Decimal('117.23193'), title='Test'), - mock_data['data']["great_wall_of_china.geo_seach_with_non_existing_article_name"] - ) \ No newline at end of file + """Test the functionality of wikipedia.geosearch.""" + + def test_geosearch(self): + """Test parsing a Wikipedia location request result.""" + self.assertEqual( + wikipedia.geosearch(Decimal("40.67693"), Decimal("117.23193")), + mock_data["data"]["great_wall_of_china.geo_seach"], + ) + + def test_geosearch_with_radius(self): + """Test parsing a Wikipedia location request result.""" + self.assertEqual( + wikipedia.geosearch( + Decimal("40.67693"), Decimal("117.23193"), radius=10000 + ), + mock_data["data"]["great_wall_of_china.geo_seach_with_radius"], + ) + + def test_geosearch_with_existing_title(self): + """Test parsing a Wikipedia location request result.""" + self.assertEqual( + wikipedia.geosearch( + Decimal("40.67693"), Decimal("117.23193"), title="Great Wall of China" + ), + mock_data["data"][ + "great_wall_of_china.geo_seach_with_existing_article_name" + ], + ) + + def test_geosearch_with_non_existing_title(self): + self.assertEqual( + wikipedia.geosearch( + Decimal("40.67693"), Decimal("117.23193"), title="Test" + ), + mock_data["data"][ + "great_wall_of_china.geo_seach_with_non_existing_article_name" + ], + ) diff --git a/tests/lang_test.py b/tests/lang_test.py index 88a9e69..a8f2d8b 100644 --- a/tests/lang_test.py +++ b/tests/lang_test.py @@ -5,8 +5,8 @@ class TestLang(unittest.TestCase): - """Test the ability for 
wikipedia to change the language of the API being accessed.""" + """Test the ability for wikipedia to change the language of the API being accessed.""" - def test_lang(self): - wikipedia.set_lang("fr") - self.assertEqual(wikipedia.API_URL, 'http://fr.wikipedia.org/w/api.php') + def test_lang(self): + wikipedia.set_lang("fr") + self.assertEqual(wikipedia.API_URL, "http://fr.wikipedia.org/w/api.php") diff --git a/tests/page_test.py b/tests/page_test.py index 7b69da4..02b0d6b 100644 --- a/tests/page_test.py +++ b/tests/page_test.py @@ -1,161 +1,206 @@ # -*- coding: utf-8 -*- -from decimal import Decimal import unittest +from decimal import Decimal -from wikipedia import wikipedia from request_mock_data import mock_data +from wikipedia import wikipedia + # mock out _wiki_request def _wiki_request(params): - return mock_data["_wiki_request calls"][tuple(sorted(params.items()))] + return mock_data["_wiki_request calls"][tuple(sorted(params.items()))] + + wikipedia._wiki_request = _wiki_request class TestPageSetUp(unittest.TestCase): - """Test the functionality of wikipedia.page's __init__ and load functions.""" - - def test_missing(self): - """Test that page raises a PageError for a nonexistant page.""" - # Callicarpa? 
- purpleberry = lambda: wikipedia.page("purpleberry", auto_suggest=False) - self.assertRaises(wikipedia.PageError, purpleberry) - - def test_redirect_true(self): - """Test that a page successfully redirects a query.""" - # no error should be raised if redirect is test_redirect_true - mp = wikipedia.page("Menlo Park, New Jersey") - - self.assertEqual(mp.title, "Edison, New Jersey") - self.assertEqual(mp.url, "http://en.wikipedia.org/wiki/Edison,_New_Jersey") - - def test_redirect_false(self): - """Test that page raises an error on a redirect when redirect == False.""" - mp = lambda: wikipedia.page("Menlo Park, New Jersey", auto_suggest=False, redirect=False) - self.assertRaises(wikipedia.RedirectError, mp) - - def test_redirect_no_normalization(self): - """Test that a page with redirects but no normalization query loads correctly""" - the_party = wikipedia.page("Communist Party", auto_suggest=False) - self.assertIsInstance(the_party, wikipedia.WikipediaPage) - self.assertEqual(the_party.title, "Communist party") - - def test_redirect_with_normalization(self): - """Test that a page redirect with a normalized query loads correctly""" - the_party = wikipedia.page("communist Party", auto_suggest=False) - self.assertIsInstance(the_party, wikipedia.WikipediaPage) - self.assertEqual(the_party.title, "Communist party") - - def test_redirect_normalization(self): - """Test that a page redirect loads correctly with or without a query normalization""" - capital_party = wikipedia.page("Communist Party", auto_suggest=False) - lower_party = wikipedia.page("communist Party", auto_suggest=False) - - self.assertIsInstance(capital_party, wikipedia.WikipediaPage) - self.assertIsInstance(lower_party, wikipedia.WikipediaPage) - self.assertEqual(capital_party.title, "Communist party") - self.assertEqual(capital_party, lower_party) - - def test_disambiguate(self): - """Test that page raises an error when a disambiguation page is reached.""" - try: - ram = wikipedia.page("Dodge Ram 
(disambiguation)", auto_suggest=False, redirect=False) - error_raised = False - except wikipedia.DisambiguationError as e: - error_raised = True - options = e.options - - self.assertTrue(error_raised) - self.assertEqual(options, [u'Dodge Ramcharger', u'Dodge Ram Van', u'Dodge Mini Ram', u'Dodge Caravan C/V', u'Dodge Caravan C/V', u'Ram C/V', u'Dodge Ram 50', u'Dodge D-Series', u'Dodge Rampage', u'Ram (brand)']) - - def test_auto_suggest(self): - """Test that auto_suggest properly corrects a typo.""" - # yum, butter. - butterfly = wikipedia.page("butteryfly") - - self.assertEqual(butterfly.title, "Butterfly") - self.assertEqual(butterfly.url, "http://en.wikipedia.org/wiki/Butterfly") + """Test the functionality of wikipedia.page's __init__ and load functions.""" + + def test_missing(self): + """Test that page raises a PageError for a nonexistant page.""" + # Callicarpa? + purpleberry = lambda: wikipedia.page("purpleberry", auto_suggest=False) + self.assertRaises(wikipedia.PageError, purpleberry) + + def test_redirect_true(self): + """Test that a page successfully redirects a query.""" + # no error should be raised if redirect is test_redirect_true + mp = wikipedia.page("Menlo Park, New Jersey") + + self.assertEqual(mp.title, "Edison, New Jersey") + self.assertEqual(mp.url, "http://en.wikipedia.org/wiki/Edison,_New_Jersey") + + def test_redirect_false(self): + """Test that page raises an error on a redirect when redirect == False.""" + mp = lambda: wikipedia.page( + "Menlo Park, New Jersey", auto_suggest=False, redirect=False + ) + self.assertRaises(wikipedia.RedirectError, mp) + + def test_redirect_no_normalization(self): + """Test that a page with redirects but no normalization query loads correctly""" + the_party = wikipedia.page("Communist Party", auto_suggest=False) + self.assertIsInstance(the_party, wikipedia.WikipediaPage) + self.assertEqual(the_party.title, "Communist party") + + def test_redirect_with_normalization(self): + """Test that a page redirect with 
a normalized query loads correctly""" + the_party = wikipedia.page("communist Party", auto_suggest=False) + self.assertIsInstance(the_party, wikipedia.WikipediaPage) + self.assertEqual(the_party.title, "Communist party") + + def test_redirect_normalization(self): + """Test that a page redirect loads correctly with or without a query normalization""" + capital_party = wikipedia.page("Communist Party", auto_suggest=False) + lower_party = wikipedia.page("communist Party", auto_suggest=False) + + self.assertIsInstance(capital_party, wikipedia.WikipediaPage) + self.assertIsInstance(lower_party, wikipedia.WikipediaPage) + self.assertEqual(capital_party.title, "Communist party") + self.assertEqual(capital_party, lower_party) + + def test_disambiguate(self): + """Test that page raises an error when a disambiguation page is reached.""" + try: + ram = wikipedia.page( + "Dodge Ram (disambiguation)", auto_suggest=False, redirect=False + ) + error_raised = False + except wikipedia.DisambiguationError as e: + error_raised = True + options = e.options + + self.assertTrue(error_raised) + self.assertEqual( + options, + [ + "Dodge Ramcharger", + "Dodge Ram Van", + "Dodge Mini Ram", + "Dodge Caravan C/V", + "Dodge Caravan C/V", + "Ram C/V", + "Dodge Ram 50", + "Dodge D-Series", + "Dodge Rampage", + "Ram (brand)", + ], + ) + + def test_auto_suggest(self): + """Test that auto_suggest properly corrects a typo.""" + # yum, butter. 
+ butterfly = wikipedia.page("butteryfly") + + self.assertEqual(butterfly.title, "Butterfly") + self.assertEqual(butterfly.url, "http://en.wikipedia.org/wiki/Butterfly") class TestPage(unittest.TestCase): - """Test the functionality of the rest of wikipedia.page.""" - - def setUp(self): - # shortest wikipedia articles with images and sections - self.celtuce = wikipedia.page("Celtuce") - self.cyclone = wikipedia.page("Tropical Depression Ten (2005)") - self.great_wall_of_china = wikipedia.page("Great Wall of China") - - def test_from_page_id(self): - """Test loading from a page id""" - self.assertEqual(self.celtuce, wikipedia.page(pageid=1868108)) - - def test_title(self): - """Test the title.""" - self.assertEqual(self.celtuce.title, "Celtuce") - self.assertEqual(self.cyclone.title, "Tropical Depression Ten (2005)") - - def test_url(self): - """Test the url.""" - self.assertEqual(self.celtuce.url, "http://en.wikipedia.org/wiki/Celtuce") - self.assertEqual(self.cyclone.url, "http://en.wikipedia.org/wiki/Tropical_Depression_Ten_(2005)") - - def test_content(self): - """Test the plain text content.""" - self.assertEqual(self.celtuce.content, mock_data['data']["celtuce.content"]) - self.assertEqual(self.cyclone.content, mock_data['data']["cyclone.content"]) - - def test_revision_id(self): - """Test the revision id.""" - self.assertEqual(self.celtuce.revision_id, mock_data['data']["celtuce.revid"]) - self.assertEqual(self.cyclone.revision_id, mock_data['data']["cyclone.revid"]) - - def test_parent_id(self): - """Test the parent id.""" - self.assertEqual(self.celtuce.parent_id, mock_data['data']["celtuce.parentid"]) - self.assertEqual(self.cyclone.parent_id, mock_data['data']["cyclone.parentid"]) - - - def test_summary(self): - """Test the summary.""" - self.assertEqual(self.celtuce.summary, mock_data['data']["celtuce.summary"]) - self.assertEqual(self.cyclone.summary, mock_data['data']["cyclone.summary"]) - - def test_images(self): - """Test the list of image URLs.""" - 
self.assertEqual(sorted(self.celtuce.images), mock_data['data']["celtuce.images"]) - self.assertEqual(sorted(self.cyclone.images), mock_data['data']["cyclone.images"]) - - def test_references(self): - """Test the list of reference URLs.""" - self.assertEqual(self.celtuce.references, mock_data['data']["celtuce.references"]) - self.assertEqual(self.cyclone.references, mock_data['data']["cyclone.references"]) - - def test_links(self): - """Test the list of titles of links to Wikipedia pages.""" - self.assertEqual(self.celtuce.links, mock_data['data']["celtuce.links"]) - self.assertEqual(self.cyclone.links, mock_data['data']["cyclone.links"]) - - def test_categories(self): - """Test the list of categories of Wikipedia pages.""" - self.assertEqual(self.celtuce.categories, mock_data['data']["celtuce.categories"]) - self.assertEqual(self.cyclone.categories, mock_data['data']["cyclone.categories"]) - - def test_html(self): - """Test the full HTML method.""" - self.assertEqual(self.celtuce.html(), mock_data['data']["celtuce.html"]) - - def test_sections(self): - """Test the list of section titles.""" - self.assertEqual(sorted(self.cyclone.sections), mock_data['data']["cyclone.sections"]) - - def test_section(self): - """Test text content of a single section.""" - self.assertEqual(self.cyclone.section("Impact"), mock_data['data']["cyclone.section.impact"]) - self.assertEqual(self.cyclone.section("History"), None) - - def test_coordinates(self): - """Test geo coordinates of a page""" - lat, lon = self.great_wall_of_china.coordinates - self.assertEqual(str(lat.quantize(Decimal('1.000'))), mock_data['data']['great_wall_of_china.coordinates.lat']) - self.assertEqual(str(lon.quantize(Decimal('1.000'))), mock_data['data']['great_wall_of_china.coordinates.lon']) + """Test the functionality of the rest of wikipedia.page.""" + + def setUp(self): + # shortest wikipedia articles with images and sections + self.celtuce = wikipedia.page("Celtuce") + self.cyclone = 
wikipedia.page("Tropical Depression Ten (2005)") + self.great_wall_of_china = wikipedia.page("Great Wall of China") + + def test_from_page_id(self): + """Test loading from a page id""" + self.assertEqual(self.celtuce, wikipedia.page(pageid=1868108)) + + def test_title(self): + """Test the title.""" + self.assertEqual(self.celtuce.title, "Celtuce") + self.assertEqual(self.cyclone.title, "Tropical Depression Ten (2005)") + + def test_url(self): + """Test the url.""" + self.assertEqual(self.celtuce.url, "http://en.wikipedia.org/wiki/Celtuce") + self.assertEqual( + self.cyclone.url, + "http://en.wikipedia.org/wiki/Tropical_Depression_Ten_(2005)", + ) + + def test_content(self): + """Test the plain text content.""" + self.assertEqual(self.celtuce.content, mock_data["data"]["celtuce.content"]) + self.assertEqual(self.cyclone.content, mock_data["data"]["cyclone.content"]) + + def test_revision_id(self): + """Test the revision id.""" + self.assertEqual(self.celtuce.revision_id, mock_data["data"]["celtuce.revid"]) + self.assertEqual(self.cyclone.revision_id, mock_data["data"]["cyclone.revid"]) + + def test_parent_id(self): + """Test the parent id.""" + self.assertEqual(self.celtuce.parent_id, mock_data["data"]["celtuce.parentid"]) + self.assertEqual(self.cyclone.parent_id, mock_data["data"]["cyclone.parentid"]) + + def test_summary(self): + """Test the summary.""" + self.assertEqual(self.celtuce.summary, mock_data["data"]["celtuce.summary"]) + self.assertEqual(self.cyclone.summary, mock_data["data"]["cyclone.summary"]) + + def test_images(self): + """Test the list of image URLs.""" + self.assertEqual( + sorted(self.celtuce.images), mock_data["data"]["celtuce.images"] + ) + self.assertEqual( + sorted(self.cyclone.images), mock_data["data"]["cyclone.images"] + ) + + def test_references(self): + """Test the list of reference URLs.""" + self.assertEqual( + self.celtuce.references, mock_data["data"]["celtuce.references"] + ) + self.assertEqual( + self.cyclone.references, 
mock_data["data"]["cyclone.references"] + ) + + def test_links(self): + """Test the list of titles of links to Wikipedia pages.""" + self.assertEqual(self.celtuce.links, mock_data["data"]["celtuce.links"]) + self.assertEqual(self.cyclone.links, mock_data["data"]["cyclone.links"]) + + def test_categories(self): + """Test the list of categories of Wikipedia pages.""" + self.assertEqual( + self.celtuce.categories, mock_data["data"]["celtuce.categories"] + ) + self.assertEqual( + self.cyclone.categories, mock_data["data"]["cyclone.categories"] + ) + + def test_html(self): + """Test the full HTML method.""" + self.assertEqual(self.celtuce.html(), mock_data["data"]["celtuce.html"]) + + def test_sections(self): + """Test the list of section titles.""" + self.assertEqual( + sorted(self.cyclone.sections), mock_data["data"]["cyclone.sections"] + ) + + def test_section(self): + """Test text content of a single section.""" + self.assertEqual( + self.cyclone.section("Impact"), mock_data["data"]["cyclone.section.impact"] + ) + self.assertEqual(self.cyclone.section("History"), None) + + def test_coordinates(self): + """Test geo coordinates of a page""" + lat, lon = self.great_wall_of_china.coordinates + self.assertEqual( + str(lat.quantize(Decimal("1.000"))), + mock_data["data"]["great_wall_of_china.coordinates.lat"], + ) + self.assertEqual( + str(lon.quantize(Decimal("1.000"))), + mock_data["data"]["great_wall_of_china.coordinates.lon"], + ) diff --git a/tests/request_mock_data.py b/tests/request_mock_data.py index 0f82ec8..737f2b0 100644 --- a/tests/request_mock_data.py +++ b/tests/request_mock_data.py @@ -2,187 +2,1405 @@ from __future__ import unicode_literals mock_data = { - "_wiki_request calls": { - - (('explaintext', ''), ('prop', 'extracts|revisions'), ('rvprop', 'ids'), ('titles', 'Celtuce')): - {'query': {'pages': {'1868108': {'extract': 'Celtuce (Lactuca sativa var. 
asparagina, augustana, or angustata), also called stem lettuce, celery lettuce, asparagus lettuce, or Chinese lettuce, IPA (UK,US) /\u02c8s\u025blt.\u0259s/, is a cultivar of lettuce grown primarily for its thick stem, used as a vegetable. It is especially popular in China, and is called wosun (Chinese: \u83b4\u7b0b; pinyin: w\u014ds\u016dn) or woju (Chinese: \u83b4\u82e3; pinyin: w\u014dj\xf9) (although the latter name may also be used to mean lettuce in general).\n\nThe stem is usually harvested at a length of around 15\u201320 cm and a diameter of around 3\u20134 cm. It is crisp, moist, and mildly flavored, and typically prepared by slicing and then stir frying with more strongly flavored ingredients.\n\nDown: Photos of the celtuce, chinese lettuce or "Wosun" taken in the province of Girona (Catalonia, Spain, Europe) in June 2013\nCeltuce Nutritional content', 'ns': 0, 'pageid': 1868108, 'revisions': [{'revid': 575687826, 'parentid': 574302108}], 'title': 'Celtuce'}}}}, - - (('explaintext', ''), ('prop', 'extracts|revisions'), ('rvprop', 'ids'), ('titles', 'Tropical Depression Ten (2005)')): - {'query': {'pages': {'21196082': {'extract': 'Tropical Depression Ten was the tenth tropical cyclone of the record-breaking 2005 Atlantic hurricane season. It formed on August 13 from a tropical wave that emerged from the west coast of Africa on August 8. As a result of strong wind shear, the depression remained weak and did not strengthen beyond tropical depression status. The cyclone degenerated on August 14, although its remnants partially contributed to the formation of Tropical Depression Twelve, which eventually intensified into Hurricane Katrina. The cyclone had no effect on land, and did not directly result in any fatalities or damage.\n\n\n== Meteorological history ==\n\nOn August 8, a tropical wave emerged from the west coast of Africa and entered the Atlantic Ocean. 
Tracking towards the west, the depression began to exhibit signs of convective organization on August 11. The system continued to develop, and it is estimated that Tropical Depression Ten formed at 1200 UTC on August 13. At the time, it was located about 1,600 miles (2,600 km) east of Barbados. Upon its designation, the depression consisted of a large area of thunderstorm activity, with curved banding features and expanding outflow. However, the environmental conditions were predicted to quickly become unfavorable. The depression moved erratically and slowly towards the west, and wind shear inhibited any significant intensification. Late on August 13, it was "beginning to look like Irene-junior as it undergoes southwesterly mid-level shear beneath the otherwise favorable upper-level outflow pattern". The wind shear was expected to relent within 48 hours, prompting some forecast models to suggest the depression would eventually attain hurricane status.\nBy early August 14, the shear had substantially disrupted the storm, leaving the low-level center of circulation exposed from the area of convection, which was also deteriorating. After meandering, the storm began to move westward. Forecasters expected it to resume a northwestward track as high pressure to the south of Bermuda was forecasted to weaken and another high was predicted to form southwest of the Azores. By 1800 UTC on August 14, the strong shear had further weakened the storm, and it no longer met the criteria for a tropical cyclone. It degenerated into a remnant low, and the National Hurricane Center issued their final advisory on the cyclone. Moving westward, it occasionally produced bursts of convective activity, before dissipating on August 18.\nTropical Depression Twelve formed over the southeastern Bahamas at 2100 UTC on August 23, partially from the remains of Tropical Depression Ten. 
While the normal standards for numbering tropical depressions in the Atlantic stipulate that the initial designation be retained when a depression regenerates, satellite imagery indicated that a second tropical wave had combined with Tropical Depression Ten north of Puerto Rico to form a new, more complex weather system, which was then designated as Tropical Depression Twelve. In a re-analysis, it was found that the low-level circulation of Tropical Depression Ten had completely detached and dissipated; only the remnant mid-level circulation moved on and merged with the second tropical wave. As a result, the criteria for keeping the same name and identity were not met. Tropical Depression Twelve later became Hurricane Katrina.\n\n\n== Impact ==\nBecause Tropical Depression Ten never approached land as a tropical cyclone, no tropical cyclone watches and warnings were issued for any land masses. No effects, damages, or fatalities were reported, and no ships reported tropical storm-force winds in association with the depression. The system did not attain tropical storm status; as such, it was not given a name by the National Hurricane Center. The storm partially contributed to the formation of Hurricane Katrina, which became a Category 5 hurricane on the Saffir-Simpson Hurricane Scale and made landfall in Louisiana, causing catastrophic damage. 
Katrina was the costliest hurricane, and one of the five deadliest, in the history of the United States.\n\n\n== See also ==\n\nMeteorological history of Hurricane Katrina\nList of storms in the 2005 Atlantic hurricane season\nTimeline of the 2005 Atlantic hurricane season\n\n\n== References ==\n\n\n== External links ==\n\nTropical Depression Ten Tropical Cyclone Report\nTropical Depression Ten advisory archive', 'ns': 0, 'pageid': 21196082, 'revisions': [{'revid': 572715399, 'parentid': 539367750}], 'title': 'Tropical Depression Ten (2005)'}}}}, - - (('inprop', 'url'), ('ppprop', 'disambiguation'), ('prop', 'info|pageprops'), ('redirects', ''), ('titles', 'purpleberry')): - {'query': {'normalized': [{'to': 'Purpleberry', 'from': 'purpleberry'}], 'pages': {'-1': {'missing': '', 'editurl': 'http://en.wikipedia.org/w/index.php?title=Purpleberry&action=edit', 'title': 'Purpleberry', 'contentmodel': 'wikitext', 'pagelanguage': 'en', 'ns': 0, 'fullurl': 'http://en.wikipedia.org/wiki/Purpleberry'}}}}, - - (('limit', 1), ('list', 'search'), ('srinfo', 'suggestion'), ('srlimit', 1), ('srprop', ''), ('srsearch', 'Menlo Park, New Jersey')): - {'query-continue': {'search': {'sroffset': 1}}, 'query': {'search': [{'ns': 0, 'title': 'Edison, New Jersey'}]}, 'warnings': {'main': {'*': "Unrecognized parameter: 'limit'"}}}, - - (('inprop', 'url'), ('ppprop', 'disambiguation'), ('prop', 'info|pageprops'), ('redirects', ''), ('titles', 'Menlo Park, New Jersey')): - {'query': {'redirects': [{'to': 'Edison, New Jersey', 'from': 'Menlo Park, New Jersey'}], 'pages': {'125414': {'lastrevid': 607768264, 'pageid': 125414, 'title': 'Edison, New Jersey', 'editurl': 'http://en.wikipedia.org/w/index.php?title=Edison,_New_Jersey&action=edit', 'counter': '', 'length': 85175, 'contentmodel': 'wikitext', 'pagelanguage': 'en', 'touched': '2014-05-14T17:10:49Z', 'ns': 0, 'fullurl': 'http://en.wikipedia.org/wiki/Edison,_New_Jersey'}}}}, - - (('inprop', 'url'), ('ppprop', 'disambiguation'), ('prop', 
'info|pageprops'), ('redirects', ''), ('titles', 'Communist Party')): - {'query': {'redirects': [{'to': 'Communist party', 'from': 'Communist Party'}], 'pages': {'37008': {'lastrevid': 608086859, 'pageid': 37008, 'title': 'Communist party', 'editurl': 'http://en.wikipedia.org/w/index.php?title=Communist_party&action=edit', 'counter': '', 'length': 7868, 'contentmodel': 'wikitext', 'pagelanguage': 'en', 'touched': '2014-05-26T01:19:01Z', 'ns': 0, 'fullurl': 'http://en.wikipedia.org/wiki/Communist_party'}}}}, - - (('inprop', 'url'), ('ppprop', 'disambiguation'), ('prop', 'info|pageprops'), ('redirects', ''), ('titles', 'communist Party')): - {'query': {'redirects': [{'to': 'Communist party', 'from': 'Communist Party'}], 'normalized': [{'to': 'Communist Party', 'from': 'communist Party'}], 'pages': {'37008': {'lastrevid': 608086859, 'pageid': 37008, 'title': 'Communist party', 'editurl': 'http://en.wikipedia.org/w/index.php?title=Communist_party&action=edit', 'counter': '', 'length': 7868, 'contentmodel': 'wikitext', 'pagelanguage': 'en', 'touched': '2014-05-26T01:19:01Z', 'ns': 0, 'fullurl': 'http://en.wikipedia.org/wiki/Communist_party'}}}}, - - (('inprop', 'url'), ('ppprop', 'disambiguation'), ('prop', 'info|pageprops'), ('redirects', ''), ('titles', 'Communist party')): - {'query': {'pages': {'37008': {'lastrevid': 608086859, 'pageid': 37008, 'title': 'Communist party', 'editurl': 'http://en.wikipedia.org/w/index.php?title=Communist_party&action=edit', 'counter': '', 'length': 7868, 'contentmodel': 'wikitext', 'pagelanguage': 'en', 'touched': '2014-05-26T01:19:01Z', 'ns': 0, 'fullurl': 'http://en.wikipedia.org/wiki/Communist_party'}}}}, - - (('inprop', 'url'), ('ppprop', 'disambiguation'), ('prop', 'info|pageprops'), ('redirects', ''), ('titles', 'Edison, New Jersey')): - {'query': {'pages': {'125414': {'lastrevid': 607768264, 'pageid': 125414, 'title': 'Edison, New Jersey', 'editurl': 'http://en.wikipedia.org/w/index.php?title=Edison,_New_Jersey&action=edit', 
'counter': '', 'length': 85175, 'contentmodel': 'wikitext', 'pagelanguage': 'en', 'touched': '2014-05-14T17:10:49Z', 'ns': 0, 'fullurl': 'http://en.wikipedia.org/wiki/Edison,_New_Jersey'}}}}, - - (('inprop', 'url'), ('ppprop', 'disambiguation'), ('prop', 'info|pageprops'), ('redirects', ''), ('titles', 'Dodge Ram (disambiguation)')): - {'query': {'pages': {'18803364': {'lastrevid': 567152802, 'pageid': 18803364, 'title': 'Dodge Ram (disambiguation)', 'editurl': 'http://en.wikipedia.org/w/index.php?title=Dodge_Ram_(disambiguation)&action=edit', 'counter': '', 'length': 702, 'contentmodel': 'wikitext', 'pagelanguage': 'en', 'touched': '2013-08-08T15:12:27Z', 'ns': 0, 'pageprops': {'disambiguation': ''}, 'fullurl': 'http://en.wikipedia.org/wiki/Dodge_Ram_(disambiguation)'}}}}, - - (('prop', 'revisions'), ('rvlimit', 1), ('rvparse', ''), ('rvprop', 'content'), ('titles', 'Dodge Ram (disambiguation)')): - {'query-continue': {'revisions': {'rvcontinue': 556603298}}, 'query': {'pages': {'18803364': {'ns': 0, 'pageid': 18803364, 'revisions': [{'*': '

Dodge Ram is a collective nameplate for light trucks made by Dodge\n

\n\n

See also:\n

\n\n\n\n\n\n\n\n'}], 'title': 'Dodge Ram (disambiguation)'}}}}, - - (('limit', 1), ('list', 'search'), ('srinfo', 'suggestion'), ('srlimit', 1), ('srprop', ''), ('srsearch', 'butteryfly')): - {'query-continue': {'search': {'sroffset': 1}}, 'query': {'searchinfo': {'suggestion': 'butterfly'}, 'search': [{'ns': 0, 'title': "Butterfly's Tongue"}]}, 'warnings': {'main': {'*': "Unrecognized parameter: 'limit'"}}}, - - (('inprop', 'url'), ('ppprop', 'disambiguation'), ('prop', 'info|pageprops'), ('redirects', ''), ('titles', 'butterfly')): - {'query': {'normalized': [{'to': 'Butterfly', 'from': 'butterfly'}], 'pages': {'48338': {'lastrevid': 566847704, 'pageid': 48338, 'title': 'Butterfly', 'editurl': 'http://en.wikipedia.org/w/index.php?title=Butterfly&action=edit', 'counter': '', 'length': 60572, 'contentmodel': 'wikitext', ' pagelanguage': 'en', 'touched': '2013-08-07T11:15:37Z', 'ns': 0, 'fullurl': 'http://en.wikipedia.org/wiki/Butterfly'}}}}, - - (('limit', 1), ('list', 'search'), ('srinfo', 'suggestion'), ('srlimit', 1), ('srprop', ''), ('srsearch', 'Celtuce')): - {'query-continue': {'search': {'sroffset': 1}}, 'query': {'search': [{'ns': 0, 'title': 'Celtuce'}]}, 'warnings': {'main': {'*': "Unrecognized parameter: 'limit'"}}}, - - (('limit', 1), ('list', 'search'), ('srinfo', 'suggestion'), ('srlimit', 1), ('srprop', ''), ('srsearch', 'Tropical Depression Ten (2005)')): - {'query-continue': {'search': {'sroffset': 1}}, 'query': {'search': [{'ns': 0, 'title': 'Tropical Depression Ten (2005)'}]}, 'warnings': {'main': {'*': "Unrecognized parameter: 'limit'"}}}, - - (('limit', 1), ('list', 'search'), ('srinfo', 'suggestion'), ('srlimit', 1), ('srprop', ''), ('srsearch', 'Great Wall of China')): - {'query-continue': {'search': {'sroffset': 1}}, 'query': {'search': [{'ns': 0, 'title': 'Great Wall of China'}]}, 'warnings': {'main': {'*': "Unrecognized parameter: 'limit'"}}}, - - (('inprop', 'url'), ('ppprop', 'disambiguation'), ('prop', 'info|pageprops'), ('redirects', 
''), ('titles', 'Celtuce')): - {'query': {'pages': {'1868108': {'lastrevid': 562756085, 'pageid': 1868108, 'title': 'Celtuce', 'editurl': 'http://en.wikipedia.org/w/index.php?title=Celtuce&action=edit', 'counter': '', 'length': 1662, 'contentmodel': 'wikitext', 'pagelanguage': 'en', 'touched': '2013-08-17T03:30:23Z', 'ns': 0, 'fullurl': 'http://en.wikipedia.org/wiki/Celtuce'}}}}, - - (('inprop', 'url'), ('ppprop', 'disambiguation'), ('prop', 'info|pageprops'), ('redirects', ''), ('titles', 'Tropical Depression Ten (2005)')): - {'query': {'pages': {'21196082': {'lastrevid': 572715399, 'pageid': 21196082, 'title': 'Tropical Depression Ten (2005)', 'editurl': 'http://en.wikipedia.org/w/index.php?title=Tropical_Depression_Ten_(2005)&action=edit', 'counter': '', 'length': 8543, 'contentmodel': 'wikitext', 'pagelanguage': 'en', 'touched': '2013-09-18T13:45:33Z', 'ns': 0, 'fullurl': 'http://en.wikipedia.org/wiki/Tropical_Depression_Ten_(2005)'}}}}, - - (('inprop', 'url'), ('ppprop', 'disambiguation'), ('prop', 'info|pageprops'), ('redirects', ''), ('titles', 'Great Wall of China')): - {'query': {'pages': {'5094570': {'lastrevid': 604138653, 'pageid': 5094570, 'title': 'Great Wall of China', 'editurl': 'http://en.wikipedia.org/w/index.php?title=Great_Wall_of_China&action=edit', 'counter': '', 'length': 23895, 'contentmodel': 'wikitext', 'pagelanguage': 'en', 'touched': '2013-08-17T03:30:23Z', 'ns': 0, 'fullurl': 'http://en.wikipedia.org/wiki/Great_Wall_of_China'}}}}, - - (('explaintext', ''), ('prop', 'extracts'), ('titles', 'Celtuce')): - {'query': {'pages': {'1868108': {'extract': 'Celtuce (Lactuca sativa var. asparagina, augustana, or angustata), also called stem lettuce, celery lettuce, asparagus lettuce, or Chinese lettuce, IPA (UK,US) /\u02c8s\u025blt.\u0259s/, is a cultivar of lettuce grown primarily for its thick stem, used as a vegetable. 
It is especially popular in China, and is called wosun (Chinese: \u83b4\u7b0b; pinyin: w\u014ds\u016dn) or woju (Chinese: \u83b4\u82e3; pinyin: w\u014dj\xf9) (although the latter name may also be used to mean lettuce in general).\n\nThe stem is usually harvested at a length of around 15\u201320 cm and a diameter of around 3\u20134 cm. It is crisp, moist, and mildly flavored, and typically prepared by slicing and then stir frying with more strongly flavored ingredients.', 'ns': 0, 'pageid': 1868108, 'title': 'Celtuce'}}}}, - - (('exintro', ''), ('explaintext', ''), ('prop', 'extracts'), ('titles', 'Celtuce')): - {'query': {'pages': {'1868108': {'extract': 'Celtuce (Lactuca sativa var. asparagina, augustana, or angustata), also called stem lettuce, celery lettuce, asparagus lettuce, or Chinese lettuce, IPA (UK,US) /\u02c8s\u025blt.\u0259s/, is a cultivar of lettuce grown primarily for its thick stem, used as a vegetable. It is especially popular in China, and is called wosun (Chinese: \u83b4\u7b0b; pinyin: w\u014ds\u016dn) or woju (Chinese: \u83b4\u82e3; pinyin: w\u014dj\xf9) (although the latter name may also be used to mean lettuce in general).\n\nThe stem is usually harvested at a length of around 15\u201320 cm and a diameter of around 3\u20134 cm. It is crisp, moist, and mildly flavored, and typically prepared by slicing and then stir frying with more strongly flavored ingredients.', 'ns': 0, 'pageid': 1868108, 'title': 'Celtuce'}}}}, - - (('exintro', ''), ('explaintext', ''), ('prop', 'extracts'), ('titles', 'Tropical Depression Ten (2005)')): - {'query': {'pages': {'21196082': {'extract': 'Tropical Depression Ten was the tenth tropical cyclone of the record-breaking 2005 Atlantic hurricane season. It formed on August 13 from a tropical wave that emerged from the west coast of Africa on August 8. As a result of strong wind shear, the depression remained weak and did not strengthen beyond tropical depression status. 
The cyclone degenerated on August 14, although its remnants partially contributed to the formation of Tropical Depression Twelve, which eventually intensified into Hurricane Katrina. The cyclone had no effect on land, and did not directly result in any fatalities or damage.\n\n', 'ns': 0, 'pageid': 21196082, 'title': 'Tropical Depression Ten (2005)'}}}}, - - (('generator', 'images'), ('gimlimit', 'max'), ('iiprop', 'url'), ('prop', 'imageinfo'), ('titles', 'Celtuce')): - {'query': {'pages': {'22263385': {'imagerepository': 'local', 'ns': 6, 'pageid': 22263385, 'imageinfo': [{'url': 'http://upload.wikimedia.org/wikipedia/en/9/99/Question_book-new.svg', 'descriptionurl': 'http://en.wikipedia.org/wiki/File:Question_book-new.svg'}], 'title': 'File:Question book-new.svg'}, '-1': {'imagerepository': 'shared', 'ns': 6, 'imageinfo': [{'url': 'http://upload.wikimedia.org/wikipedia/commons/8/87/Celtuce.jpg', 'descriptionurl': 'http://commons.wikimedia.org/wiki/File:Celtuce.jpg'}], 'missing': '', 'title': 'File:Celtuce.jpg'}, '-3': {'imagerepository': 'shared', 'ns': 6, 'imageinfo': [{'url': 'http://upload.wikimedia.org/wikipedia/commons/7/79/VegCorn.jpg', 'descriptionurl': 'http://commons.wikimedia.org/wiki/File:VegCorn.jpg'}], 'missing': '', 'title': 'File:VegCorn.jpg'}, '-2': {'imagerepository': 'shared', 'ns': 6, 'imageinfo': [{'url': 'http://upload.wikimedia.org/wikipedia/commons/d/dc/The_farmer%27s_market_near_the_Potala_in_Lhasa.jpg', 'descriptionurl': 'http://commons.wikimedia.org/wiki/File:The_farmer%27s_market_near_the_Potala_in_Lhasa.jpg'}], 'missing': '', 'title': "File:The farmer's market near the Potala in Lhasa.jpg"}}}, 'limits': {'images': 500}}, - - (('generator', 'images'), ('gimlimit', 'max'), ('iiprop', 'url'), ('prop', 'imageinfo'), ('titles', 'Tropical Depression Ten (2005)')): - {'query': {'pages': {'33285577': {'imagerepository': 'local', 'ns': 6, 'pageid': 33285577, 'imageinfo': [{'url': 
'http://upload.wikimedia.org/wikipedia/en/4/4a/Commons-logo.svg', 'descriptionurl': 'http://en.wikipedia.org/wiki/File:Commons-logo.svg'}], 'title': 'File:Commons-logo.svg'}, '23473511': {'imagerepository': 'local', 'ns': 6, 'pageid': 23473511, 'imageinfo': [{'url': 'http://upload.wikimedia.org/wikipedia/en/4/48/Folder_Hexagonal_Icon.svg', 'descriptionurl': 'http://en.wikipedia.org/wiki/File:Folder_Hexagonal_Icon.svg'}], 'title': 'File:Folder Hexagonal Icon.svg'}, '33285464': {'imagerepository': 'local', 'ns': 6, 'pageid': 33285464, 'imageinfo': [{'url': 'http://upload.wikimedia.org/wikipedia/en/e/e7/Cscr-featured.svg', 'descriptionurl': 'http://en.wikipedia.org/wiki/File:Cscr-featured.svg'}], 'title': 'File:Cscr-featured.svg'}, '2526001': {'imagerepository': 'shared', 'ns': 6, 'pageid': 2526001, 'imageinfo': [{'url': 'http://upload.wikimedia.org/wikipedia/commons/8/89/Cyclone_Catarina_from_the_ISS_on_March_26_2004.JPG', 'descriptionurl': 'http://commons.wikimedia.org/wiki/File:Cyclone_Catarina_from_the_ISS_on_March_26_2004.JPG'}], 'title': 'File:Cyclone Catarina from the ISS on March 26 2004.JPG'}, '33285257': {'imagerepository': 'local', 'ns': 6, 'pageid': 33285257, 'imageinfo': [{'url': 'http://upload.wikimedia.org/wikipedia/en/f/fd/Portal-puzzle.svg', 'descriptionurl': 'http://en.wikipedia.org/wiki/File:Portal-puzzle.svg'}], 'title': 'File:Portal-puzzle.svg'}, '-5': {'imagerepository': 'shared', 'ns': 6, 'imageinfo': [{'url': 'http://upload.wikimedia.org/wikipedia/commons/8/89/Symbol_book_class2.svg', 'descriptionurl': 'http://commons.wikimedia.org/wiki/File:Symbol_book_class2.svg'}], 'missing': '', 'title': 'File:Symbol book class2.svg'}, '-4': {'imagerepository': 'shared', 'ns': 6, 'imageinfo': [{'url': 'http://upload.wikimedia.org/wikipedia/commons/4/47/Sound-icon.svg', 'descriptionurl': 'http://commons.wikimedia.org/wiki/File:Sound-icon.svg'}], 'missing': '', 'title': 'File:Sound-icon.svg'}, '-7': {'imagerepository': 'shared', 'ns': 6, 'imageinfo': [{'url': 
'http://upload.wikimedia.org/wikipedia/commons/7/7d/Tropical_Depression_10_%282005%29.png', 'descriptionurl': 'http://commons.wikimedia.org/wiki/File:Tropical_Depression_10_(2005).png'}], 'missing': '', 'title': 'File:Tropical Depression 10 (2005).png'}, '-6': {'imagerepository': 'shared', 'ns': 6, 'imageinfo': [{'url': 'http://upload.wikimedia.org/wikipedia/commons/4/4a/TD_10_August_13%2C_2005.jpg', 'descriptionurl': 'http://commons.wikimedia.org/wiki/File:TD_10_August_13,_2005.jpg'}], 'missing': '', 'title': 'File:TD 10 August 13, 2005.jpg'}, '-1': {'imagerepository': 'shared', 'ns': 6, 'imageinfo': [{'url': 'http://upload.wikimedia.org/wikipedia/commons/a/a5/10-L_2005_track.png', 'descriptionurl': 'http://commons.wikimedia.org/wiki/File:10-L_2005_track.png'}], 'missing': '', 'title': 'File:10-L 2005 track.png'}, '-3': {'imagerepository': 'shared', 'ns': 6, 'imageinfo': [{'url': 'http://upload.wikimedia.org/wikipedia/commons/3/37/People_icon.svg', 'descriptionurl': 'http://commons.wikimedia.org/wiki/File:People_icon.svg'}], 'missing': '', 'title': 'File:People icon.svg'}, '-2': {'imagerepository': 'shared', 'ns': 6, 'imageinfo': [{'url': 'http://upload.wikimedia.org/wikipedia/commons/e/e0/2005_Atlantic_hurricane_season_summary_map.png', 'descriptionurl': 'http://commons.wikimedia.org/wiki/File:2005_Atlantic_hurricane_season_summary_map.png'}], 'missing': '', 'title': 'File:2005 Atlantic hurricane season summary map.png'}, '-8': {'imagerepository': 'shared', 'ns': 6, 'imageinfo': [{'url': 'http://upload.wikimedia.org/wikipedia/commons/3/33/Tropical_Depression_Ten_%282005%29.ogg', 'descriptionurl': 'http://commons.wikimedia.org/wiki/File:Tropical_Depression_Ten_(2005).ogg'}], 'missing': '', 'title': 'File:Tropical Depression Ten (2005).ogg'}}}, 'limits': {'images': 500}}, - - (('ellimit', 'max'), ('prop', 'extlinks'), ('titles', 'Celtuce')): - {'query': {'pages': {'1868108': {'extlinks': [{'*': 'http://ndb.nal.usda.gov/ndb/search/list'}, {'*': 
'http://ndb.nal.usda.gov/ndb/search/list?qlookup=11145&format=Full'}], 'ns': 0, 'pageid': 1868108, 'title': 'Celtuce'}}}, 'limits': {'extlinks': 500}}, - - (('ellimit', 'max'), ('prop', 'extlinks'), ('titles', 'Tropical Depression Ten (2005)')): - {'query': {'pages': {'21196082': {'extlinks': [{'*': 'http://books.google.com/?id=-a8DRl1HuwoC&q=%22tropical+depression+ten%22+2005&dq=%22tropical+depression+ten%22+2005'}, {'*': 'http://facstaff.unca.edu/chennon/research/documents/erb_ncur2006_preprint.pdf'}, {'*': 'http://www.nhc.noaa.gov/archive/2005/TEN.shtml?'}, {'*': 'http://www.nhc.noaa.gov/archive/2005/dis/al102005.discus.001.shtml?'}, {'*': 'http://www.nhc.noaa.gov/archive/2005/dis/al102005.discus.002.shtml?'}, {'*': 'http://www.nhc.noaa.gov/archive/2005/dis/al102005.discus.003.shtml?'}, {'*': 'http://www.nhc.noaa.gov/archive/2005/dis/al122005.discus.001.shtml'}, {'*': 'http://www.nhc.noaa.gov/pdf/TCR-AL102005_Ten.pdf'}, {'*': 'http://www.nhc.noaa.gov/pdf/TCR-AL122005_Katrina.pdf'}, {'*': 'http://www.wptv.com/content/chopper5/story/Capt-Julie-Reports-On-Hurricane-Katrina/q__v8S2TZES2GiccRTQ2bw.cspx'}], 'ns': 0, 'pageid': 21196082, 'title': 'Tropical Depression Ten (2005)'}}}, 'limits': {'extlinks': 500}}, - - (('pllimit', 'max'), ('plnamespace', 0), ('prop', 'links'), ('titles', 'Celtuce')): - {'query': {'pages': {'1868108': {'ns': 0, 'pageid': 1868108, 'links': [{'ns': 0, 'title': 'Calcium'}, {'ns': 0, 'title': 'Carbohydrate'}, {'ns': 0, 'title': 'Chinese language'}, {'ns': 0, 'title': 'Dietary Reference Intake'}, {'ns': 0, 'title': 'Dietary fiber'}, {'ns': 0, 'title': 'Fat'}, {'ns': 0, 'title': 'Folate'}, {'ns': 0, 'title': 'Food energy'}, {'ns': 0, 'title': 'Iron'}, {'ns': 0, 'title': 'Lettuce'}, {'ns': 0, 'title': 'Lhasa'}, {'ns': 0, 'title': 'Magnesium in biology'}, {'ns': 0, 'title': 'Manganese'}, {'ns': 0, 'title': 'Niacin'}, {'ns': 0, 'title': 'Pantothenic acid'}, {'ns': 0, 'title': 'Phosphorus'}, {'ns': 0, 'title': 'Pinyin'}, {'ns': 0, 'title': 'Plant 
stem'}, {'ns': 0, 'title': 'Potassium'}, {'ns': 0, 'title': 'Protein (nutrient)'}, {'ns': 0, 'title': 'Riboflavin'}, {'ns': 0, 'title': 'Sodium'}, {'ns': 0, 'title': 'Stir frying'}, {'ns': 0, 'title': 'Thiamine'}, {'ns': 0, 'title': 'Vegetable'}, {'ns': 0, 'title': 'Vitamin A'}, {'ns': 0, 'title': 'Vitamin B6'}, {'ns': 0, 'title': 'Vitamin C'}, {'ns': 0, 'title': 'Zinc'}], 'title': 'Celtuce'}}}, 'limits': {'links': 500}}, - - (('pllimit', 'max'), ('plnamespace', 0), ('prop', 'links'), ('titles', 'Tropical Depression Ten (2005)')): - {'query': {'pages': {'21196082': {'ns': 0, 'pageid': 21196082, 'links': [{'ns': 0, 'title': '2005 Atlantic hurricane season'}, {'ns': 0, 'title': '2005 Azores subtropical storm'}, {'ns': 0, 'title': 'Atlantic Ocean'}, {'ns': 0, 'title': 'Atmospheric circulation'}, {'ns': 0, 'title': 'Atmospheric convection'}, {'ns': 0, 'title': 'Azores'}, {'ns': 0, 'title': 'Bahamas'}, {'ns': 0, 'title': 'Bar (unit)'}, {'ns': 0, 'title': 'Barbados'}, {'ns': 0, 'title': 'Bermuda'}, {'ns': 0, 'title': 'High pressure area'}, {'ns': 0, 'title': 'Hurricane Beta'}, {'ns': 0, 'title': 'Hurricane Cindy (2005)'}, {'ns': 0, 'title': 'Hurricane Dennis'}, {'ns': 0, 'title': 'Hurricane Emily (2005)'}, {'ns': 0, 'title': 'Hurricane Epsilon'}, {'ns': 0, 'title': 'Hurricane Irene (2005)'}, {'ns': 0, 'title': 'Hurricane Katrina'}, {'ns': 0, 'title': 'Hurricane Maria (2005)'}, {'ns': 0, 'title': 'Hurricane Nate (2005)'}, {'ns': 0, 'title': 'Hurricane Ophelia (2005)'}, {'ns': 0, 'title': 'Hurricane Philippe (2005)'}, {'ns': 0, 'title': 'Hurricane Rita'}, {'ns': 0, 'title': 'Hurricane Stan'}, {'ns': 0, 'title': 'Hurricane Vince (2005)'}, {'ns': 0, 'title': 'Hurricane Wilma'}, {'ns': 0, 'title': 'Inch of mercury'}, {'ns': 0, 'title': 'International Standard Book Number'}, {'ns': 0, 'title': 'List of Category 5 Atlantic hurricanes'}, {'ns': 0, 'title': 'List of storms in the 2005 Atlantic hurricane season'}, {'ns': 0, 'title': 'Louisiana'}, {'ns': 0, 'title': 'Meteorological 
history of Hurricane Katrina'}, {'ns': 0, 'title': 'National Hurricane Center'}, {'ns': 0, 'title': 'North Atlantic tropical cyclone'}, {'ns': 0, 'title': 'Outflow (meteorology)'}, {'ns': 0, 'title': 'Pascal (unit)'}, {'ns': 0, 'title': 'Puerto Rico'}, {'ns': 0, 'title': 'Saffir-Simpson Hurricane Scale'}, {'ns': 0, 'title': 'Saffir\u2013Simpson hurricane wind scale'}, {'ns': 0, 'title': 'Timeline of the 2005 Atlantic hurricane season'}, {'ns': 0, 'title': 'Tropical Storm Alpha (2005)'}, {'ns': 0, 'title': 'Tropical Storm Arlene (2005)'}, {'ns': 0, 'title': 'Tropical Storm Bret (2005)'}, {'ns': 0, 'title': 'Tropical Storm Delta (2005)'}, {'ns': 0, 'title': 'Tropical Storm Franklin (2005)'}, {'ns': 0, 'title': 'Tropical Storm Gamma'}, {'ns': 0, 'title': 'Tropical Storm Gert (2005)'}, {'ns': 0, 'title': 'Tropical Storm Jose (2005)'}, {'ns': 0, 'title': 'Tropical Storm Tammy (2005)'}, {'ns': 0, 'title': 'Tropical Storm Zeta'}, {'ns': 0, 'title': 'Tropical cyclone'}, {'ns': 0, 'title': 'Tropical cyclone scales'}, {'ns': 0, 'title': 'Tropical cyclone watches and warnings'}, {'ns': 0, 'title': 'Tropical wave'}, {'ns': 0, 'title': 'Wind shear'}], 'title': 'Tropical Depression Ten (2005)'}}}, 'limits': {'links': 500}}, - - (('cllimit', 'max'), ('prop', 'categories'), ('titles', 'Celtuce')): - {"query":{"pages":{"1868108":{"pageid":1868108,"ns":0,"title":"Celtuce","categories":[{"ns":14,"title":"All articles lacking sources"},{"ns":14,"title":"All stub articles"},{"ns":14,"title":"Articles containing Chinese-language text"},{"ns":14,"title":"Articles lacking sources from December 2009"},{"ns":14,"title":"Stem vegetables"},{"ns":14,"title":"Vegetable stubs"}]}}},"limits":{"categories":500}}, - - (('cllimit', 'max'), ('prop', 'categories'), ('titles', 'Tropical Depression Ten (2005)')): - {"query":{"pages":{"21196082":{"pageid":21196082,"ns":0,"title":"Tropical Depression Ten (2005)","categories":[{"ns":14,"title":"2005 Atlantic hurricane season"},{"ns":14,"title":"Articles 
with hAudio microformats"},{"ns":14,"title":"Atlantic tropical depressions"},{"ns":14,"title":"CS1 errors: dates"},{"ns":14,"title":"Commons category with local link same as on Wikidata"},{"ns":14,"title":"Featured articles"},{"ns":14,"title":"Hurricane Katrina"},{"ns":14,"title":"Spoken articles"}]}}},"limits":{"categories":500}}, - - (('prop', 'revisions'), ('rvlimit', 1), ('rvparse', ''), ('rvprop', 'content'), ('titles', 'Celtuce')): - {'query-continue': {'revisions': {'rvcontinue': 547842204}}, 'query': {'pages': {'1868108': {'ns': 0, 'pageid': 1868108, 'revisions': [{'*': '\n
Celtuce stems & heads
\n

Celtuce (Lactuca sativa var. asparagina, augustana, or angustata), also called stem lettuce, celery lettuce, asparagus lettuce, or Chinese lettuce, IPA (UK,US) /\u02c8s\u025blt.\u0259s/, is a cultivar of lettuce grown primarily for its thick stem, used as a vegetable. It is especially popular in China, and is called wosun (Chinese: \u83b4\u7b0b; pinyin: w\u014ds\u016dn) or woju (Chinese: \u83b4\u82e3; pinyin: w\u014dj\xf9) (although the latter name may also be used to mean lettuce in general).\n

\n
Celtuce (foreground) for sale in Lhasa
\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n
Celtuce, raw\n\n
Nutritional value per 100 g (3.5 oz)\n
Energy\n 75 kJ (18 kcal)\n
Carbohydrates\n 3.65 g\n
- Dietary fiber\n 1.7 g\n
Fat\n 0.3 g\n
Protein\n 0.85 g\n
Vitamin A equiv.\n 175 \u03bcg (22%)\n
Thiamine (vit. B1)\n 0.055 mg (5%)\n
Riboflavin (vit. B2)\n 0.07 mg (6%)\n
Niacin (vit. B3)\n 0.55 mg (4%)\n
Pantothenic acid (B5)\n 0.183 mg (4%)\n
Vitamin B6\n 0.05 mg (4%)\n
Folate (vit. B9)\n 46 \u03bcg (12%)\n
Vitamin C\n 19.5 mg (23%)\n
Calcium\n 39 mg (4%)\n
Iron\n 0.55 mg (4%)\n
Magnesium\n 28 mg (8%)\n
Manganese\n 0.688 mg (33%)\n
Phosphorus\n 39 mg (6%)\n
Potassium\n 330 mg (7%)\n
Sodium\n 11 mg (1%)\n
Zinc\n 0.27 mg (3%)\n
Link to USDA Database entry
Percentages are roughly approximated
using US recommendations for adults.
Source: USDA Nutrient Database\n
\n

The stem is usually harvested at a length of around 15\u201320 cm and a diameter of around 3\u20134 cm. It is crisp, moist, and mildly flavored, and typically prepared by slicing and then stir frying with more strongly flavored ingredients.\n


\n

\n\n\n\n\n'}], 'title': 'Celtuce'}}}}, - - (('action', 'parse'), ('page', 'Tropical Depression Ten (2005)'), ('prop', 'sections')): - {'parse': {'sections': [{'index': '1', 'level': '2', 'fromtitle': 'Tropical_Depression_Ten_(2005)', 'toclevel': 1, 'number': '1', 'byteoffset': 1369, 'line': 'Meteorological history', 'anchor': 'Meteorological_history'}, {'index': '2', 'level': '2', 'fromtitle': 'Tropical_Depression_Ten_(2005)', 'toclevel': 1, 'number': '2', 'byteoffset': 6248, 'line': 'Impact', 'anchor': 'Impact'}, {'index': '3', 'level': '2', 'fromtitle': 'Tropical_Depression_Ten_(2005)', 'toclevel': 1, 'number': '3', 'byteoffset': 7678, 'line': 'See also', 'anchor': 'See_also'}, {'index': '4', 'level': '2', 'fromtitle': 'Tropical_Depression_Ten_(2005)', 'toclevel': 1, 'number': '4', 'byteoffset': 7885, 'line': 'References', 'anchor': 'References'}, {'index': '5', 'level': '2', 'fromtitle': 'Tropical_Depression_Ten_(2005)', 'toclevel': 1, 'number': '5', 'byteoffset': 7917, 'line': 'External links', 'anchor': 'External_links'}], 'title': 'Tropical Depression Ten (2005)'}}, - - (('limit', 10), ('list', 'search'), ('srlimit', 10), ('srprop', ''), ('srsearch', 'Barack Obama')): - {'query-continue': {'search': {'sroffset': 10}}, 'query': {'searchinfo': {'totalhits': 12987}, 'search': [{'ns': 0, 'title': 'Barack Obama'}, {'ns': 0, 'title': 'Barack Obama, Sr.'}, {'ns': 0, 'title': 'Presidency of Barack Obama'}, {'ns': 0, 'title': 'Barack Obama presidential campaign, 2008'}, {'ns': 0, 'title': 'List of federal judges appointed by Barack Obama'}, {'ns': 0, 'title': 'Barack Obama in comics'}, {'ns': 0, 'title': 'Political positions of Barack Obama'}, {'ns': 0, 'title': 'Barack Obama on social media'}, {'ns': 0, 'title': 'List of Batman: The Brave and the Bold characters'}, {'ns': 0, 'title': 'Family of Barack Obama'}]}, 'warnings': {'main': {'*': "Unrecognized parameter: 'limit'"}}}, - - (('limit', 3), ('list', 'search'), ('srlimit', 3), ('srprop', ''), ('srsearch', 
'Porsche')): - {'query-continue': {'search': {'sroffset': 3}}, 'query': {'searchinfo': {'totalhits': 5335}, 'search': [{'ns': 0, 'title': 'Porsche'}, {'ns': 0, 'title': 'Porsche in motorsport'}, {'ns': 0, 'title': 'Porsche 911 GT3'}]}, 'warnings': {'main': {'*': "Unrecognized parameter: 'limit'"}}}, - - (('limit', 10), ('list', 'search'), ('srinfo', 'suggestion'), ('srlimit', 10), ('srprop', ''), ('srsearch', 'hallelulejah')): - {'query': {'searchinfo': {'suggestion': 'hallelujah'}, 'search': []}, 'warnings': {'main': {'*': "Unrecognized parameter: 'limit'"}}}, - - (('limit', 10), ('list', 'search'), ('srinfo', 'suggestion'), ('srlimit', 10), ('srprop', ''), ('srsearch', 'qmxjsudek')): - {'query': {'search': []}, 'warnings': {'main': {'*': "Unrecognized parameter: 'limit'"}}}, - - (('inprop', 'url'), ('pageids', 1868108), ('ppprop', 'disambiguation'), ('prop', 'info|pageprops'), ('redirects', '')): - {'query': {'pages': {'1868108': {'lastrevid': 575687826, 'pageid': 1868108, 'title': 'Celtuce', 'editurl': 'http://en.wikipedia.org/w/index.php?title=Celtuce&action=edit', 'counter': '', 'length': 1960, 'contentmodel': 'wikitext', 'pagelanguage': 'en', 'touched': '2014-01-12T09:30:00Z', 'ns': 0, 'fullurl': 'http://en.wikipedia.org/wiki/Celtuce'}}}}, - - (('colimit', 'max'), ('prop', 'coordinates'), ('titles', 'Great Wall of China')): - {'query': {'pages': {'5094570': {'ns': 0, 'pageid': 5094570, 'coordinates': [{'lat': 40.6769, 'globe': 'earth', 'lon': 117.232, 'primary': ''}], 'title': 'Great Wall of China'}}}, 'limits': {'extlinks': 500}}, - - (('gscoord', '40.67693|117.23193'), ('gslimit', 10), ('gsradius', 1000), ('list', 'geosearch')): - {'query': {'geosearch': [{'pageid': 5094570, 'title': 'Great Wall of China', 'lon': 117.232, 'primary': '', 'lat': 40.6769, 'dist': 6.8, 'ns': 0}]}}, - - (('gscoord', '40.67693|117.23193'), ('gslimit', 10), ('gsradius', 10000), ('list', 'geosearch')): - {'query': {'geosearch': [{'pageid': 5094570, 'title': 'Great Wall of China', 
'lon': 117.232, 'primary': '', 'lat': 40.6769, 'dist': 6.8, 'ns': 0}, {'pageid': 10135375, 'title': 'Jinshanling', 'lon': 117.244, 'primary': '', 'lat': 40.6764, 'dist': 1019.6, 'ns': 0}]}}, - - (('gscoord', '40.67693|117.23193'), ('gslimit', 10), ('gsradius', 1000), ('list', 'geosearch'), ('titles', 'Great Wall of China')): - {'query': {'geosearch': [{'pageid': 5094570, 'title': 'Great Wall of China', 'lon': 117.232, 'primary': '', 'lat': 40.6769, 'dist': 6.8, 'ns': 0}]}}, - - (('gscoord', '40.67693|117.23193'), ('gslimit', 10), ('gsradius', 1000), ('list', 'geosearch'), ('titles', 'Test')): - {'query': {'geosearch': []}}, - }, - - "data": { - "celtuce.content": 'Celtuce (Lactuca sativa var. asparagina, augustana, or angustata), also called stem lettuce, celery lettuce, asparagus lettuce, or Chinese lettuce, IPA (UK,US) /\u02c8s\u025blt.\u0259s/, is a cultivar of lettuce grown primarily for its thick stem, used as a vegetable. It is especially popular in China, and is called wosun (Chinese: \u83b4\u7b0b; pinyin: w\u014ds\u016dn) or woju (Chinese: \u83b4\u82e3; pinyin: w\u014dj\xf9) (although the latter name may also be used to mean lettuce in general).\n\nThe stem is usually harvested at a length of around 15\u201320 cm and a diameter of around 3\u20134 cm. It is crisp, moist, and mildly flavored, and typically prepared by slicing and then stir frying with more strongly flavored ingredients.\n\nDown: Photos of the celtuce, chinese lettuce or "Wosun" taken in the province of Girona (Catalonia, Spain, Europe) in June 2013\nCeltuce Nutritional content', - - "celtuce.parentid": 574302108, - - "celtuce.revid": 575687826, - - "celtuce.summary": "Celtuce (Lactuca sativa var. asparagina, augustana, or angustata), also called stem lettuce, celery lettuce, asparagus lettuce, or Chinese lettuce, IPA (UK,US) /\u02c8s\u025blt.\u0259s/, is a cultivar of lettuce grown primarily for its thick stem, used as a vegetable. 
It is especially popular in China, and is called wosun (Chinese: \u83b4\u7b0b; pinyin: w\u014ds\u016dn) or woju (Chinese: \u83b4\u82e3; pinyin: w\u014dj\xf9) (although the latter name may also be used to mean lettuce in general).\n\nThe stem is usually harvested at a length of around 15\u201320 cm and a diameter of around 3\u20134 cm. It is crisp, moist, and mildly flavored, and typically prepared by slicing and then stir frying with more strongly flavored ingredients.", - - "celtuce.images": ['http://upload.wikimedia.org/wikipedia/commons/7/79/VegCorn.jpg', 'http://upload.wikimedia.org/wikipedia/commons/8/87/Celtuce.jpg', 'http://upload.wikimedia.org/wikipedia/commons/d/dc/The_farmer%27s_market_near_the_Potala_in_Lhasa.jpg', 'http://upload.wikimedia.org/wikipedia/en/9/99/Question_book-new.svg'], - - "celtuce.references": ['http://ndb.nal.usda.gov/ndb/search/list', 'http://ndb.nal.usda.gov/ndb/search/list?qlookup=11145&format=Full'], - - "celtuce.links": ['Calcium', 'Carbohydrate', 'Chinese language', 'Dietary Reference Intake', 'Dietary fiber', 'Fat', 'Folate', 'Food energy', 'Iron', 'Lettuce', 'Lhasa', 'Magnesium in biology', 'Manganese', 'Niacin', 'Pantothenic acid', 'Phosphorus', 'Pinyin', 'Plant stem', 'Potassium', 'Protein (nutrient)', 'Riboflavin', 'Sodium', 'Stir frying', 'Thiamine', 'Vegetable', 'Vitamin A', 'Vitamin B6', 'Vitamin C', 'Zinc'], - - "celtuce.categories": ['All articles lacking sources', 'All stub articles', 'Articles containing Chinese-language text', 'Articles lacking sources from December 2009', 'Stem vegetables', 'Vegetable stubs'], - - "celtuce.html": '\n
Celtuce stems & heads
\n

Celtuce (Lactuca sativa var. asparagina, augustana, or angustata), also called stem lettuce, celery lettuce, asparagus lettuce, or Chinese lettuce, IPA (UK,US) /\u02c8s\u025blt.\u0259s/, is a cultivar of lettuce grown primarily for its thick stem, used as a vegetable. It is especially popular in China, and is called wosun (Chinese: \u83b4\u7b0b; pinyin: w\u014ds\u016dn) or woju (Chinese: \u83b4\u82e3; pinyin: w\u014dj\xf9) (although the latter name may also be used to mean lettuce in general).\n

\n
Celtuce (foreground) for sale in Lhasa
\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n
Celtuce, raw\n\n
Nutritional value per 100 g (3.5 oz)\n
Energy\n 75 kJ (18 kcal)\n
Carbohydrates\n 3.65 g\n
- Dietary fiber\n 1.7 g\n
Fat\n 0.3 g\n
Protein\n 0.85 g\n
Vitamin A equiv.\n 175 \u03bcg (22%)\n
Thiamine (vit. B1)\n 0.055 mg (5%)\n
Riboflavin (vit. B2)\n 0.07 mg (6%)\n
Niacin (vit. B3)\n 0.55 mg (4%)\n
Pantothenic acid (B5)\n 0.183 mg (4%)\n
Vitamin B6\n 0.05 mg (4%)\n
Folate (vit. B9)\n 46 \u03bcg (12%)\n
Vitamin C\n 19.5 mg (23%)\n
Calcium\n 39 mg (4%)\n
Iron\n 0.55 mg (4%)\n
Magnesium\n 28 mg (8%)\n
Manganese\n 0.688 mg (33%)\n
Phosphorus\n 39 mg (6%)\n
Potassium\n 330 mg (7%)\n
Sodium\n 11 mg (1%)\n
Zinc\n 0.27 mg (3%)\n
Link to USDA Database entry
Percentages are roughly approximated
using US recommendations for adults.
Source: USDA Nutrient Database\n
\n

The stem is usually harvested at a length of around 15\u201320 cm and a diameter of around 3\u20134 cm. It is crisp, moist, and mildly flavored, and typically prepared by slicing and then stir frying with more strongly flavored ingredients.\n


\n

\n\n\n\n\n', - - "cyclone.content": 'Tropical Depression Ten was the tenth tropical cyclone of the record-breaking 2005 Atlantic hurricane season. It formed on August 13 from a tropical wave that emerged from the west coast of Africa on August 8. As a result of strong wind shear, the depression remained weak and did not strengthen beyond tropical depression status. The cyclone degenerated on August 14, although its remnants partially contributed to the formation of Tropical Depression Twelve, which eventually intensified into Hurricane Katrina. The cyclone had no effect on land, and did not directly result in any fatalities or damage.\n\n\n== Meteorological history ==\n\nOn August 8, a tropical wave emerged from the west coast of Africa and entered the Atlantic Ocean. Tracking towards the west, the depression began to exhibit signs of convective organization on August 11. The system continued to develop, and it is estimated that Tropical Depression Ten formed at 1200 UTC on August 13. At the time, it was located about 1,600 miles (2,600 km) east of Barbados. Upon its designation, the depression consisted of a large area of thunderstorm activity, with curved banding features and expanding outflow. However, the environmental conditions were predicted to quickly become unfavorable. The depression moved erratically and slowly towards the west, and wind shear inhibited any significant intensification. Late on August 13, it was "beginning to look like Irene-junior as it undergoes southwesterly mid-level shear beneath the otherwise favorable upper-level outflow pattern". The wind shear was expected to relent within 48 hours, prompting some forecast models to suggest the depression would eventually attain hurricane status.\nBy early August 14, the shear had substantially disrupted the storm, leaving the low-level center of circulation exposed from the area of convection, which was also deteriorating. After meandering, the storm began to move westward. 
Forecasters expected it to resume a northwestward track as high pressure to the south of Bermuda was forecasted to weaken and another high was predicted to form southwest of the Azores. By 1800 UTC on August 14, the strong shear had further weakened the storm, and it no longer met the criteria for a tropical cyclone. It degenerated into a remnant low, and the National Hurricane Center issued their final advisory on the cyclone. Moving westward, it occasionally produced bursts of convective activity, before dissipating on August 18.\nTropical Depression Twelve formed over the southeastern Bahamas at 2100 UTC on August 23, partially from the remains of Tropical Depression Ten. While the normal standards for numbering tropical depressions in the Atlantic stipulate that the initial designation be retained when a depression regenerates, satellite imagery indicated that a second tropical wave had combined with Tropical Depression Ten north of Puerto Rico to form a new, more complex weather system, which was then designated as Tropical Depression Twelve. In a re-analysis, it was found that the low-level circulation of Tropical Depression Ten had completely detached and dissipated; only the remnant mid-level circulation moved on and merged with the second tropical wave. As a result, the criteria for keeping the same name and identity were not met. Tropical Depression Twelve later became Hurricane Katrina.\n\n\n== Impact ==\nBecause Tropical Depression Ten never approached land as a tropical cyclone, no tropical cyclone watches and warnings were issued for any land masses. No effects, damages, or fatalities were reported, and no ships reported tropical storm-force winds in association with the depression. The system did not attain tropical storm status; as such, it was not given a name by the National Hurricane Center. 
The storm partially contributed to the formation of Hurricane Katrina, which became a Category 5 hurricane on the Saffir-Simpson Hurricane Scale and made landfall in Louisiana, causing catastrophic damage. Katrina was the costliest hurricane, and one of the five deadliest, in the history of the United States.\n\n\n== See also ==\n\nMeteorological history of Hurricane Katrina\nList of storms in the 2005 Atlantic hurricane season\nTimeline of the 2005 Atlantic hurricane season\n\n\n== References ==\n\n\n== External links ==\n\nTropical Depression Ten Tropical Cyclone Report\nTropical Depression Ten advisory archive', - - "cyclone.revid": 572715399, - - "cyclone.parentid": 539367750, - - "cyclone.summary": 'Tropical Depression Ten was the tenth tropical cyclone of the record-breaking 2005 Atlantic hurricane season. It formed on August 13 from a tropical wave that emerged from the west coast of Africa on August 8. As a result of strong wind shear, the depression remained weak and did not strengthen beyond tropical depression status. The cyclone degenerated on August 14, although its remnants partially contributed to the formation of Tropical Depression Twelve, which eventually intensified into Hurricane Katrina. 
The cyclone had no effect on land, and did not directly result in any fatalities or damage.\n\n', - - "cyclone.images": ['http://upload.wikimedia.org/wikipedia/commons/3/33/Tropical_Depression_Ten_%282005%29.ogg', 'http://upload.wikimedia.org/wikipedia/commons/3/37/People_icon.svg', 'http://upload.wikimedia.org/wikipedia/commons/4/47/Sound-icon.svg', 'http://upload.wikimedia.org/wikipedia/commons/4/4a/TD_10_August_13%2C_2005.jpg', 'http://upload.wikimedia.org/wikipedia/commons/7/7d/Tropical_Depression_10_%282005%29.png', 'http://upload.wikimedia.org/wikipedia/commons/8/89/Cyclone_Catarina_from_the_ISS_on_March_26_2004.JPG', 'http://upload.wikimedia.org/wikipedia/commons/8/89/Symbol_book_class2.svg', 'http://upload.wikimedia.org/wikipedia/commons/a/a5/10-L_2005_track.png', 'http://upload.wikimedia.org/wikipedia/commons/e/e0/2005_Atlantic_hurricane_season_summary_map.png', 'http://upload.wikimedia.org/wikipedia/en/4/48/Folder_Hexagonal_Icon.svg', 'http://upload.wikimedia.org/wikipedia/en/4/4a/Commons-logo.svg', 'http://upload.wikimedia.org/wikipedia/en/e/e7/Cscr-featured.svg', 'http://upload.wikimedia.org/wikipedia/en/f/fd/Portal-puzzle.svg'], - - "cyclone.references": ['http://books.google.com/?id=-a8DRl1HuwoC&q=%22tropical+depression+ten%22+2005&dq=%22tropical+depression+ten%22+2005', 'http://facstaff.unca.edu/chennon/research/documents/erb_ncur2006_preprint.pdf', 'http://www.nhc.noaa.gov/archive/2005/TEN.shtml?', 'http://www.nhc.noaa.gov/archive/2005/dis/al102005.discus.001.shtml?', 'http://www.nhc.noaa.gov/archive/2005/dis/al102005.discus.002.shtml?', 'http://www.nhc.noaa.gov/archive/2005/dis/al102005.discus.003.shtml?', 'http://www.nhc.noaa.gov/archive/2005/dis/al122005.discus.001.shtml', 'http://www.nhc.noaa.gov/pdf/TCR-AL102005_Ten.pdf', 'http://www.nhc.noaa.gov/pdf/TCR-AL122005_Katrina.pdf', 'http://www.wptv.com/content/chopper5/story/Capt-Julie-Reports-On-Hurricane-Katrina/q__v8S2TZES2GiccRTQ2bw.cspx'], - - "cyclone.links": ['2005 Atlantic hurricane season', 
'2005 Azores subtropical storm', 'Atlantic Ocean', 'Atmospheric circulation', 'Atmospheric convection', 'Azores', 'Bahamas', 'Bar (unit)', 'Barbados', 'Bermuda', 'High pressure area', 'Hurricane Beta', 'Hurricane Cindy (2005)', 'Hurricane Dennis', 'Hurricane Emily (2005)', 'Hurricane Epsilon', 'Hurricane Irene (2005)', 'Hurricane Katrina', 'Hurricane Maria (2005)', 'Hurricane Nate (2005)', 'Hurricane Ophelia (2005)', 'Hurricane Philippe (2005)', 'Hurricane Rita', 'Hurricane Stan', 'Hurricane Vince (2005)', 'Hurricane Wilma', 'Inch of mercury', 'International Standard Book Number', 'List of Category 5 Atlantic hurricanes', 'List of storms in the 2005 Atlantic hurricane season', 'Louisiana', 'Meteorological history of Hurricane Katrina', 'National Hurricane Center', 'North Atlantic tropical cyclone', 'Outflow (meteorology)', 'Pascal (unit)', 'Puerto Rico', 'Saffir-Simpson Hurricane Scale', 'Saffir\u2013Simpson hurricane wind scale', 'Timeline of the 2005 Atlantic hurricane season', 'Tropical Storm Alpha (2005)', 'Tropical Storm Arlene (2005)', 'Tropical Storm Bret (2005)', 'Tropical Storm Delta (2005)', 'Tropical Storm Franklin (2005)', 'Tropical Storm Gamma', 'Tropical Storm Gert (2005)', 'Tropical Storm Jose (2005)', 'Tropical Storm Tammy (2005)', 'Tropical Storm Zeta', 'Tropical cyclone', 'Tropical cyclone scales', 'Tropical cyclone watches and warnings', 'Tropical wave', 'Wind shear'], - - "cyclone.categories": ['2005 Atlantic hurricane season', 'Articles with hAudio microformats', 'Atlantic tropical depressions', 'CS1 errors: dates', 'Commons category with local link same as on Wikidata', 'Featured articles', 'Hurricane Katrina', 'Spoken articles'], - - "cyclone.sections": ['External links', 'Impact', 'Meteorological history', 'References', 'See also'], - - "cyclone.section.impact": 'Because Tropical Depression Ten never approached land as a tropical cyclone, no tropical cyclone watches and warnings were issued for any land masses. 
No effects, damages, or fatalities were reported, and no ships reported tropical storm-force winds in association with the depression. The system did not attain tropical storm status; as such, it was not given a name by the National Hurricane Center. The storm partially contributed to the formation of Hurricane Katrina, which became a Category 5 hurricane on the Saffir-Simpson Hurricane Scale and made landfall in Louisiana, causing catastrophic damage. Katrina was the costliest hurricane, and one of the five deadliest, in the history of the United States.', - - "barack.search": ['Barack Obama', 'Barack Obama, Sr.', 'Presidency of Barack Obama', 'Barack Obama presidential campaign, 2008', 'List of federal judges appointed by Barack Obama', 'Barack Obama in comics', 'Political positions of Barack Obama', 'Barack Obama on social media', 'List of Batman: The Brave and the Bold characters', 'Family of Barack Obama'], - - "porsche.search": ['Porsche', 'Porsche in motorsport', 'Porsche 911 GT3'], - - "great_wall_of_china.coordinates.lat": '40.677', - "great_wall_of_china.coordinates.lon": '117.232', - - "great_wall_of_china.geo_seach": ['Great Wall of China'], - - "great_wall_of_china.geo_seach_with_radius": ['Great Wall of China', 'Jinshanling'], - - "great_wall_of_china.geo_seach_with_existing_article_name": ['Great Wall of China'], - - "great_wall_of_china.geo_seach_with_non_existing_article_name": [], - } + "_wiki_request calls": { + ( + ("explaintext", ""), + ("prop", "extracts|revisions"), + ("rvprop", "ids"), + ("titles", "Celtuce"), + ): { + "query": { + "pages": { + "1868108": { + "extract": 'Celtuce (Lactuca sativa var. asparagina, augustana, or angustata), also called stem lettuce, celery lettuce, asparagus lettuce, or Chinese lettuce, IPA (UK,US) /\u02c8s\u025blt.\u0259s/, is a cultivar of lettuce grown primarily for its thick stem, used as a vegetable. 
It is especially popular in China, and is called wosun (Chinese: \u83b4\u7b0b; pinyin: w\u014ds\u016dn) or woju (Chinese: \u83b4\u82e3; pinyin: w\u014dj\xf9) (although the latter name may also be used to mean lettuce in general).\n\nThe stem is usually harvested at a length of around 15\u201320 cm and a diameter of around 3\u20134 cm. It is crisp, moist, and mildly flavored, and typically prepared by slicing and then stir frying with more strongly flavored ingredients.\n\nDown: Photos of the celtuce, chinese lettuce or "Wosun" taken in the province of Girona (Catalonia, Spain, Europe) in June 2013\nCeltuce Nutritional content', + "ns": 0, + "pageid": 1868108, + "revisions": [{"revid": 575687826, "parentid": 574302108}], + "title": "Celtuce", + } + } + } + }, + ( + ("explaintext", ""), + ("prop", "extracts|revisions"), + ("rvprop", "ids"), + ("titles", "Tropical Depression Ten (2005)"), + ): { + "query": { + "pages": { + "21196082": { + "extract": 'Tropical Depression Ten was the tenth tropical cyclone of the record-breaking 2005 Atlantic hurricane season. It formed on August 13 from a tropical wave that emerged from the west coast of Africa on August 8. As a result of strong wind shear, the depression remained weak and did not strengthen beyond tropical depression status. The cyclone degenerated on August 14, although its remnants partially contributed to the formation of Tropical Depression Twelve, which eventually intensified into Hurricane Katrina. The cyclone had no effect on land, and did not directly result in any fatalities or damage.\n\n\n== Meteorological history ==\n\nOn August 8, a tropical wave emerged from the west coast of Africa and entered the Atlantic Ocean. Tracking towards the west, the depression began to exhibit signs of convective organization on August 11. The system continued to develop, and it is estimated that Tropical Depression Ten formed at 1200 UTC on August 13. At the time, it was located about 1,600 miles (2,600 km) east of Barbados. 
Upon its designation, the depression consisted of a large area of thunderstorm activity, with curved banding features and expanding outflow. However, the environmental conditions were predicted to quickly become unfavorable. The depression moved erratically and slowly towards the west, and wind shear inhibited any significant intensification. Late on August 13, it was "beginning to look like Irene-junior as it undergoes southwesterly mid-level shear beneath the otherwise favorable upper-level outflow pattern". The wind shear was expected to relent within 48 hours, prompting some forecast models to suggest the depression would eventually attain hurricane status.\nBy early August 14, the shear had substantially disrupted the storm, leaving the low-level center of circulation exposed from the area of convection, which was also deteriorating. After meandering, the storm began to move westward. Forecasters expected it to resume a northwestward track as high pressure to the south of Bermuda was forecasted to weaken and another high was predicted to form southwest of the Azores. By 1800 UTC on August 14, the strong shear had further weakened the storm, and it no longer met the criteria for a tropical cyclone. It degenerated into a remnant low, and the National Hurricane Center issued their final advisory on the cyclone. Moving westward, it occasionally produced bursts of convective activity, before dissipating on August 18.\nTropical Depression Twelve formed over the southeastern Bahamas at 2100 UTC on August 23, partially from the remains of Tropical Depression Ten. While the normal standards for numbering tropical depressions in the Atlantic stipulate that the initial designation be retained when a depression regenerates, satellite imagery indicated that a second tropical wave had combined with Tropical Depression Ten north of Puerto Rico to form a new, more complex weather system, which was then designated as Tropical Depression Twelve. 
In a re-analysis, it was found that the low-level circulation of Tropical Depression Ten had completely detached and dissipated; only the remnant mid-level circulation moved on and merged with the second tropical wave. As a result, the criteria for keeping the same name and identity were not met. Tropical Depression Twelve later became Hurricane Katrina.\n\n\n== Impact ==\nBecause Tropical Depression Ten never approached land as a tropical cyclone, no tropical cyclone watches and warnings were issued for any land masses. No effects, damages, or fatalities were reported, and no ships reported tropical storm-force winds in association with the depression. The system did not attain tropical storm status; as such, it was not given a name by the National Hurricane Center. The storm partially contributed to the formation of Hurricane Katrina, which became a Category 5 hurricane on the Saffir-Simpson Hurricane Scale and made landfall in Louisiana, causing catastrophic damage. Katrina was the costliest hurricane, and one of the five deadliest, in the history of the United States.\n\n\n== See also ==\n\nMeteorological history of Hurricane Katrina\nList of storms in the 2005 Atlantic hurricane season\nTimeline of the 2005 Atlantic hurricane season\n\n\n== References ==\n\n\n== External links ==\n\nTropical Depression Ten Tropical Cyclone Report\nTropical Depression Ten advisory archive', + "ns": 0, + "pageid": 21196082, + "revisions": [{"revid": 572715399, "parentid": 539367750}], + "title": "Tropical Depression Ten (2005)", + } + } + } + }, + ( + ("inprop", "url"), + ("ppprop", "disambiguation"), + ("prop", "info|pageprops"), + ("redirects", ""), + ("titles", "purpleberry"), + ): { + "query": { + "normalized": [{"to": "Purpleberry", "from": "purpleberry"}], + "pages": { + "-1": { + "missing": "", + "editurl": "http://en.wikipedia.org/w/index.php?title=Purpleberry&action=edit", + "title": "Purpleberry", + "contentmodel": "wikitext", + "pagelanguage": "en", + "ns": 0, + 
"fullurl": "http://en.wikipedia.org/wiki/Purpleberry", + } + }, + } + }, + ( + ("limit", 1), + ("list", "search"), + ("srinfo", "suggestion"), + ("srlimit", 1), + ("srprop", ""), + ("srsearch", "Menlo Park, New Jersey"), + ): { + "query-continue": {"search": {"sroffset": 1}}, + "query": {"search": [{"ns": 0, "title": "Edison, New Jersey"}]}, + "warnings": {"main": {"*": "Unrecognized parameter: 'limit'"}}, + }, + ( + ("inprop", "url"), + ("ppprop", "disambiguation"), + ("prop", "info|pageprops"), + ("redirects", ""), + ("titles", "Menlo Park, New Jersey"), + ): { + "query": { + "redirects": [ + {"to": "Edison, New Jersey", "from": "Menlo Park, New Jersey"} + ], + "pages": { + "125414": { + "lastrevid": 607768264, + "pageid": 125414, + "title": "Edison, New Jersey", + "editurl": "http://en.wikipedia.org/w/index.php?title=Edison,_New_Jersey&action=edit", + "counter": "", + "length": 85175, + "contentmodel": "wikitext", + "pagelanguage": "en", + "touched": "2014-05-14T17:10:49Z", + "ns": 0, + "fullurl": "http://en.wikipedia.org/wiki/Edison,_New_Jersey", + } + }, + } + }, + ( + ("inprop", "url"), + ("ppprop", "disambiguation"), + ("prop", "info|pageprops"), + ("redirects", ""), + ("titles", "Communist Party"), + ): { + "query": { + "redirects": [{"to": "Communist party", "from": "Communist Party"}], + "pages": { + "37008": { + "lastrevid": 608086859, + "pageid": 37008, + "title": "Communist party", + "editurl": "http://en.wikipedia.org/w/index.php?title=Communist_party&action=edit", + "counter": "", + "length": 7868, + "contentmodel": "wikitext", + "pagelanguage": "en", + "touched": "2014-05-26T01:19:01Z", + "ns": 0, + "fullurl": "http://en.wikipedia.org/wiki/Communist_party", + } + }, + } + }, + ( + ("inprop", "url"), + ("ppprop", "disambiguation"), + ("prop", "info|pageprops"), + ("redirects", ""), + ("titles", "communist Party"), + ): { + "query": { + "redirects": [{"to": "Communist party", "from": "Communist Party"}], + "normalized": [{"to": "Communist Party", 
"from": "communist Party"}], + "pages": { + "37008": { + "lastrevid": 608086859, + "pageid": 37008, + "title": "Communist party", + "editurl": "http://en.wikipedia.org/w/index.php?title=Communist_party&action=edit", + "counter": "", + "length": 7868, + "contentmodel": "wikitext", + "pagelanguage": "en", + "touched": "2014-05-26T01:19:01Z", + "ns": 0, + "fullurl": "http://en.wikipedia.org/wiki/Communist_party", + } + }, + } + }, + ( + ("inprop", "url"), + ("ppprop", "disambiguation"), + ("prop", "info|pageprops"), + ("redirects", ""), + ("titles", "Communist party"), + ): { + "query": { + "pages": { + "37008": { + "lastrevid": 608086859, + "pageid": 37008, + "title": "Communist party", + "editurl": "http://en.wikipedia.org/w/index.php?title=Communist_party&action=edit", + "counter": "", + "length": 7868, + "contentmodel": "wikitext", + "pagelanguage": "en", + "touched": "2014-05-26T01:19:01Z", + "ns": 0, + "fullurl": "http://en.wikipedia.org/wiki/Communist_party", + } + } + } + }, + ( + ("inprop", "url"), + ("ppprop", "disambiguation"), + ("prop", "info|pageprops"), + ("redirects", ""), + ("titles", "Edison, New Jersey"), + ): { + "query": { + "pages": { + "125414": { + "lastrevid": 607768264, + "pageid": 125414, + "title": "Edison, New Jersey", + "editurl": "http://en.wikipedia.org/w/index.php?title=Edison,_New_Jersey&action=edit", + "counter": "", + "length": 85175, + "contentmodel": "wikitext", + "pagelanguage": "en", + "touched": "2014-05-14T17:10:49Z", + "ns": 0, + "fullurl": "http://en.wikipedia.org/wiki/Edison,_New_Jersey", + } + } + } + }, + ( + ("inprop", "url"), + ("ppprop", "disambiguation"), + ("prop", "info|pageprops"), + ("redirects", ""), + ("titles", "Dodge Ram (disambiguation)"), + ): { + "query": { + "pages": { + "18803364": { + "lastrevid": 567152802, + "pageid": 18803364, + "title": "Dodge Ram (disambiguation)", + "editurl": "http://en.wikipedia.org/w/index.php?title=Dodge_Ram_(disambiguation)&action=edit", + "counter": "", + "length": 702, + 
"contentmodel": "wikitext", + "pagelanguage": "en", + "touched": "2013-08-08T15:12:27Z", + "ns": 0, + "pageprops": {"disambiguation": ""}, + "fullurl": "http://en.wikipedia.org/wiki/Dodge_Ram_(disambiguation)", + } + } + } + }, + ( + ("prop", "revisions"), + ("rvlimit", 1), + ("rvparse", ""), + ("rvprop", "content"), + ("titles", "Dodge Ram (disambiguation)"), + ): { + "query-continue": {"revisions": {"rvcontinue": 556603298}}, + "query": { + "pages": { + "18803364": { + "ns": 0, + "pageid": 18803364, + "revisions": [ + { + "*": '

Dodge Ram is a collective nameplate for light trucks made by Dodge\n

\n\n

See also:\n

\n\n\n\n\n\n\n\n' + } + ], + "title": "Dodge Ram (disambiguation)", + } + } + }, + }, + ( + ("limit", 1), + ("list", "search"), + ("srinfo", "suggestion"), + ("srlimit", 1), + ("srprop", ""), + ("srsearch", "butteryfly"), + ): { + "query-continue": {"search": {"sroffset": 1}}, + "query": { + "searchinfo": {"suggestion": "butterfly"}, + "search": [{"ns": 0, "title": "Butterfly's Tongue"}], + }, + "warnings": {"main": {"*": "Unrecognized parameter: 'limit'"}}, + }, + ( + ("inprop", "url"), + ("ppprop", "disambiguation"), + ("prop", "info|pageprops"), + ("redirects", ""), + ("titles", "butterfly"), + ): { + "query": { + "normalized": [{"to": "Butterfly", "from": "butterfly"}], + "pages": { + "48338": { + "lastrevid": 566847704, + "pageid": 48338, + "title": "Butterfly", + "editurl": "http://en.wikipedia.org/w/index.php?title=Butterfly&action=edit", + "counter": "", + "length": 60572, + "contentmodel": "wikitext", + " pagelanguage": "en", + "touched": "2013-08-07T11:15:37Z", + "ns": 0, + "fullurl": "http://en.wikipedia.org/wiki/Butterfly", + } + }, + } + }, + ( + ("limit", 1), + ("list", "search"), + ("srinfo", "suggestion"), + ("srlimit", 1), + ("srprop", ""), + ("srsearch", "Celtuce"), + ): { + "query-continue": {"search": {"sroffset": 1}}, + "query": {"search": [{"ns": 0, "title": "Celtuce"}]}, + "warnings": {"main": {"*": "Unrecognized parameter: 'limit'"}}, + }, + ( + ("limit", 1), + ("list", "search"), + ("srinfo", "suggestion"), + ("srlimit", 1), + ("srprop", ""), + ("srsearch", "Tropical Depression Ten (2005)"), + ): { + "query-continue": {"search": {"sroffset": 1}}, + "query": {"search": [{"ns": 0, "title": "Tropical Depression Ten (2005)"}]}, + "warnings": {"main": {"*": "Unrecognized parameter: 'limit'"}}, + }, + ( + ("limit", 1), + ("list", "search"), + ("srinfo", "suggestion"), + ("srlimit", 1), + ("srprop", ""), + ("srsearch", "Great Wall of China"), + ): { + "query-continue": {"search": {"sroffset": 1}}, + "query": {"search": [{"ns": 0, "title": "Great 
Wall of China"}]}, + "warnings": {"main": {"*": "Unrecognized parameter: 'limit'"}}, + }, + ( + ("inprop", "url"), + ("ppprop", "disambiguation"), + ("prop", "info|pageprops"), + ("redirects", ""), + ("titles", "Celtuce"), + ): { + "query": { + "pages": { + "1868108": { + "lastrevid": 562756085, + "pageid": 1868108, + "title": "Celtuce", + "editurl": "http://en.wikipedia.org/w/index.php?title=Celtuce&action=edit", + "counter": "", + "length": 1662, + "contentmodel": "wikitext", + "pagelanguage": "en", + "touched": "2013-08-17T03:30:23Z", + "ns": 0, + "fullurl": "http://en.wikipedia.org/wiki/Celtuce", + } + } + } + }, + ( + ("inprop", "url"), + ("ppprop", "disambiguation"), + ("prop", "info|pageprops"), + ("redirects", ""), + ("titles", "Tropical Depression Ten (2005)"), + ): { + "query": { + "pages": { + "21196082": { + "lastrevid": 572715399, + "pageid": 21196082, + "title": "Tropical Depression Ten (2005)", + "editurl": "http://en.wikipedia.org/w/index.php?title=Tropical_Depression_Ten_(2005)&action=edit", + "counter": "", + "length": 8543, + "contentmodel": "wikitext", + "pagelanguage": "en", + "touched": "2013-09-18T13:45:33Z", + "ns": 0, + "fullurl": "http://en.wikipedia.org/wiki/Tropical_Depression_Ten_(2005)", + } + } + } + }, + ( + ("inprop", "url"), + ("ppprop", "disambiguation"), + ("prop", "info|pageprops"), + ("redirects", ""), + ("titles", "Great Wall of China"), + ): { + "query": { + "pages": { + "5094570": { + "lastrevid": 604138653, + "pageid": 5094570, + "title": "Great Wall of China", + "editurl": "http://en.wikipedia.org/w/index.php?title=Great_Wall_of_China&action=edit", + "counter": "", + "length": 23895, + "contentmodel": "wikitext", + "pagelanguage": "en", + "touched": "2013-08-17T03:30:23Z", + "ns": 0, + "fullurl": "http://en.wikipedia.org/wiki/Great_Wall_of_China", + } + } + } + }, + (("explaintext", ""), ("prop", "extracts"), ("titles", "Celtuce")): { + "query": { + "pages": { + "1868108": { + "extract": "Celtuce (Lactuca sativa var. 
asparagina, augustana, or angustata), also called stem lettuce, celery lettuce, asparagus lettuce, or Chinese lettuce, IPA (UK,US) /\u02c8s\u025blt.\u0259s/, is a cultivar of lettuce grown primarily for its thick stem, used as a vegetable. It is especially popular in China, and is called wosun (Chinese: \u83b4\u7b0b; pinyin: w\u014ds\u016dn) or woju (Chinese: \u83b4\u82e3; pinyin: w\u014dj\xf9) (although the latter name may also be used to mean lettuce in general).\n\nThe stem is usually harvested at a length of around 15\u201320 cm and a diameter of around 3\u20134 cm. It is crisp, moist, and mildly flavored, and typically prepared by slicing and then stir frying with more strongly flavored ingredients.", + "ns": 0, + "pageid": 1868108, + "title": "Celtuce", + } + } + } + }, + ( + ("exintro", ""), + ("explaintext", ""), + ("prop", "extracts"), + ("titles", "Celtuce"), + ): { + "query": { + "pages": { + "1868108": { + "extract": "Celtuce (Lactuca sativa var. asparagina, augustana, or angustata), also called stem lettuce, celery lettuce, asparagus lettuce, or Chinese lettuce, IPA (UK,US) /\u02c8s\u025blt.\u0259s/, is a cultivar of lettuce grown primarily for its thick stem, used as a vegetable. It is especially popular in China, and is called wosun (Chinese: \u83b4\u7b0b; pinyin: w\u014ds\u016dn) or woju (Chinese: \u83b4\u82e3; pinyin: w\u014dj\xf9) (although the latter name may also be used to mean lettuce in general).\n\nThe stem is usually harvested at a length of around 15\u201320 cm and a diameter of around 3\u20134 cm. 
It is crisp, moist, and mildly flavored, and typically prepared by slicing and then stir frying with more strongly flavored ingredients.", + "ns": 0, + "pageid": 1868108, + "title": "Celtuce", + } + } + } + }, + ( + ("exintro", ""), + ("explaintext", ""), + ("prop", "extracts"), + ("titles", "Tropical Depression Ten (2005)"), + ): { + "query": { + "pages": { + "21196082": { + "extract": "Tropical Depression Ten was the tenth tropical cyclone of the record-breaking 2005 Atlantic hurricane season. It formed on August 13 from a tropical wave that emerged from the west coast of Africa on August 8. As a result of strong wind shear, the depression remained weak and did not strengthen beyond tropical depression status. The cyclone degenerated on August 14, although its remnants partially contributed to the formation of Tropical Depression Twelve, which eventually intensified into Hurricane Katrina. The cyclone had no effect on land, and did not directly result in any fatalities or damage.\n\n", + "ns": 0, + "pageid": 21196082, + "title": "Tropical Depression Ten (2005)", + } + } + } + }, + ( + ("generator", "images"), + ("gimlimit", "max"), + ("iiprop", "url"), + ("prop", "imageinfo"), + ("titles", "Celtuce"), + ): { + "query": { + "pages": { + "22263385": { + "imagerepository": "local", + "ns": 6, + "pageid": 22263385, + "imageinfo": [ + { + "url": "http://upload.wikimedia.org/wikipedia/en/9/99/Question_book-new.svg", + "descriptionurl": "http://en.wikipedia.org/wiki/File:Question_book-new.svg", + } + ], + "title": "File:Question book-new.svg", + }, + "-1": { + "imagerepository": "shared", + "ns": 6, + "imageinfo": [ + { + "url": "http://upload.wikimedia.org/wikipedia/commons/8/87/Celtuce.jpg", + "descriptionurl": "http://commons.wikimedia.org/wiki/File:Celtuce.jpg", + } + ], + "missing": "", + "title": "File:Celtuce.jpg", + }, + "-3": { + "imagerepository": "shared", + "ns": 6, + "imageinfo": [ + { + "url": 
"http://upload.wikimedia.org/wikipedia/commons/7/79/VegCorn.jpg", + "descriptionurl": "http://commons.wikimedia.org/wiki/File:VegCorn.jpg", + } + ], + "missing": "", + "title": "File:VegCorn.jpg", + }, + "-2": { + "imagerepository": "shared", + "ns": 6, + "imageinfo": [ + { + "url": "http://upload.wikimedia.org/wikipedia/commons/d/dc/The_farmer%27s_market_near_the_Potala_in_Lhasa.jpg", + "descriptionurl": "http://commons.wikimedia.org/wiki/File:The_farmer%27s_market_near_the_Potala_in_Lhasa.jpg", + } + ], + "missing": "", + "title": "File:The farmer's market near the Potala in Lhasa.jpg", + }, + } + }, + "limits": {"images": 500}, + }, + ( + ("generator", "images"), + ("gimlimit", "max"), + ("iiprop", "url"), + ("prop", "imageinfo"), + ("titles", "Tropical Depression Ten (2005)"), + ): { + "query": { + "pages": { + "33285577": { + "imagerepository": "local", + "ns": 6, + "pageid": 33285577, + "imageinfo": [ + { + "url": "http://upload.wikimedia.org/wikipedia/en/4/4a/Commons-logo.svg", + "descriptionurl": "http://en.wikipedia.org/wiki/File:Commons-logo.svg", + } + ], + "title": "File:Commons-logo.svg", + }, + "23473511": { + "imagerepository": "local", + "ns": 6, + "pageid": 23473511, + "imageinfo": [ + { + "url": "http://upload.wikimedia.org/wikipedia/en/4/48/Folder_Hexagonal_Icon.svg", + "descriptionurl": "http://en.wikipedia.org/wiki/File:Folder_Hexagonal_Icon.svg", + } + ], + "title": "File:Folder Hexagonal Icon.svg", + }, + "33285464": { + "imagerepository": "local", + "ns": 6, + "pageid": 33285464, + "imageinfo": [ + { + "url": "http://upload.wikimedia.org/wikipedia/en/e/e7/Cscr-featured.svg", + "descriptionurl": "http://en.wikipedia.org/wiki/File:Cscr-featured.svg", + } + ], + "title": "File:Cscr-featured.svg", + }, + "2526001": { + "imagerepository": "shared", + "ns": 6, + "pageid": 2526001, + "imageinfo": [ + { + "url": "http://upload.wikimedia.org/wikipedia/commons/8/89/Cyclone_Catarina_from_the_ISS_on_March_26_2004.JPG", + "descriptionurl": 
"http://commons.wikimedia.org/wiki/File:Cyclone_Catarina_from_the_ISS_on_March_26_2004.JPG", + } + ], + "title": "File:Cyclone Catarina from the ISS on March 26 2004.JPG", + }, + "33285257": { + "imagerepository": "local", + "ns": 6, + "pageid": 33285257, + "imageinfo": [ + { + "url": "http://upload.wikimedia.org/wikipedia/en/f/fd/Portal-puzzle.svg", + "descriptionurl": "http://en.wikipedia.org/wiki/File:Portal-puzzle.svg", + } + ], + "title": "File:Portal-puzzle.svg", + }, + "-5": { + "imagerepository": "shared", + "ns": 6, + "imageinfo": [ + { + "url": "http://upload.wikimedia.org/wikipedia/commons/8/89/Symbol_book_class2.svg", + "descriptionurl": "http://commons.wikimedia.org/wiki/File:Symbol_book_class2.svg", + } + ], + "missing": "", + "title": "File:Symbol book class2.svg", + }, + "-4": { + "imagerepository": "shared", + "ns": 6, + "imageinfo": [ + { + "url": "http://upload.wikimedia.org/wikipedia/commons/4/47/Sound-icon.svg", + "descriptionurl": "http://commons.wikimedia.org/wiki/File:Sound-icon.svg", + } + ], + "missing": "", + "title": "File:Sound-icon.svg", + }, + "-7": { + "imagerepository": "shared", + "ns": 6, + "imageinfo": [ + { + "url": "http://upload.wikimedia.org/wikipedia/commons/7/7d/Tropical_Depression_10_%282005%29.png", + "descriptionurl": "http://commons.wikimedia.org/wiki/File:Tropical_Depression_10_(2005).png", + } + ], + "missing": "", + "title": "File:Tropical Depression 10 (2005).png", + }, + "-6": { + "imagerepository": "shared", + "ns": 6, + "imageinfo": [ + { + "url": "http://upload.wikimedia.org/wikipedia/commons/4/4a/TD_10_August_13%2C_2005.jpg", + "descriptionurl": "http://commons.wikimedia.org/wiki/File:TD_10_August_13,_2005.jpg", + } + ], + "missing": "", + "title": "File:TD 10 August 13, 2005.jpg", + }, + "-1": { + "imagerepository": "shared", + "ns": 6, + "imageinfo": [ + { + "url": "http://upload.wikimedia.org/wikipedia/commons/a/a5/10-L_2005_track.png", + "descriptionurl": 
"http://commons.wikimedia.org/wiki/File:10-L_2005_track.png", + } + ], + "missing": "", + "title": "File:10-L 2005 track.png", + }, + "-3": { + "imagerepository": "shared", + "ns": 6, + "imageinfo": [ + { + "url": "http://upload.wikimedia.org/wikipedia/commons/3/37/People_icon.svg", + "descriptionurl": "http://commons.wikimedia.org/wiki/File:People_icon.svg", + } + ], + "missing": "", + "title": "File:People icon.svg", + }, + "-2": { + "imagerepository": "shared", + "ns": 6, + "imageinfo": [ + { + "url": "http://upload.wikimedia.org/wikipedia/commons/e/e0/2005_Atlantic_hurricane_season_summary_map.png", + "descriptionurl": "http://commons.wikimedia.org/wiki/File:2005_Atlantic_hurricane_season_summary_map.png", + } + ], + "missing": "", + "title": "File:2005 Atlantic hurricane season summary map.png", + }, + "-8": { + "imagerepository": "shared", + "ns": 6, + "imageinfo": [ + { + "url": "http://upload.wikimedia.org/wikipedia/commons/3/33/Tropical_Depression_Ten_%282005%29.ogg", + "descriptionurl": "http://commons.wikimedia.org/wiki/File:Tropical_Depression_Ten_(2005).ogg", + } + ], + "missing": "", + "title": "File:Tropical Depression Ten (2005).ogg", + }, + } + }, + "limits": {"images": 500}, + }, + (("ellimit", "max"), ("prop", "extlinks"), ("titles", "Celtuce")): { + "query": { + "pages": { + "1868108": { + "extlinks": [ + {"*": "http://ndb.nal.usda.gov/ndb/search/list"}, + { + "*": "http://ndb.nal.usda.gov/ndb/search/list?qlookup=11145&format=Full" + }, + ], + "ns": 0, + "pageid": 1868108, + "title": "Celtuce", + } + } + }, + "limits": {"extlinks": 500}, + }, + ( + ("ellimit", "max"), + ("prop", "extlinks"), + ("titles", "Tropical Depression Ten (2005)"), + ): { + "query": { + "pages": { + "21196082": { + "extlinks": [ + { + "*": "http://books.google.com/?id=-a8DRl1HuwoC&q=%22tropical+depression+ten%22+2005&dq=%22tropical+depression+ten%22+2005" + }, + { + "*": "http://facstaff.unca.edu/chennon/research/documents/erb_ncur2006_preprint.pdf" + }, + {"*": 
"http://www.nhc.noaa.gov/archive/2005/TEN.shtml?"}, + { + "*": "http://www.nhc.noaa.gov/archive/2005/dis/al102005.discus.001.shtml?" + }, + { + "*": "http://www.nhc.noaa.gov/archive/2005/dis/al102005.discus.002.shtml?" + }, + { + "*": "http://www.nhc.noaa.gov/archive/2005/dis/al102005.discus.003.shtml?" + }, + { + "*": "http://www.nhc.noaa.gov/archive/2005/dis/al122005.discus.001.shtml" + }, + {"*": "http://www.nhc.noaa.gov/pdf/TCR-AL102005_Ten.pdf"}, + { + "*": "http://www.nhc.noaa.gov/pdf/TCR-AL122005_Katrina.pdf" + }, + { + "*": "http://www.wptv.com/content/chopper5/story/Capt-Julie-Reports-On-Hurricane-Katrina/q__v8S2TZES2GiccRTQ2bw.cspx" + }, + ], + "ns": 0, + "pageid": 21196082, + "title": "Tropical Depression Ten (2005)", + } + } + }, + "limits": {"extlinks": 500}, + }, + ( + ("pllimit", "max"), + ("plnamespace", 0), + ("prop", "links"), + ("titles", "Celtuce"), + ): { + "query": { + "pages": { + "1868108": { + "ns": 0, + "pageid": 1868108, + "links": [ + {"ns": 0, "title": "Calcium"}, + {"ns": 0, "title": "Carbohydrate"}, + {"ns": 0, "title": "Chinese language"}, + {"ns": 0, "title": "Dietary Reference Intake"}, + {"ns": 0, "title": "Dietary fiber"}, + {"ns": 0, "title": "Fat"}, + {"ns": 0, "title": "Folate"}, + {"ns": 0, "title": "Food energy"}, + {"ns": 0, "title": "Iron"}, + {"ns": 0, "title": "Lettuce"}, + {"ns": 0, "title": "Lhasa"}, + {"ns": 0, "title": "Magnesium in biology"}, + {"ns": 0, "title": "Manganese"}, + {"ns": 0, "title": "Niacin"}, + {"ns": 0, "title": "Pantothenic acid"}, + {"ns": 0, "title": "Phosphorus"}, + {"ns": 0, "title": "Pinyin"}, + {"ns": 0, "title": "Plant stem"}, + {"ns": 0, "title": "Potassium"}, + {"ns": 0, "title": "Protein (nutrient)"}, + {"ns": 0, "title": "Riboflavin"}, + {"ns": 0, "title": "Sodium"}, + {"ns": 0, "title": "Stir frying"}, + {"ns": 0, "title": "Thiamine"}, + {"ns": 0, "title": "Vegetable"}, + {"ns": 0, "title": "Vitamin A"}, + {"ns": 0, "title": "Vitamin B6"}, + {"ns": 0, "title": "Vitamin C"}, + {"ns": 0, 
"title": "Zinc"}, + ], + "title": "Celtuce", + } + } + }, + "limits": {"links": 500}, + }, + ( + ("pllimit", "max"), + ("plnamespace", 0), + ("prop", "links"), + ("titles", "Tropical Depression Ten (2005)"), + ): { + "query": { + "pages": { + "21196082": { + "ns": 0, + "pageid": 21196082, + "links": [ + {"ns": 0, "title": "2005 Atlantic hurricane season"}, + {"ns": 0, "title": "2005 Azores subtropical storm"}, + {"ns": 0, "title": "Atlantic Ocean"}, + {"ns": 0, "title": "Atmospheric circulation"}, + {"ns": 0, "title": "Atmospheric convection"}, + {"ns": 0, "title": "Azores"}, + {"ns": 0, "title": "Bahamas"}, + {"ns": 0, "title": "Bar (unit)"}, + {"ns": 0, "title": "Barbados"}, + {"ns": 0, "title": "Bermuda"}, + {"ns": 0, "title": "High pressure area"}, + {"ns": 0, "title": "Hurricane Beta"}, + {"ns": 0, "title": "Hurricane Cindy (2005)"}, + {"ns": 0, "title": "Hurricane Dennis"}, + {"ns": 0, "title": "Hurricane Emily (2005)"}, + {"ns": 0, "title": "Hurricane Epsilon"}, + {"ns": 0, "title": "Hurricane Irene (2005)"}, + {"ns": 0, "title": "Hurricane Katrina"}, + {"ns": 0, "title": "Hurricane Maria (2005)"}, + {"ns": 0, "title": "Hurricane Nate (2005)"}, + {"ns": 0, "title": "Hurricane Ophelia (2005)"}, + {"ns": 0, "title": "Hurricane Philippe (2005)"}, + {"ns": 0, "title": "Hurricane Rita"}, + {"ns": 0, "title": "Hurricane Stan"}, + {"ns": 0, "title": "Hurricane Vince (2005)"}, + {"ns": 0, "title": "Hurricane Wilma"}, + {"ns": 0, "title": "Inch of mercury"}, + {"ns": 0, "title": "International Standard Book Number"}, + { + "ns": 0, + "title": "List of Category 5 Atlantic hurricanes", + }, + { + "ns": 0, + "title": "List of storms in the 2005 Atlantic hurricane season", + }, + {"ns": 0, "title": "Louisiana"}, + { + "ns": 0, + "title": "Meteorological history of Hurricane Katrina", + }, + {"ns": 0, "title": "National Hurricane Center"}, + {"ns": 0, "title": "North Atlantic tropical cyclone"}, + {"ns": 0, "title": "Outflow (meteorology)"}, + {"ns": 0, "title": "Pascal 
(unit)"}, + {"ns": 0, "title": "Puerto Rico"}, + {"ns": 0, "title": "Saffir-Simpson Hurricane Scale"}, + { + "ns": 0, + "title": "Saffir\u2013Simpson hurricane wind scale", + }, + { + "ns": 0, + "title": "Timeline of the 2005 Atlantic hurricane season", + }, + {"ns": 0, "title": "Tropical Storm Alpha (2005)"}, + {"ns": 0, "title": "Tropical Storm Arlene (2005)"}, + {"ns": 0, "title": "Tropical Storm Bret (2005)"}, + {"ns": 0, "title": "Tropical Storm Delta (2005)"}, + {"ns": 0, "title": "Tropical Storm Franklin (2005)"}, + {"ns": 0, "title": "Tropical Storm Gamma"}, + {"ns": 0, "title": "Tropical Storm Gert (2005)"}, + {"ns": 0, "title": "Tropical Storm Jose (2005)"}, + {"ns": 0, "title": "Tropical Storm Tammy (2005)"}, + {"ns": 0, "title": "Tropical Storm Zeta"}, + {"ns": 0, "title": "Tropical cyclone"}, + {"ns": 0, "title": "Tropical cyclone scales"}, + {"ns": 0, "title": "Tropical cyclone watches and warnings"}, + {"ns": 0, "title": "Tropical wave"}, + {"ns": 0, "title": "Wind shear"}, + ], + "title": "Tropical Depression Ten (2005)", + } + } + }, + "limits": {"links": 500}, + }, + (("cllimit", "max"), ("prop", "categories"), ("titles", "Celtuce")): { + "query": { + "pages": { + "1868108": { + "pageid": 1868108, + "ns": 0, + "title": "Celtuce", + "categories": [ + {"ns": 14, "title": "All articles lacking sources"}, + {"ns": 14, "title": "All stub articles"}, + { + "ns": 14, + "title": "Articles containing Chinese-language text", + }, + { + "ns": 14, + "title": "Articles lacking sources from December 2009", + }, + {"ns": 14, "title": "Stem vegetables"}, + {"ns": 14, "title": "Vegetable stubs"}, + ], + } + } + }, + "limits": {"categories": 500}, + }, + ( + ("cllimit", "max"), + ("prop", "categories"), + ("titles", "Tropical Depression Ten (2005)"), + ): { + "query": { + "pages": { + "21196082": { + "pageid": 21196082, + "ns": 0, + "title": "Tropical Depression Ten (2005)", + "categories": [ + {"ns": 14, "title": "2005 Atlantic hurricane season"}, + {"ns": 14, 
"title": "Articles with hAudio microformats"}, + {"ns": 14, "title": "Atlantic tropical depressions"}, + {"ns": 14, "title": "CS1 errors: dates"}, + { + "ns": 14, + "title": "Commons category with local link same as on Wikidata", + }, + {"ns": 14, "title": "Featured articles"}, + {"ns": 14, "title": "Hurricane Katrina"}, + {"ns": 14, "title": "Spoken articles"}, + ], + } + } + }, + "limits": {"categories": 500}, + }, + ( + ("prop", "revisions"), + ("rvlimit", 1), + ("rvparse", ""), + ("rvprop", "content"), + ("titles", "Celtuce"), + ): { + "query-continue": {"revisions": {"rvcontinue": 547842204}}, + "query": { + "pages": { + "1868108": { + "ns": 0, + "pageid": 1868108, + "revisions": [ + { + "*": '\n
Celtuce stems & heads
\n

Celtuce (Lactuca sativa var. asparagina, augustana, or angustata), also called stem lettuce, celery lettuce, asparagus lettuce, or Chinese lettuce, IPA (UK,US) /\u02c8s\u025blt.\u0259s/, is a cultivar of lettuce grown primarily for its thick stem, used as a vegetable. It is especially popular in China, and is called wosun (Chinese: \u83b4\u7b0b; pinyin: w\u014ds\u016dn) or woju (Chinese: \u83b4\u82e3; pinyin: w\u014dj\xf9) (although the latter name may also be used to mean lettuce in general).\n

\n
Celtuce (foreground) for sale in Lhasa
\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n
Celtuce, raw\n\n
Nutritional value per 100 g (3.5 oz)\n
Energy\n 75 kJ (18 kcal)\n
Carbohydrates\n 3.65 g\n
- Dietary fiber\n 1.7 g\n
Fat\n 0.3 g\n
Protein\n 0.85 g\n
Vitamin A equiv.\n 175 \u03bcg (22%)\n
Thiamine (vit. B1)\n 0.055 mg (5%)\n
Riboflavin (vit. B2)\n 0.07 mg (6%)\n
Niacin (vit. B3)\n 0.55 mg (4%)\n
Pantothenic acid (B5)\n 0.183 mg (4%)\n
Vitamin B6\n 0.05 mg (4%)\n
Folate (vit. B9)\n 46 \u03bcg (12%)\n
Vitamin C\n 19.5 mg (23%)\n
Calcium\n 39 mg (4%)\n
Iron\n 0.55 mg (4%)\n
Magnesium\n 28 mg (8%)\n
Manganese\n 0.688 mg (33%)\n
Phosphorus\n 39 mg (6%)\n
Potassium\n 330 mg (7%)\n
Sodium\n 11 mg (1%)\n
Zinc\n 0.27 mg (3%)\n
Link to USDA Database entry
Percentages are roughly approximated
using US recommendations for adults.
Source: USDA Nutrient Database\n
\n

The stem is usually harvested at a length of around 15\u201320 cm and a diameter of around 3\u20134 cm. It is crisp, moist, and mildly flavored, and typically prepared by slicing and then stir frying with more strongly flavored ingredients.\n


\n

\n\n\n\n\n' + } + ], + "title": "Celtuce", + } + } + }, + }, + ( + ("action", "parse"), + ("page", "Tropical Depression Ten (2005)"), + ("prop", "sections"), + ): { + "parse": { + "sections": [ + { + "index": "1", + "level": "2", + "fromtitle": "Tropical_Depression_Ten_(2005)", + "toclevel": 1, + "number": "1", + "byteoffset": 1369, + "line": "Meteorological history", + "anchor": "Meteorological_history", + }, + { + "index": "2", + "level": "2", + "fromtitle": "Tropical_Depression_Ten_(2005)", + "toclevel": 1, + "number": "2", + "byteoffset": 6248, + "line": "Impact", + "anchor": "Impact", + }, + { + "index": "3", + "level": "2", + "fromtitle": "Tropical_Depression_Ten_(2005)", + "toclevel": 1, + "number": "3", + "byteoffset": 7678, + "line": "See also", + "anchor": "See_also", + }, + { + "index": "4", + "level": "2", + "fromtitle": "Tropical_Depression_Ten_(2005)", + "toclevel": 1, + "number": "4", + "byteoffset": 7885, + "line": "References", + "anchor": "References", + }, + { + "index": "5", + "level": "2", + "fromtitle": "Tropical_Depression_Ten_(2005)", + "toclevel": 1, + "number": "5", + "byteoffset": 7917, + "line": "External links", + "anchor": "External_links", + }, + ], + "title": "Tropical Depression Ten (2005)", + } + }, + ( + ("limit", 10), + ("list", "search"), + ("srlimit", 10), + ("srprop", ""), + ("srsearch", "Barack Obama"), + ): { + "query-continue": {"search": {"sroffset": 10}}, + "query": { + "searchinfo": {"totalhits": 12987}, + "search": [ + {"ns": 0, "title": "Barack Obama"}, + {"ns": 0, "title": "Barack Obama, Sr."}, + {"ns": 0, "title": "Presidency of Barack Obama"}, + {"ns": 0, "title": "Barack Obama presidential campaign, 2008"}, + { + "ns": 0, + "title": "List of federal judges appointed by Barack Obama", + }, + {"ns": 0, "title": "Barack Obama in comics"}, + {"ns": 0, "title": "Political positions of Barack Obama"}, + {"ns": 0, "title": "Barack Obama on social media"}, + { + "ns": 0, + "title": "List of Batman: The Brave and the Bold 
characters", + }, + {"ns": 0, "title": "Family of Barack Obama"}, + ], + }, + "warnings": {"main": {"*": "Unrecognized parameter: 'limit'"}}, + }, + ( + ("limit", 3), + ("list", "search"), + ("srlimit", 3), + ("srprop", ""), + ("srsearch", "Porsche"), + ): { + "query-continue": {"search": {"sroffset": 3}}, + "query": { + "searchinfo": {"totalhits": 5335}, + "search": [ + {"ns": 0, "title": "Porsche"}, + {"ns": 0, "title": "Porsche in motorsport"}, + {"ns": 0, "title": "Porsche 911 GT3"}, + ], + }, + "warnings": {"main": {"*": "Unrecognized parameter: 'limit'"}}, + }, + ( + ("limit", 10), + ("list", "search"), + ("srinfo", "suggestion"), + ("srlimit", 10), + ("srprop", ""), + ("srsearch", "hallelulejah"), + ): { + "query": {"searchinfo": {"suggestion": "hallelujah"}, "search": []}, + "warnings": {"main": {"*": "Unrecognized parameter: 'limit'"}}, + }, + ( + ("limit", 10), + ("list", "search"), + ("srinfo", "suggestion"), + ("srlimit", 10), + ("srprop", ""), + ("srsearch", "qmxjsudek"), + ): { + "query": {"search": []}, + "warnings": {"main": {"*": "Unrecognized parameter: 'limit'"}}, + }, + ( + ("inprop", "url"), + ("pageids", 1868108), + ("ppprop", "disambiguation"), + ("prop", "info|pageprops"), + ("redirects", ""), + ): { + "query": { + "pages": { + "1868108": { + "lastrevid": 575687826, + "pageid": 1868108, + "title": "Celtuce", + "editurl": "http://en.wikipedia.org/w/index.php?title=Celtuce&action=edit", + "counter": "", + "length": 1960, + "contentmodel": "wikitext", + "pagelanguage": "en", + "touched": "2014-01-12T09:30:00Z", + "ns": 0, + "fullurl": "http://en.wikipedia.org/wiki/Celtuce", + } + } + } + }, + ( + ("colimit", "max"), + ("prop", "coordinates"), + ("titles", "Great Wall of China"), + ): { + "query": { + "pages": { + "5094570": { + "ns": 0, + "pageid": 5094570, + "coordinates": [ + { + "lat": 40.6769, + "globe": "earth", + "lon": 117.232, + "primary": "", + } + ], + "title": "Great Wall of China", + } + } + }, + "limits": {"extlinks": 500}, + }, + 
( + ("gscoord", "40.67693|117.23193"), + ("gslimit", 10), + ("gsradius", 1000), + ("list", "geosearch"), + ): { + "query": { + "geosearch": [ + { + "pageid": 5094570, + "title": "Great Wall of China", + "lon": 117.232, + "primary": "", + "lat": 40.6769, + "dist": 6.8, + "ns": 0, + } + ] + } + }, + ( + ("gscoord", "40.67693|117.23193"), + ("gslimit", 10), + ("gsradius", 10000), + ("list", "geosearch"), + ): { + "query": { + "geosearch": [ + { + "pageid": 5094570, + "title": "Great Wall of China", + "lon": 117.232, + "primary": "", + "lat": 40.6769, + "dist": 6.8, + "ns": 0, + }, + { + "pageid": 10135375, + "title": "Jinshanling", + "lon": 117.244, + "primary": "", + "lat": 40.6764, + "dist": 1019.6, + "ns": 0, + }, + ] + } + }, + ( + ("gscoord", "40.67693|117.23193"), + ("gslimit", 10), + ("gsradius", 1000), + ("list", "geosearch"), + ("titles", "Great Wall of China"), + ): { + "query": { + "geosearch": [ + { + "pageid": 5094570, + "title": "Great Wall of China", + "lon": 117.232, + "primary": "", + "lat": 40.6769, + "dist": 6.8, + "ns": 0, + } + ] + } + }, + ( + ("gscoord", "40.67693|117.23193"), + ("gslimit", 10), + ("gsradius", 1000), + ("list", "geosearch"), + ("titles", "Test"), + ): {"query": {"geosearch": []}}, + }, + "data": { + "celtuce.content": 'Celtuce (Lactuca sativa var. asparagina, augustana, or angustata), also called stem lettuce, celery lettuce, asparagus lettuce, or Chinese lettuce, IPA (UK,US) /\u02c8s\u025blt.\u0259s/, is a cultivar of lettuce grown primarily for its thick stem, used as a vegetable. It is especially popular in China, and is called wosun (Chinese: \u83b4\u7b0b; pinyin: w\u014ds\u016dn) or woju (Chinese: \u83b4\u82e3; pinyin: w\u014dj\xf9) (although the latter name may also be used to mean lettuce in general).\n\nThe stem is usually harvested at a length of around 15\u201320 cm and a diameter of around 3\u20134 cm. 
It is crisp, moist, and mildly flavored, and typically prepared by slicing and then stir frying with more strongly flavored ingredients.\n\nDown: Photos of the celtuce, chinese lettuce or "Wosun" taken in the province of Girona (Catalonia, Spain, Europe) in June 2013\nCeltuce Nutritional content', + "celtuce.parentid": 574302108, + "celtuce.revid": 575687826, + "celtuce.summary": "Celtuce (Lactuca sativa var. asparagina, augustana, or angustata), also called stem lettuce, celery lettuce, asparagus lettuce, or Chinese lettuce, IPA (UK,US) /\u02c8s\u025blt.\u0259s/, is a cultivar of lettuce grown primarily for its thick stem, used as a vegetable. It is especially popular in China, and is called wosun (Chinese: \u83b4\u7b0b; pinyin: w\u014ds\u016dn) or woju (Chinese: \u83b4\u82e3; pinyin: w\u014dj\xf9) (although the latter name may also be used to mean lettuce in general).\n\nThe stem is usually harvested at a length of around 15\u201320 cm and a diameter of around 3\u20134 cm. It is crisp, moist, and mildly flavored, and typically prepared by slicing and then stir frying with more strongly flavored ingredients.", + "celtuce.images": [ + "http://upload.wikimedia.org/wikipedia/commons/7/79/VegCorn.jpg", + "http://upload.wikimedia.org/wikipedia/commons/8/87/Celtuce.jpg", + "http://upload.wikimedia.org/wikipedia/commons/d/dc/The_farmer%27s_market_near_the_Potala_in_Lhasa.jpg", + "http://upload.wikimedia.org/wikipedia/en/9/99/Question_book-new.svg", + ], + "celtuce.references": [ + "http://ndb.nal.usda.gov/ndb/search/list", + "http://ndb.nal.usda.gov/ndb/search/list?qlookup=11145&format=Full", + ], + "celtuce.links": [ + "Calcium", + "Carbohydrate", + "Chinese language", + "Dietary Reference Intake", + "Dietary fiber", + "Fat", + "Folate", + "Food energy", + "Iron", + "Lettuce", + "Lhasa", + "Magnesium in biology", + "Manganese", + "Niacin", + "Pantothenic acid", + "Phosphorus", + "Pinyin", + "Plant stem", + "Potassium", + "Protein (nutrient)", + "Riboflavin", + "Sodium", 
+ "Stir frying", + "Thiamine", + "Vegetable", + "Vitamin A", + "Vitamin B6", + "Vitamin C", + "Zinc", + ], + "celtuce.categories": [ + "All articles lacking sources", + "All stub articles", + "Articles containing Chinese-language text", + "Articles lacking sources from December 2009", + "Stem vegetables", + "Vegetable stubs", + ], + "celtuce.html": '\n
Celtuce stems & heads
\n

Celtuce (Lactuca sativa var. asparagina, augustana, or angustata), also called stem lettuce, celery lettuce, asparagus lettuce, or Chinese lettuce, IPA (UK,US) /\u02c8s\u025blt.\u0259s/, is a cultivar of lettuce grown primarily for its thick stem, used as a vegetable. It is especially popular in China, and is called wosun (Chinese: \u83b4\u7b0b; pinyin: w\u014ds\u016dn) or woju (Chinese: \u83b4\u82e3; pinyin: w\u014dj\xf9) (although the latter name may also be used to mean lettuce in general).\n

\n
Celtuce (foreground) for sale in Lhasa
\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n
Celtuce, raw\n\n
Nutritional value per 100 g (3.5 oz)\n
Energy\n 75 kJ (18 kcal)\n
Carbohydrates\n 3.65 g\n
- Dietary fiber\n 1.7 g\n
Fat\n 0.3 g\n
Protein\n 0.85 g\n
Vitamin A equiv.\n 175 \u03bcg (22%)\n
Thiamine (vit. B1)\n 0.055 mg (5%)\n
Riboflavin (vit. B2)\n 0.07 mg (6%)\n
Niacin (vit. B3)\n 0.55 mg (4%)\n
Pantothenic acid (B5)\n 0.183 mg (4%)\n
Vitamin B6\n 0.05 mg (4%)\n
Folate (vit. B9)\n 46 \u03bcg (12%)\n
Vitamin C\n 19.5 mg (23%)\n
Calcium\n 39 mg (4%)\n
Iron\n 0.55 mg (4%)\n
Magnesium\n 28 mg (8%)\n
Manganese\n 0.688 mg (33%)\n
Phosphorus\n 39 mg (6%)\n
Potassium\n 330 mg (7%)\n
Sodium\n 11 mg (1%)\n
Zinc\n 0.27 mg (3%)\n
Link to USDA Database entry
Percentages are roughly approximated
using US recommendations for adults.
Source: USDA Nutrient Database\n
\n

The stem is usually harvested at a length of around 15\u201320 cm and a diameter of around 3\u20134 cm. It is crisp, moist, and mildly flavored, and typically prepared by slicing and then stir frying with more strongly flavored ingredients.\n


\n

\n\n\n\n\n', + "cyclone.content": 'Tropical Depression Ten was the tenth tropical cyclone of the record-breaking 2005 Atlantic hurricane season. It formed on August 13 from a tropical wave that emerged from the west coast of Africa on August 8. As a result of strong wind shear, the depression remained weak and did not strengthen beyond tropical depression status. The cyclone degenerated on August 14, although its remnants partially contributed to the formation of Tropical Depression Twelve, which eventually intensified into Hurricane Katrina. The cyclone had no effect on land, and did not directly result in any fatalities or damage.\n\n\n== Meteorological history ==\n\nOn August 8, a tropical wave emerged from the west coast of Africa and entered the Atlantic Ocean. Tracking towards the west, the depression began to exhibit signs of convective organization on August 11. The system continued to develop, and it is estimated that Tropical Depression Ten formed at 1200 UTC on August 13. At the time, it was located about 1,600 miles (2,600 km) east of Barbados. Upon its designation, the depression consisted of a large area of thunderstorm activity, with curved banding features and expanding outflow. However, the environmental conditions were predicted to quickly become unfavorable. The depression moved erratically and slowly towards the west, and wind shear inhibited any significant intensification. Late on August 13, it was "beginning to look like Irene-junior as it undergoes southwesterly mid-level shear beneath the otherwise favorable upper-level outflow pattern". The wind shear was expected to relent within 48 hours, prompting some forecast models to suggest the depression would eventually attain hurricane status.\nBy early August 14, the shear had substantially disrupted the storm, leaving the low-level center of circulation exposed from the area of convection, which was also deteriorating. After meandering, the storm began to move westward. 
Forecasters expected it to resume a northwestward track as high pressure to the south of Bermuda was forecasted to weaken and another high was predicted to form southwest of the Azores. By 1800 UTC on August 14, the strong shear had further weakened the storm, and it no longer met the criteria for a tropical cyclone. It degenerated into a remnant low, and the National Hurricane Center issued their final advisory on the cyclone. Moving westward, it occasionally produced bursts of convective activity, before dissipating on August 18.\nTropical Depression Twelve formed over the southeastern Bahamas at 2100 UTC on August 23, partially from the remains of Tropical Depression Ten. While the normal standards for numbering tropical depressions in the Atlantic stipulate that the initial designation be retained when a depression regenerates, satellite imagery indicated that a second tropical wave had combined with Tropical Depression Ten north of Puerto Rico to form a new, more complex weather system, which was then designated as Tropical Depression Twelve. In a re-analysis, it was found that the low-level circulation of Tropical Depression Ten had completely detached and dissipated; only the remnant mid-level circulation moved on and merged with the second tropical wave. As a result, the criteria for keeping the same name and identity were not met. Tropical Depression Twelve later became Hurricane Katrina.\n\n\n== Impact ==\nBecause Tropical Depression Ten never approached land as a tropical cyclone, no tropical cyclone watches and warnings were issued for any land masses. No effects, damages, or fatalities were reported, and no ships reported tropical storm-force winds in association with the depression. The system did not attain tropical storm status; as such, it was not given a name by the National Hurricane Center. 
The storm partially contributed to the formation of Hurricane Katrina, which became a Category 5 hurricane on the Saffir-Simpson Hurricane Scale and made landfall in Louisiana, causing catastrophic damage. Katrina was the costliest hurricane, and one of the five deadliest, in the history of the United States.\n\n\n== See also ==\n\nMeteorological history of Hurricane Katrina\nList of storms in the 2005 Atlantic hurricane season\nTimeline of the 2005 Atlantic hurricane season\n\n\n== References ==\n\n\n== External links ==\n\nTropical Depression Ten Tropical Cyclone Report\nTropical Depression Ten advisory archive', + "cyclone.revid": 572715399, + "cyclone.parentid": 539367750, + "cyclone.summary": "Tropical Depression Ten was the tenth tropical cyclone of the record-breaking 2005 Atlantic hurricane season. It formed on August 13 from a tropical wave that emerged from the west coast of Africa on August 8. As a result of strong wind shear, the depression remained weak and did not strengthen beyond tropical depression status. The cyclone degenerated on August 14, although its remnants partially contributed to the formation of Tropical Depression Twelve, which eventually intensified into Hurricane Katrina. 
The cyclone had no effect on land, and did not directly result in any fatalities or damage.\n\n", + "cyclone.images": [ + "http://upload.wikimedia.org/wikipedia/commons/3/33/Tropical_Depression_Ten_%282005%29.ogg", + "http://upload.wikimedia.org/wikipedia/commons/3/37/People_icon.svg", + "http://upload.wikimedia.org/wikipedia/commons/4/47/Sound-icon.svg", + "http://upload.wikimedia.org/wikipedia/commons/4/4a/TD_10_August_13%2C_2005.jpg", + "http://upload.wikimedia.org/wikipedia/commons/7/7d/Tropical_Depression_10_%282005%29.png", + "http://upload.wikimedia.org/wikipedia/commons/8/89/Cyclone_Catarina_from_the_ISS_on_March_26_2004.JPG", + "http://upload.wikimedia.org/wikipedia/commons/8/89/Symbol_book_class2.svg", + "http://upload.wikimedia.org/wikipedia/commons/a/a5/10-L_2005_track.png", + "http://upload.wikimedia.org/wikipedia/commons/e/e0/2005_Atlantic_hurricane_season_summary_map.png", + "http://upload.wikimedia.org/wikipedia/en/4/48/Folder_Hexagonal_Icon.svg", + "http://upload.wikimedia.org/wikipedia/en/4/4a/Commons-logo.svg", + "http://upload.wikimedia.org/wikipedia/en/e/e7/Cscr-featured.svg", + "http://upload.wikimedia.org/wikipedia/en/f/fd/Portal-puzzle.svg", + ], + "cyclone.references": [ + "http://books.google.com/?id=-a8DRl1HuwoC&q=%22tropical+depression+ten%22+2005&dq=%22tropical+depression+ten%22+2005", + "http://facstaff.unca.edu/chennon/research/documents/erb_ncur2006_preprint.pdf", + "http://www.nhc.noaa.gov/archive/2005/TEN.shtml?", + "http://www.nhc.noaa.gov/archive/2005/dis/al102005.discus.001.shtml?", + "http://www.nhc.noaa.gov/archive/2005/dis/al102005.discus.002.shtml?", + "http://www.nhc.noaa.gov/archive/2005/dis/al102005.discus.003.shtml?", + "http://www.nhc.noaa.gov/archive/2005/dis/al122005.discus.001.shtml", + "http://www.nhc.noaa.gov/pdf/TCR-AL102005_Ten.pdf", + "http://www.nhc.noaa.gov/pdf/TCR-AL122005_Katrina.pdf", + "http://www.wptv.com/content/chopper5/story/Capt-Julie-Reports-On-Hurricane-Katrina/q__v8S2TZES2GiccRTQ2bw.cspx", + ], + 
"cyclone.links": [ + "2005 Atlantic hurricane season", + "2005 Azores subtropical storm", + "Atlantic Ocean", + "Atmospheric circulation", + "Atmospheric convection", + "Azores", + "Bahamas", + "Bar (unit)", + "Barbados", + "Bermuda", + "High pressure area", + "Hurricane Beta", + "Hurricane Cindy (2005)", + "Hurricane Dennis", + "Hurricane Emily (2005)", + "Hurricane Epsilon", + "Hurricane Irene (2005)", + "Hurricane Katrina", + "Hurricane Maria (2005)", + "Hurricane Nate (2005)", + "Hurricane Ophelia (2005)", + "Hurricane Philippe (2005)", + "Hurricane Rita", + "Hurricane Stan", + "Hurricane Vince (2005)", + "Hurricane Wilma", + "Inch of mercury", + "International Standard Book Number", + "List of Category 5 Atlantic hurricanes", + "List of storms in the 2005 Atlantic hurricane season", + "Louisiana", + "Meteorological history of Hurricane Katrina", + "National Hurricane Center", + "North Atlantic tropical cyclone", + "Outflow (meteorology)", + "Pascal (unit)", + "Puerto Rico", + "Saffir-Simpson Hurricane Scale", + "Saffir\u2013Simpson hurricane wind scale", + "Timeline of the 2005 Atlantic hurricane season", + "Tropical Storm Alpha (2005)", + "Tropical Storm Arlene (2005)", + "Tropical Storm Bret (2005)", + "Tropical Storm Delta (2005)", + "Tropical Storm Franklin (2005)", + "Tropical Storm Gamma", + "Tropical Storm Gert (2005)", + "Tropical Storm Jose (2005)", + "Tropical Storm Tammy (2005)", + "Tropical Storm Zeta", + "Tropical cyclone", + "Tropical cyclone scales", + "Tropical cyclone watches and warnings", + "Tropical wave", + "Wind shear", + ], + "cyclone.categories": [ + "2005 Atlantic hurricane season", + "Articles with hAudio microformats", + "Atlantic tropical depressions", + "CS1 errors: dates", + "Commons category with local link same as on Wikidata", + "Featured articles", + "Hurricane Katrina", + "Spoken articles", + ], + "cyclone.sections": [ + "External links", + "Impact", + "Meteorological history", + "References", + "See also", + ], + 
"cyclone.section.impact": "Because Tropical Depression Ten never approached land as a tropical cyclone, no tropical cyclone watches and warnings were issued for any land masses. No effects, damages, or fatalities were reported, and no ships reported tropical storm-force winds in association with the depression. The system did not attain tropical storm status; as such, it was not given a name by the National Hurricane Center. The storm partially contributed to the formation of Hurricane Katrina, which became a Category 5 hurricane on the Saffir-Simpson Hurricane Scale and made landfall in Louisiana, causing catastrophic damage. Katrina was the costliest hurricane, and one of the five deadliest, in the history of the United States.", + "barack.search": [ + "Barack Obama", + "Barack Obama, Sr.", + "Presidency of Barack Obama", + "Barack Obama presidential campaign, 2008", + "List of federal judges appointed by Barack Obama", + "Barack Obama in comics", + "Political positions of Barack Obama", + "Barack Obama on social media", + "List of Batman: The Brave and the Bold characters", + "Family of Barack Obama", + ], + "porsche.search": ["Porsche", "Porsche in motorsport", "Porsche 911 GT3"], + "great_wall_of_china.coordinates.lat": "40.677", + "great_wall_of_china.coordinates.lon": "117.232", + "great_wall_of_china.geo_seach": ["Great Wall of China"], + "great_wall_of_china.geo_seach_with_radius": [ + "Great Wall of China", + "Jinshanling", + ], + "great_wall_of_china.geo_seach_with_existing_article_name": [ + "Great Wall of China" + ], + "great_wall_of_china.geo_seach_with_non_existing_article_name": [], + }, } diff --git a/tests/search_test.py b/tests/search_test.py index a021138..f2931e7 100644 --- a/tests/search_test.py +++ b/tests/search_test.py @@ -1,44 +1,49 @@ # -*- coding: utf-8 -*- import unittest - from collections import defaultdict -from wikipedia import wikipedia from request_mock_data import mock_data +from wikipedia import wikipedia + # mock out 
_wiki_request class _wiki_request(object): - calls = defaultdict(int) + calls = defaultdict(int) + + @classmethod + def __call__(cls, params): + cls.calls[params.__str__()] += 1 + return mock_data["_wiki_request calls"][tuple(sorted(params.items()))] - @classmethod - def __call__(cls, params): - cls.calls[params.__str__()] += 1 - return mock_data["_wiki_request calls"][tuple(sorted(params.items()))] wikipedia._wiki_request = _wiki_request() class TestSearch(unittest.TestCase): - """Test the functionality of wikipedia.search.""" - - def test_search(self): - """Test parsing a Wikipedia request result.""" - self.assertEqual(wikipedia.search("Barack Obama"), mock_data['data']["barack.search"]) - - def test_limit(self): - """Test limiting a request results.""" - self.assertEqual(wikipedia.search("Porsche", results=3), mock_data['data']["porsche.search"]) - - def test_suggestion(self): - """Test getting a suggestion as well as search results.""" - search, suggestion = wikipedia.search("hallelulejah", suggestion=True) - self.assertEqual(search, []) - self.assertEqual(suggestion, u'hallelujah') - - def test_suggestion_none(self): - """Test getting a suggestion when there is no suggestion.""" - search, suggestion = wikipedia.search("qmxjsudek", suggestion=True) - self.assertEqual(search, []) - self.assertEqual(suggestion, None) + """Test the functionality of wikipedia.search.""" + + def test_search(self): + """Test parsing a Wikipedia request result.""" + self.assertEqual( + wikipedia.search("Barack Obama"), mock_data["data"]["barack.search"] + ) + + def test_limit(self): + """Test limiting a request results.""" + self.assertEqual( + wikipedia.search("Porsche", results=3), mock_data["data"]["porsche.search"] + ) + + def test_suggestion(self): + """Test getting a suggestion as well as search results.""" + search, suggestion = wikipedia.search("hallelulejah", suggestion=True) + self.assertEqual(search, []) + self.assertEqual(suggestion, "hallelujah") + + def 
test_suggestion_none(self): + """Test getting a suggestion when there is no suggestion.""" + search, suggestion = wikipedia.search("qmxjsudek", suggestion=True) + self.assertEqual(search, []) + self.assertEqual(suggestion, None) diff --git a/wikipedia/__init__.py b/wikipedia/__init__.py index c781093..a7ac28f 100644 --- a/wikipedia/__init__.py +++ b/wikipedia/__init__.py @@ -20,3 +20,23 @@ suggest, summary, ) + +__all__ = [ + "DisambiguationError", + "HTTPTimeoutError", + "PageError", + "RedirectError", + "WikipediaException", + "WikipediaPage", + "donate", + "geosearch", + "languages", + "page", + "random", + "search", + "set_lang", + "set_rate_limiting", + "set_user_agent", + "suggest", + "summary", +] diff --git a/wikipedia/exceptions.py b/wikipedia/exceptions.py index 58e6d30..a061fc6 100644 --- a/wikipedia/exceptions.py +++ b/wikipedia/exceptions.py @@ -53,7 +53,8 @@ def __init__(self, title: str, may_refer_to: List[str]): self.options = may_refer_to def __unicode__(self): - return '"{0}" may refer to: \n{1}'.format(self.title, "\n".join(self.options)) + options = "\n".join(self.options) + return f'"{self.title}" may refer to: \n{options}' class RedirectError(WikipediaException): diff --git a/wikipedia/version.py b/wikipedia/version.py index eee2da1..e51937f 100644 --- a/wikipedia/version.py +++ b/wikipedia/version.py @@ -1,4 +1,3 @@ from typing import Tuple - __version__: Tuple[int, int, int] = (1, 4, 0) diff --git a/wikipedia/wikipedia.py b/wikipedia/wikipedia.py index c1bccfe..6afb139 100644 --- a/wikipedia/wikipedia.py +++ b/wikipedia/wikipedia.py @@ -19,7 +19,7 @@ WikipediaException, ) -API_URL = "http://en.wikipedia.org/w/api.php" +API_URL = "https://en.wikipedia.org/w/api.php" RATE_LIMIT = False RATE_LIMIT_MIN_WAIT = None RATE_LIMIT_LAST_CALL = None @@ -29,14 +29,14 @@ def set_lang(prefix: str): """ Change the language of the API being requested. - Set `prefix` to one of the two letter prefixes found on the `list of all Wikipedias `_. 
+ Set `prefix` to one of the two letter prefixes found on the `list of all Wikipedias `_. After setting the language, the cache for ``search``, ``suggest``, and ``summary`` will be cleared. .. note:: Make sure you search for page titles in the language that you have set. """ global API_URL - API_URL = f"http://{prefix.lower()}.wikipedia.org/w/api.php" + API_URL = f"https://{prefix.lower()}.wikipedia.org/w/api.php" for cached_func in (search, suggest, summary): cached_func.clear_cache() @@ -145,7 +145,7 @@ def geosearch( ): """ Do a wikipedia geo search for `latitude` and `longitude` - using HTTP API described in http://www.mediawiki.org/wiki/Extension:GeoData + using HTTP API described in https://www.mediawiki.org/wiki/Extension:GeoData Arguments: @@ -220,7 +220,7 @@ def random(pages: int = 1): * pages - the number of random pages returned (max of 10) """ - # http://en.wikipedia.org/w/api.php?action=query&list=random&rnlimit=5000&format=jsonfm + # https://en.wikipedia.org/w/api.php?action=query&list=random&rnlimit=5000&format=jsonfm query_params: Dict[str, Any] = { "list": "random", "rnnamespace": 0, @@ -548,7 +548,7 @@ def revision_id(self): The revision ID is a number that uniquely identifies the current version of the page. It can be used to create the permalink or for other direct API calls. See `Help:Page history - `_ for more + `_ for more information. """ @@ -650,7 +650,7 @@ def references(self): if not getattr(self, "_references", False): def add_protocol(url: str): - return url if url.startswith("http") else "http:" + url + return url if url.startswith(("http:", "https:")) else "http:" + url self._references = [ add_protocol(link["*"]) @@ -767,9 +767,7 @@ def donate(): """ Open up the Wikimedia donate page in your favorite browser. 
""" - import webbrowser - - webbrowser.open( + __import__("webbrowser").open( "https://donate.wikimedia.org/w/index.php?title=Special:FundraiserLandingPage", new=2, ) @@ -802,6 +800,7 @@ def _wiki_request(params: Dict[str, Any]): time.sleep(int(wait_time.total_seconds())) r = requests.get(API_URL, params=params, headers=headers, timeout=timeout) + r.raise_for_status() if RATE_LIMIT: RATE_LIMIT_LAST_CALL = datetime.now()