Columns (type and observed value range):

- instance_id: string, lengths 10 to 57
- base_commit: string, length 40
- created_at: string (date), 2014-04-30 14:58:36 to 2025-04-30 20:14:11
- environment_setup_commit: string, length 40
- hints_text: string, lengths 0 to 273k
- patch: string, lengths 251 to 7.06M
- problem_statement: string, lengths 11 to 52.5k
- repo: string, lengths 7 to 53
- test_patch: string, lengths 231 to 997k
- meta: dict
- version: string, 864 distinct values
- install_config: dict
- requirements: string, lengths 93 to 34.2k
- environment: string, lengths 760 to 20.5k
- FAIL_TO_PASS: list, lengths 1 to 9.39k
- FAIL_TO_FAIL: list, lengths 0 to 2.69k
- PASS_TO_PASS: list, lengths 0 to 7.87k
- PASS_TO_FAIL: list, lengths 0 to 192
- license_name: string, 56 distinct values
- __index_level_0__: int64, 0 to 21.4k
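The column summary above follows Hugging Face dataset-viewer conventions (stringlengths, listlengths, stringclasses), so the records below can presumably be loaded with the `datasets` library. A minimal sketch, assuming the dataset is hosted on the Hub; the repository id is a placeholder, not the real one:

```python
from datasets import load_dataset

# Placeholder repository id -- substitute the actual dataset path.
ds = load_dataset("some-org/this-swe-style-dataset", split="train")

row = ds[0]
print(row["instance_id"])        # e.g. "docker__docker-py-911"
print(row["repo"])               # e.g. "docker/docker-py"
print(len(row["FAIL_TO_PASS"]))  # tests expected to flip from failing to passing
```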
docker__docker-py-911
446e6d08dd569194a27bb354a184b7d94ecf5e48
2016-01-29 00:27:19
4c34be5d4ab8a5a017950712e9c96b56d78d1c58
diff --git a/docker/client.py b/docker/client.py index fb186cc7..7d1f7c46 100644 --- a/docker/client.py +++ b/docker/client.py @@ -45,17 +45,17 @@ class Client( timeout=constants.DEFAULT_TIMEOUT_SECONDS, tls=False): super(Client, self).__init__() - if tls and (not base_url or not base_url.startswith('https://')): + if tls and not base_url: raise errors.TLSParameterError( - 'If using TLS, the base_url argument must begin with ' - '"https://".') + 'If using TLS, the base_url argument must be provided.' + ) self.base_url = base_url self.timeout = timeout self._auth_configs = auth.load_config() - base_url = utils.parse_host(base_url, sys.platform) + base_url = utils.parse_host(base_url, sys.platform, tls=bool(tls)) if base_url.startswith('http+unix://'): self._custom_adapter = unixconn.UnixAdapter(base_url, timeout) self.mount('http+docker://', self._custom_adapter) diff --git a/docker/utils/utils.py b/docker/utils/utils.py index 1ce1867c..dc46f1ef 100644 --- a/docker/utils/utils.py +++ b/docker/utils/utils.py @@ -345,7 +345,7 @@ def parse_repository_tag(repo_name): # fd:// protocol unsupported (for obvious reasons) # Added support for http and https # Protocol translation: tcp -> http, unix -> http+unix -def parse_host(addr, platform=None): +def parse_host(addr, platform=None, tls=False): proto = "http+unix" host = DEFAULT_HTTP_HOST port = None @@ -381,7 +381,7 @@ def parse_host(addr, platform=None): raise errors.DockerException( "Invalid bind address protocol: {0}".format(addr) ) - proto = "http" + proto = "https" if tls else "http" if proto != "http+unix" and ":" in addr: host_parts = addr.split(':')
Problem when using the DOCKER_HOST variable in combination with docker-compose and https:// Hi, I'm trying to use docker & docker-compose with the DOCKER_HOST env-variable to control a remote docker-host. at first I configured the variables on the docker client machine as follows: export DOCKER_CERT_PATH=/vagrant/docker export DOCKER_TLS_VERIFY=1 export DOCKER_HOST=my-docker-vm.cloudapp.azure.com:2376 This lead to an error in docker-compose as discussed in this issue docker/compose#2634 Once I added the `https://` protocol prefix to the hostname the connection problem with docker-compose went away... export DOCKER_HOST=https://my-docker-vm.cloudapp.azure.com:2376 But with this format of the DOCKER_HOST variable now the docker CLI is complaining about the format... docker ps Invalid bind address format: https://my-docker-vm.cloudapp.azure.com:2376 I think this error is triggered here: https://github.com/docker/docker-py/blob/62d9964cc1881f6f3cd021594cd40fd80a8fc855/docker/utils/utils.py#L388 The code is not expecting to find two occurrences of double-dots `:` in the host variable, but for some reason it does. PS: also the ambiguity of the two error messages in `utils.py` could be improved, since at L368 & at L388 both errors emit the same message, but for different reasons ;) Thanks & Regards, Wolfgang
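The gold patch above threads a `tls` flag through `parse_host` so that a `DOCKER_HOST` value without an explicit scheme is mapped to `https://` when TLS is enabled. A minimal sketch of the expected behaviour, mirroring the unit test in the accompanying test patch (the host name comes from the issue report and is illustrative only):

```python
from docker.utils.utils import parse_host

# Previously a bare "host:port" was always rewritten to "http://host:port";
# with the new flag the scheme follows the TLS setting instead.
assert parse_host('my-docker-vm.cloudapp.azure.com:2376', None, tls=True) == \
    'https://my-docker-vm.cloudapp.azure.com:2376'
```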
docker/docker-py
diff --git a/tests/unit/utils_test.py b/tests/unit/utils_test.py index 83d2a98d..63ea10e7 100644 --- a/tests/unit/utils_test.py +++ b/tests/unit/utils_test.py @@ -360,6 +360,11 @@ class ParseHostTest(base.BaseTestCase): assert parse_host(val, 'win32') == tcp_port + def test_parse_host_tls(self): + host_value = 'myhost.docker.net:3348' + expected_result = 'https://myhost.docker.net:3348' + self.assertEqual(parse_host(host_value, None, True), expected_result) + class ParseRepositoryTagTest(base.BaseTestCase): sha = 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855'
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks", "has_many_modified_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 2 }, "num_modified_files": 2 }
1.7
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
-e git+https://github.com/docker/docker-py.git@446e6d08dd569194a27bb354a184b7d94ecf5e48#egg=docker_py exceptiongroup==1.2.2 iniconfig==2.1.0 packaging==24.2 pluggy==1.5.0 pytest==8.3.5 requests==2.5.3 six==1.17.0 tomli==2.2.1 websocket_client==0.32.0
name: docker-py channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - exceptiongroup==1.2.2 - iniconfig==2.1.0 - packaging==24.2 - pluggy==1.5.0 - pytest==8.3.5 - requests==2.5.3 - six==1.17.0 - tomli==2.2.1 - websocket-client==0.32.0 prefix: /opt/conda/envs/docker-py
[ "tests/unit/utils_test.py::ParseHostTest::test_parse_host_tls" ]
[]
[ "tests/unit/utils_test.py::HostConfigTest::test_create_endpoint_config_with_aliases", "tests/unit/utils_test.py::HostConfigTest::test_create_host_config_invalid_cpu_cfs_types", "tests/unit/utils_test.py::HostConfigTest::test_create_host_config_no_options", "tests/unit/utils_test.py::HostConfigTest::test_create_host_config_no_options_newer_api_version", "tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_cpu_period", "tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_cpu_quota", "tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_oom_kill_disable", "tests/unit/utils_test.py::UlimitTest::test_create_host_config_dict_ulimit", "tests/unit/utils_test.py::UlimitTest::test_create_host_config_dict_ulimit_capitals", "tests/unit/utils_test.py::UlimitTest::test_create_host_config_obj_ulimit", "tests/unit/utils_test.py::UlimitTest::test_ulimit_invalid_type", "tests/unit/utils_test.py::LogConfigTest::test_create_host_config_dict_logconfig", "tests/unit/utils_test.py::LogConfigTest::test_create_host_config_obj_logconfig", "tests/unit/utils_test.py::LogConfigTest::test_logconfig_invalid_config_type", "tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_empty", "tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_no_cert_path", "tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_tls", "tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_compact", "tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_complete", "tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_empty", "tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_list", "tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_no_mode", "tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_unicode_bytes_input", "tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_unicode_unicode_input", "tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_commented_line", "tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_invalid_line", "tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_proper", "tests/unit/utils_test.py::ParseHostTest::test_parse_host", "tests/unit/utils_test.py::ParseHostTest::test_parse_host_empty_value", "tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_no_tag", "tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_sha", "tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_tag", "tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_user_image_no_tag", "tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_user_image_tag", "tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_no_tag", "tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_sha", "tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_tag", "tests/unit/utils_test.py::ParseDeviceTest::test_dict", "tests/unit/utils_test.py::ParseDeviceTest::test_full_string_definition", "tests/unit/utils_test.py::ParseDeviceTest::test_hybrid_list", "tests/unit/utils_test.py::ParseDeviceTest::test_partial_string_definition", "tests/unit/utils_test.py::ParseDeviceTest::test_permissionless_string_definition", "tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_float", "tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_invalid", 
"tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_maxint", "tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_valid", "tests/unit/utils_test.py::UtilsTest::test_convert_filters", "tests/unit/utils_test.py::UtilsTest::test_create_ipam_config", "tests/unit/utils_test.py::UtilsTest::test_decode_json_header", "tests/unit/utils_test.py::SplitCommandTest::test_split_command_with_unicode", "tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_matching_internal_port_ranges", "tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_matching_internal_ports", "tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_nonmatching_internal_port_ranges", "tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_nonmatching_internal_ports", "tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_one_port", "tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_port_range", "tests/unit/utils_test.py::PortsTest::test_host_only_with_colon", "tests/unit/utils_test.py::PortsTest::test_non_matching_length_port_ranges", "tests/unit/utils_test.py::PortsTest::test_port_and_range_invalid", "tests/unit/utils_test.py::PortsTest::test_port_only_with_colon", "tests/unit/utils_test.py::PortsTest::test_split_port_invalid", "tests/unit/utils_test.py::PortsTest::test_split_port_no_host_port", "tests/unit/utils_test.py::PortsTest::test_split_port_range_no_host_port", "tests/unit/utils_test.py::PortsTest::test_split_port_range_with_host_ip_no_port", "tests/unit/utils_test.py::PortsTest::test_split_port_range_with_host_port", "tests/unit/utils_test.py::PortsTest::test_split_port_range_with_protocol", "tests/unit/utils_test.py::PortsTest::test_split_port_with_host_ip", "tests/unit/utils_test.py::PortsTest::test_split_port_with_host_ip_no_port", "tests/unit/utils_test.py::PortsTest::test_split_port_with_host_port", "tests/unit/utils_test.py::PortsTest::test_split_port_with_protocol", "tests/unit/utils_test.py::ExcludePathsTest::test_directory", "tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_single_exception", "tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_subdir_exception", "tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_trailing_slash", "tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_wildcard_exception", "tests/unit/utils_test.py::ExcludePathsTest::test_exclude_custom_dockerfile", "tests/unit/utils_test.py::ExcludePathsTest::test_exclude_dockerfile_dockerignore", "tests/unit/utils_test.py::ExcludePathsTest::test_no_dupes", "tests/unit/utils_test.py::ExcludePathsTest::test_no_excludes", "tests/unit/utils_test.py::ExcludePathsTest::test_question_mark", "tests/unit/utils_test.py::ExcludePathsTest::test_single_filename", "tests/unit/utils_test.py::ExcludePathsTest::test_single_filename_trailing_slash", "tests/unit/utils_test.py::ExcludePathsTest::test_single_subdir_single_filename", "tests/unit/utils_test.py::ExcludePathsTest::test_single_subdir_wildcard_filename", "tests/unit/utils_test.py::ExcludePathsTest::test_subdirectory", "tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_exclude", "tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_filename_end", "tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_filename_start", "tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_subdir_single_filename", "tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_subdir_wildcard_filename", 
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_with_exception", "tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_with_wildcard_exception", "tests/unit/utils_test.py::TarTest::test_tar_with_directory_symlinks", "tests/unit/utils_test.py::TarTest::test_tar_with_empty_directory", "tests/unit/utils_test.py::TarTest::test_tar_with_excludes", "tests/unit/utils_test.py::TarTest::test_tar_with_file_symlinks" ]
[]
Apache License 2.0
401
scrapy__scrapy-1746
a35aec71e96b0c0288c370afa425e8e700dca8b3
2016-01-29 18:00:12
6aa85aee2a274393307ac3e777180fcbdbdc9848
diff --git a/scrapy/commands/settings.py b/scrapy/commands/settings.py index 0e73f4f58..bce4e6086 100644 --- a/scrapy/commands/settings.py +++ b/scrapy/commands/settings.py @@ -1,5 +1,8 @@ from __future__ import print_function +import json + from scrapy.commands import ScrapyCommand +from scrapy.settings import BaseSettings class Command(ScrapyCommand): @@ -28,7 +31,11 @@ class Command(ScrapyCommand): def run(self, args, opts): settings = self.crawler_process.settings if opts.get: - print(settings.get(opts.get)) + s = settings.get(opts.get) + if isinstance(s, BaseSettings): + print(json.dumps(s.copy_to_dict())) + else: + print(s) elif opts.getbool: print(settings.getbool(opts.getbool)) elif opts.getint: diff --git a/scrapy/settings/__init__.py b/scrapy/settings/__init__.py index 342d2585e..7b7808959 100644 --- a/scrapy/settings/__init__.py +++ b/scrapy/settings/__init__.py @@ -4,6 +4,7 @@ import copy import warnings from collections import MutableMapping from importlib import import_module +from pprint import pformat from scrapy.utils.deprecate import create_deprecated_class from scrapy.exceptions import ScrapyDeprecationWarning @@ -368,11 +369,31 @@ class BaseSettings(MutableMapping): def __len__(self): return len(self.attributes) - def __str__(self): - return str(self.attributes) + def _to_dict(self): + return {k: (v._to_dict() if isinstance(v, BaseSettings) else v) + for k, v in six.iteritems(self)} + + def copy_to_dict(self): + """ + Make a copy of current settings and convert to a dict. - def __repr__(self): - return "<%s %s>" % (self.__class__.__name__, self.attributes) + This method returns a new dict populated with the same values + and their priorities as the current settings. + + Modifications to the returned dict won't be reflected on the original + settings. + + This method can be useful for example for printing settings + in Scrapy shell. + """ + settings = self.copy() + return settings._to_dict() + + def _repr_pretty_(self, p, cycle): + if cycle: + p.text(repr(self)) + else: + p.text(pformat(self.copy_to_dict())) @property def overrides(self):
BaseSettings.__repr__ is too verbose for scrapy shell Settings object got `__repr__` in https://github.com/scrapy/scrapy/pull/1149, but it looks weird in `scrapy shell`: > [s] Available Scrapy objects: [s] crawler &lt;scrapy.crawler.Crawler object at 0x10c042fd0> [s] item {} [s] request &lt;GET http://yellowpages.co.th> [s] response &lt;302 http://yellowpages.co.th> [s] settings {'CLOSESPIDER_ITEMCOUNT': &lt;SettingsAttribute value=0 priority=0>, 'RETRY_HTTP_CODES': &lt;SettingsAttribute value=[500, 502, 503, 504, 408] priority=0>, 'HTTPCACHE_DIR': &lt;SettingsAttribute value='httpcache-2' priority=20>, 'MEMUSAGE_REPORT': &lt;SettingsAttribute value=False priority=0>, 'DOWNLOAD_HANDLERS_BASE': &lt;SettingsAttribute value=&lt;BaseSettings {'s3': &lt;SettingsAttribute value='scrapy.core.downloader.handlers.s3.S3DownloadHandler' priority=0>, 'ftp': &lt;SettingsAttribute value='scrapy.core.downloader.handlers.ftp.FTPDownloadHandler' priority=0>, 'http': &lt;SettingsAttribute value='scrapy.core.downloader.handlers.http.HTTPDownloadHandler' priority=0>, 'https': &lt;SettingsAttribute value='scrapy.core.downloader.handlers.http.HTTPDownloadHandler' priority=0>, 'file': &lt;SettingsAttribute value='scrapy.core.downloader.handlers.file.FileDownloadHandler' priority=0>}> priority=0>, 'RETRY_PRIORITY_ADJUST': &lt;SettingsAttribute value=-1 priority=0>, 'MAIL_FROM': &lt;SettingsAttribute value='scrapy@localhost' priority=0>, 'HTTPCACHE_EXPIRATION_SECS': &lt;SettingsAttribute value=86400 priority=20>, 'SPIDER_LOADER_CLASS': &lt;SettingsAttribute value='scrapy.spiderloader.SpiderLoader' priority=0>, 'COMPRESSION_ENABLED': &lt;SettingsAttribute value=True priority=0>, 'DOWNLOAD_TIMEOUT': &lt;SettingsAttribute value=180 priority=0>, 'MAIL_PASS': &lt;SettingsAttribute value=None priority=0>, 'MEMUSAGE_LIMIT_MB': &lt;SettingsAttribute value=0 priority=0>, 'EXTENSIONS': &lt;SettingsAttribute value=&lt;BaseSettings {}> priority=0>, 'DEPTH_PRIORITY': &lt;SettingsAttribute value=0 priority=0>, 'TELNETCONSOLE_HOST': &lt;SettingsAttribute value='127.0.0.1' priority=0>, 'MEMDEBUG_NOTIFY': &lt;SettingsAttribute value=[] priority=0>, 'HTTPPROXY_AUTH_ENCODING': &lt;SettingsAttribute value='latin-1' priority=0>, 'DOWNLOAD_WARNSIZE': &lt;SettingsAttribute value=33554432 priority=0>, 'SPIDER_MODULES': &lt;SettingsAttribute value=['acrawler.spiders'] priority=20>, 'RETRY_TIMES': &lt;SettingsAttribute value=2 priority=0>, 'TELNETCONSOLE_PORT': &lt;SettingsAttribute value=[6023, 6073] priority=0>, 'TELNETCONSOLE_ENABLED': &lt;SettingsAttribute value=False priority=20>, 'DOWNLOADER_MIDDLEWARES': &lt;SettingsAttribute value=&lt;BaseSettings {}> priority=0>, 'HTTPCACHE_DBM_MODULE': &lt;SettingsAttribute value='anydbm' priority=0>, 'ROBOTSTXT_OBEY': &lt;SettingsAttribute value=True priority=20>, 'DEPTH_LIMIT': &lt;SettingsAttribute value=0 priority=0>, 'REACTOR_THREADPOOL_MAXSIZE': &lt;SettingsAttribute value=10 priority=0>, 'FEED_EXPORT_FIELDS': &lt;SettingsAttribute value=None priority=0>, 'CLOSESPIDER_PAGECOUNT': &lt;SettingsAttribute value=0 priority=0>, 'LOG_SHORT_NAMES': &lt;SettingsAttribute value=False priority=0>, 'AUTOTHROTTLE_MAX_DELAY': &lt;SettingsAttribute value=60 priority=20>, 'URLLENGTH_LIMIT': &lt;SettingsAttribute value=2083 priority=0>, 'FEED_EXPORTERS': &lt;SettingsAttribute value=&lt;BaseSettings {}> priority=0>, 'LOG_ENCODING': &lt;SettingsAttribute value='utf-8' priority=0>, 'FEED_EXPORTERS_BASE': &lt;SettingsAttribute value=&lt;BaseSettings {'xml': &lt;SettingsAttribute 
value='scrapy.exporters.XmlItemExporter' priority=0>, 'jsonlines': &lt;SettingsAttribute value='scrapy.exporters.JsonLinesItemExporter' priority=0>, 'jl': &lt;SettingsAttribute value='scrapy.exporters.JsonLinesItemExporter' priority=0>, 'json': &lt;SettingsAttribute value='scrapy.exporters.JsonItemExporter' priority=0>, 'csv': &lt;SettingsAttribute value='scrapy.exporters.CsvItemExporter' priority=0>, 'pickle': &lt;SettingsAttribute value='scrapy.exporters.PickleItemExporter' priority=0>, 'marshal': &lt;SettingsAttribute value='scrapy.exporters.MarshalItemExporter' priority=0>}> priority=0>, 'FEED_FORMAT': &lt;SettingsAttribute value='jsonlines' priority=0>, 'DOWNLOAD_DELAY': &lt;SettingsAttribute value=0 priority=0>, 'HTTPCACHE_GZIP': &lt;SettingsAttribute value=False priority=0>, 'DOWNLOADER_MIDDLEWARES_BASE': &lt;SettingsAttribute value=&lt;BaseSettings {'scrapy.downloadermiddlewares.useragent.UserAgentMiddleware': &lt;SettingsAttribute value=400 priority=0>, 'scrapy.downloadermiddlewares.defaultheaders.DefaultHeadersMiddleware': &lt;SettingsAttribute value=550 priority=0>, 'scrapy.downloadermiddlewares.httpcompression.HttpCompressionMiddleware': &lt;SettingsAttribute value=590 priority=0>, 'scrapy.downloadermiddlewares.chunked.ChunkedTransferMiddleware': &lt;SettingsAttribute value=830 priority=0>, 'scrapy.downloadermiddlewares.robotstxt.RobotsTxtMiddleware': &lt;SettingsAttribute value=100 priority=0>, 'scrapy.downloadermiddlewares.downloadtimeout.DownloadTimeoutMiddleware': &lt;SettingsAttribute value=350 priority=0>, 'scrapy.downloadermiddlewares.redirect.RedirectMiddleware': &lt;SettingsAttribute value=600 priority=0>, 'scrapy.downloadermiddlewares.ajaxcrawl.AjaxCrawlMiddleware': &lt;SettingsAttribute value=560 priority=0>, 'scrapy.downloadermiddlewares.httpproxy.HttpProxyMiddleware': &lt;SettingsAttribute value=750 priority=0>, 'scrapy.downloadermiddlewares.httpcache.HttpCacheMiddleware': &lt;SettingsAttribute value=900 priority=0>, 'scrapy.downloadermiddlewares.httpauth.HttpAuthMiddleware': &lt;SettingsAttribute value=300 priority=0>, 'scrapy.downloadermiddlewares.retry.RetryMiddleware': &lt;SettingsAttribute value=500 priority=0>, 'scrapy.downloadermiddlewares.stats.DownloaderStats': &lt;SettingsAttribute value=850 priority=0>, 'scrapy.downloadermiddlewares.cookies.CookiesMiddleware': &lt;SettingsAttribute value=700 priority=0>, 'scrapy.downloadermiddlewares.redirect.MetaRefreshMiddleware': &lt;SettingsAttribute value=580 priority=0>}> priority=0>, 'DNSCACHE_ENABLED': &lt;SettingsAttribute value=True priority=0>, 'CONCURRENT_REQUESTS_PER_IP': &lt;SettingsAttribute value=0 priority=0>, 'EDITOR': &lt;SettingsAttribute value='nano' priority=0>, 'MAIL_HOST': &lt;SettingsAttribute value='localhost' priority=0>, 'CONCURRENT_REQUESTS': &lt;SettingsAttribute value=100 priority=20>, 'AUTOTHROTTLE_START_DELAY': &lt;SettingsAttribute value=1 priority=20>, 'CLOSESPIDER_ERRORCOUNT': &lt;SettingsAttribute value=0 priority=0>, 'STATS_CLASS': &lt;SettingsAttribute value='scrapy.statscollectors.MemoryStatsCollector' priority=0>, 'FEED_STORAGES_BASE': &lt;SettingsAttribute value=&lt;BaseSettings {'': &lt;SettingsAttribute value='scrapy.extensions.feedexport.FileFeedStorage' priority=0>, 's3': &lt;SettingsAttribute value='scrapy.extensions.feedexport.S3FeedStorage' priority=0>, 'ftp': &lt;SettingsAttribute value='scrapy.extensions.feedexport.FTPFeedStorage' priority=0>, 'file': &lt;SettingsAttribute value='scrapy.extensions.feedexport.FileFeedStorage' priority=0>, 'stdout': &lt;SettingsAttribute 
value='scrapy.extensions.feedexport.StdoutFeedStorage' priority=0>}> priority=0>, 'REDIRECT_ENABLED': &lt;SettingsAttribute value=True priority=0>, 'AUTOTHROTTLE_ENABLED': &lt;SettingsAttribute value=True priority=20>, 'COMMANDS_MODULE': &lt;SettingsAttribute value='' priority=0>, 'AUTOTHROTTLE_DEBUG': &lt;SettingsAttribute value=False priority=0>, 'NEWSPIDER_MODULE': &lt;SettingsAttribute value='acrawler.spiders' priority=20>, 'LOG_UNSERIALIZABLE_REQUESTS': &lt;SettingsAttribute value=False priority=0>, 'DOWNLOAD_MAXSIZE': &lt;SettingsAttribute value=1073741824 priority=0>, 'MAIL_PORT': &lt;SettingsAttribute value=25 priority=0>, 'REFERER_ENABLED': &lt;SettingsAttribute value=True priority=0>, 'HTTPCACHE_POLICY': &lt;SettingsAttribute value='scrapy.extensions.httpcache.DummyPolicy' priority=0>, 'STATS_DUMP': &lt;SettingsAttribute value=True priority=0>, 'MEMUSAGE_NOTIFY_MAIL': &lt;SettingsAttribute value=[] priority=0>, 'DOWNLOAD_HANDLERS': &lt;SettingsAttribute value=&lt;BaseSettings {}> priority=0>, 'LOG_DATEFORMAT': &lt;SettingsAttribute value='%Y-%m-%d %H:%M:%S' priority=0>, 'LOG_LEVEL': &lt;SettingsAttribute value='DEBUG' priority=0>, 'DOWNLOADER_HTTPCLIENTFACTORY': &lt;SettingsAttribute value='scrapy.core.downloader.webclient.ScrapyHTTPClientFactory' priority=0>, 'REDIRECT_MAX_TIMES': &lt;SettingsAttribute value=20 priority=0>, 'REDIRECT_PRIORITY_ADJUST': &lt;SettingsAttribute value=2 priority=0>, 'DUPEFILTER_CLASS': &lt;SettingsAttribute value='scrapy.dupefilters.BaseDupeFilter' priority=10>, 'RETRY_ENABLED': &lt;SettingsAttribute value=True priority=0>, 'SPIDER_CONTRACTS': &lt;SettingsAttribute value=&lt;BaseSettings {}> priority=0>, 'HTTPCACHE_ENABLED': &lt;SettingsAttribute value=True priority=20>, 'LOG_ENABLED': &lt;SettingsAttribute value=True priority=0>, 'MAIL_USER': &lt;SettingsAttribute value=None priority=0>, 'HTTPCACHE_ALWAYS_STORE': &lt;SettingsAttribute value=False priority=0>, 'LOGSTATS_INTERVAL': &lt;SettingsAttribute value=0 priority=10>, 'DEFAULT_ITEM_CLASS': &lt;SettingsAttribute value='scrapy.item.Item' priority=0>, 'DNS_TIMEOUT': &lt;SettingsAttribute value=60 priority=0>, 'DEPTH_STATS': &lt;SettingsAttribute value=True priority=0>, 'DOWNLOADER_CLIENTCONTEXTFACTORY': &lt;SettingsAttribute value='scrapy.core.downloader.contextfactory.ScrapyClientContextFactory' priority=0>, 'MEMUSAGE_CHECK_INTERVAL_SECONDS': &lt;SettingsAttribute value=60.0 priority=0>, 'EXTENSIONS_BASE': &lt;SettingsAttribute value=&lt;BaseSettings {'scrapy.extensions.corestats.CoreStats': &lt;SettingsAttribute value=0 priority=0>, 'scrapy.extensions.feedexport.FeedExporter': &lt;SettingsAttribute value=0 priority=0>, 'scrapy.extensions.memdebug.MemoryDebugger': &lt;SettingsAttribute value=0 priority=0>, 'scrapy.extensions.memusage.MemoryUsage': &lt;SettingsAttribute value=0 priority=0>, 'scrapy.extensions.logstats.LogStats': &lt;SettingsAttribute value=0 priority=0>, 'scrapy.extensions.telnet.TelnetConsole': &lt;SettingsAttribute value=0 priority=0>, 'scrapy.extensions.closespider.CloseSpider': &lt;SettingsAttribute value=0 priority=0>, 'scrapy.extensions.spiderstate.SpiderState': &lt;SettingsAttribute value=0 priority=0>, 'scrapy.extensions.throttle.AutoThrottle': &lt;SettingsAttribute value=0 priority=0>}> priority=0>, 'FEED_STORAGES': &lt;SettingsAttribute value=&lt;BaseSettings {}> priority=0>, 'BOT_NAME': &lt;SettingsAttribute value='acrawler' priority=20>, 'SPIDER_CONTRACTS_BASE': &lt;SettingsAttribute value=&lt;BaseSettings {'scrapy.contracts.default.ScrapesContract': 
&lt;SettingsAttribute value=3 priority=0>, 'scrapy.contracts.default.UrlContract': &lt;SettingsAttribute value=1 priority=0>, 'scrapy.contracts.default.ReturnsContract': &lt;SettingsAttribute value=2 priority=0>}> priority=0>, 'METAREFRESH_MAXDELAY': &lt;SettingsAttribute value=100 priority=0>, 'CONCURRENT_REQUESTS_PER_DOMAIN': &lt;SettingsAttribute value=8 priority=0>, 'HTTPCACHE_IGNORE_HTTP_CODES': &lt;SettingsAttribute value=[] priority=0>, 'KEEP_ALIVE': &lt;SettingsAttribute value=True priority=10>, 'ITEM_PROCESSOR': &lt;SettingsAttribute value='scrapy.pipelines.ItemPipelineManager' priority=0>, 'MEMUSAGE_WARNING_MB': &lt;SettingsAttribute value=0 priority=0>, 'FEED_STORE_EMPTY': &lt;SettingsAttribute value=False priority=0>, 'COOKIES_DEBUG': &lt;SettingsAttribute value=False priority=0>, 'FEED_URI': &lt;SettingsAttribute value=None priority=0>, 'SPIDER_MIDDLEWARES': &lt;SettingsAttribute value=&lt;BaseSettings {}> priority=0>, 'DOWNLOADER': &lt;SettingsAttribute value='scrapy.core.downloader.Downloader' priority=0>, 'AUTOTHROTTLE_TARGET_CONCURRENCY': &lt;SettingsAttribute value=1.0 priority=20>, 'USER_AGENT': &lt;SettingsAttribute value='acrawler' priority=20>, 'AJAXCRAWL_ENABLED': &lt;SettingsAttribute value=False priority=0>, 'COOKIES_ENABLED': &lt;SettingsAttribute value=False priority=20>, 'DNSCACHE_SIZE': &lt;SettingsAttribute value=10000 priority=0>, 'LOG_FORMAT': &lt;SettingsAttribute value='%(asctime)s [%(name)s] %(levelname)s: %(message)s' priority=0>, 'ITEM_PIPELINES': &lt;SettingsAttribute value=&lt;BaseSettings {}> priority=0>, 'LOG_FORMATTER': &lt;SettingsAttribute value='scrapy.logformatter.LogFormatter' priority=0>, 'HTTPCACHE_IGNORE_RESPONSE_CACHE_CONTROLS': &lt;SettingsAttribute value=[] priority=0>, 'METAREFRESH_ENABLED': &lt;SettingsAttribute value=True priority=0>, 'HTTPCACHE_IGNORE_MISSING': &lt;SettingsAttribute value=False priority=0>, 'HTTPCACHE_IGNORE_SCHEMES': &lt;SettingsAttribute value=['file'] priority=0>, 'SCHEDULER_MEMORY_QUEUE': &lt;SettingsAttribute value='scrapy.squeues.LifoMemoryQueue' priority=0>, 'SCHEDULER_DISK_QUEUE': &lt;SettingsAttribute value='scrapy.squeues.PickleLifoDiskQueue' priority=0>, 'RANDOMIZE_DOWNLOAD_DELAY': &lt;SettingsAttribute value=True priority=0>, 'SETTINGS_MODULE': &lt;SettingsAttribute value='acrawler.settings' priority=20>, 'TEMPLATES_DIR': &lt;SettingsAttribute value='/Users/kmike/svn/scrapy/scrapy/templates' priority=0>, 'LOG_STDOUT': &lt;SettingsAttribute value=False priority=0>, 'CONCURRENT_ITEMS': &lt;SettingsAttribute value=100 priority=0>, 'DOWNLOADER_STATS': &lt;SettingsAttribute value=True priority=0>, 'LOG_FILE': &lt;SettingsAttribute value=None priority=0>, 'HTTPCACHE_STORAGE': &lt;SettingsAttribute value='scrapy.extensions.httpcache.FilesystemCacheStorage' priority=20>, 'MEMDEBUG_ENABLED': &lt;SettingsAttribute value=False priority=0>, 'FEED_URI_PARAMS': &lt;SettingsAttribute value=None priority=0>, 'DEFAULT_REQUEST_HEADERS': &lt;SettingsAttribute value=&lt;BaseSettings {'Accept-Language': &lt;SettingsAttribute value='en' priority=0>, 'Accept': &lt;SettingsAttribute value='text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8' priority=0>}> priority=0>, 'CLOSESPIDER_TIMEOUT': &lt;SettingsAttribute value=0 priority=0>, 'SCHEDULER': &lt;SettingsAttribute value='scrapy.core.scheduler.Scheduler' priority=0>, 'SPIDER_MIDDLEWARES_BASE': &lt;SettingsAttribute value=&lt;BaseSettings {'scrapy.spidermiddlewares.referer.RefererMiddleware': &lt;SettingsAttribute value=700 priority=0>, 
'scrapy.spidermiddlewares.httperror.HttpErrorMiddleware': &lt;SettingsAttribute value=50 priority=0>, 'scrapy.spidermiddlewares.depth.DepthMiddleware': &lt;SettingsAttribute value=900 priority=0>, 'scrapy.spidermiddlewares.urllength.UrlLengthMiddleware': &lt;SettingsAttribute value=800 priority=0>, 'scrapy.spidermiddlewares.offsite.OffsiteMiddleware': &lt;SettingsAttribute value=500 priority=0>}> priority=0>, 'ITEM_PIPELINES_BASE': &lt;SettingsAttribute value=&lt;BaseSettings {}> priority=0>, 'STATSMAILER_RCPTS': &lt;SettingsAttribute value=[] priority=0>, 'MEMUSAGE_ENABLED': &lt;SettingsAttribute value=False priority=0>} [s] spider &lt;DefaultSpider 'default' at 0x10ebdcf90> [s] Useful shortcuts: [s] shelp() Shell help (print this help) [s] fetch(req_or_url) Fetch request (or URL) and update local objects [s] view(response) View response in a browser
scrapy/scrapy
diff --git a/tests/test_cmdline/__init__.py b/tests/test_cmdline/__init__.py index c2de4fbc8..7733e7180 100644 --- a/tests/test_cmdline/__init__.py +++ b/tests/test_cmdline/__init__.py @@ -68,4 +68,4 @@ class CmdlineTest(unittest.TestCase): settingsstr = settingsstr.replace(char, '"') settingsdict = json.loads(settingsstr) six.assertCountEqual(self, settingsdict.keys(), EXTENSIONS.keys()) - self.assertIn('value=200', settingsdict[EXT_PATH]) + self.assertEquals(200, settingsdict[EXT_PATH]) diff --git a/tests/test_settings/__init__.py b/tests/test_settings/__init__.py index 44b9b6df3..4acf22cba 100644 --- a/tests/test_settings/__init__.py +++ b/tests/test_settings/__init__.py @@ -302,6 +302,21 @@ class BaseSettingsTest(unittest.TestCase): self.assertListEqual(copy.get('TEST_LIST_OF_LISTS')[0], ['first_one', 'first_two']) + def test_copy_to_dict(self): + s = BaseSettings({'TEST_STRING': 'a string', + 'TEST_LIST': [1, 2], + 'TEST_BOOLEAN': False, + 'TEST_BASE': BaseSettings({1: 1, 2: 2}, 'project'), + 'TEST': BaseSettings({1: 10, 3: 30}, 'default'), + 'HASNOBASE': BaseSettings({3: 3000}, 'default')}) + self.assertDictEqual(s.copy_to_dict(), + {'HASNOBASE': {3: 3000}, + 'TEST': {1: 10, 3: 30}, + 'TEST_BASE': {1: 1, 2: 2}, + 'TEST_BOOLEAN': False, + 'TEST_LIST': [1, 2], + 'TEST_STRING': 'a string'}) + def test_freeze(self): self.settings.freeze() with self.assertRaises(TypeError) as cm: @@ -343,14 +358,6 @@ class BaseSettingsTest(unittest.TestCase): self.assertEqual(self.settings.defaults.get('BAR'), 'foo') self.assertIn('BAR', self.settings.defaults) - def test_repr(self): - settings = BaseSettings() - self.assertEqual(repr(settings), "<BaseSettings {}>") - attr = SettingsAttribute('testval', 15) - settings['testkey'] = attr - self.assertEqual(repr(settings), - "<BaseSettings {'testkey': %s}>" % repr(attr)) - class SettingsTest(unittest.TestCase):
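The patch adds `BaseSettings.copy_to_dict()`, which recursively converts nested `BaseSettings` objects into plain dicts so they can be pretty-printed or JSON-serialized without the verbose `SettingsAttribute` repr. A small usage sketch, mirroring the `test_copy_to_dict` case in the test patch above:

```python
from scrapy.settings import BaseSettings

s = BaseSettings({'TEST_STRING': 'a string',
                  'TEST_LIST': [1, 2],
                  'TEST_BASE': BaseSettings({1: 1, 2: 2}, 'project')})

# Nested BaseSettings are converted recursively; priorities are dropped
# and only the plain values remain.
assert s.copy_to_dict() == {'TEST_STRING': 'a string',
                            'TEST_LIST': [1, 2],
                            'TEST_BASE': {1: 1, 2: 2}}
```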
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks", "has_many_modified_files", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 3 }, "num_modified_files": 2 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "pytest-xdist", "pytest-mock", "pytest-asyncio" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==25.3.0 Automat==24.8.1 cffi==1.17.1 constantly==23.10.4 coverage==7.8.0 cryptography==44.0.2 cssselect==1.3.0 exceptiongroup==1.2.2 execnet==2.1.1 hyperlink==21.0.0 idna==3.10 incremental==24.7.2 iniconfig==2.1.0 jmespath==1.0.1 lxml==5.3.1 packaging==24.2 parsel==1.10.0 pluggy==1.5.0 pyasn1==0.6.1 pyasn1_modules==0.4.2 pycparser==2.22 PyDispatcher==2.0.7 pyOpenSSL==25.0.0 pytest==8.3.5 pytest-asyncio==0.26.0 pytest-cov==6.0.0 pytest-mock==3.14.0 pytest-xdist==3.6.1 queuelib==1.7.0 -e git+https://github.com/scrapy/scrapy.git@a35aec71e96b0c0288c370afa425e8e700dca8b3#egg=Scrapy service-identity==24.2.0 six==1.17.0 tomli==2.2.1 Twisted==24.11.0 typing_extensions==4.13.0 w3lib==2.3.1 zope.interface==7.2
name: scrapy channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==25.3.0 - automat==24.8.1 - cffi==1.17.1 - constantly==23.10.4 - coverage==7.8.0 - cryptography==44.0.2 - cssselect==1.3.0 - exceptiongroup==1.2.2 - execnet==2.1.1 - hyperlink==21.0.0 - idna==3.10 - incremental==24.7.2 - iniconfig==2.1.0 - jmespath==1.0.1 - lxml==5.3.1 - packaging==24.2 - parsel==1.10.0 - pluggy==1.5.0 - pyasn1==0.6.1 - pyasn1-modules==0.4.2 - pycparser==2.22 - pydispatcher==2.0.7 - pyopenssl==25.0.0 - pytest==8.3.5 - pytest-asyncio==0.26.0 - pytest-cov==6.0.0 - pytest-mock==3.14.0 - pytest-xdist==3.6.1 - queuelib==1.7.0 - service-identity==24.2.0 - six==1.17.0 - tomli==2.2.1 - twisted==24.11.0 - typing-extensions==4.13.0 - w3lib==2.3.1 - zope-interface==7.2 prefix: /opt/conda/envs/scrapy
[ "tests/test_settings/__init__.py::BaseSettingsTest::test_copy_to_dict" ]
[ "tests/test_cmdline/__init__.py::CmdlineTest::test_default_settings", "tests/test_cmdline/__init__.py::CmdlineTest::test_override_dict_settings", "tests/test_cmdline/__init__.py::CmdlineTest::test_override_settings_using_envvar", "tests/test_cmdline/__init__.py::CmdlineTest::test_override_settings_using_set_arg", "tests/test_cmdline/__init__.py::CmdlineTest::test_profiling" ]
[ "tests/test_settings/__init__.py::SettingsGlobalFuncsTest::test_get_settings_priority", "tests/test_settings/__init__.py::SettingsAttributeTest::test_overwrite_basesettings", "tests/test_settings/__init__.py::SettingsAttributeTest::test_repr", "tests/test_settings/__init__.py::SettingsAttributeTest::test_set_equal_priority", "tests/test_settings/__init__.py::SettingsAttributeTest::test_set_greater_priority", "tests/test_settings/__init__.py::SettingsAttributeTest::test_set_less_priority", "tests/test_settings/__init__.py::BaseSettingsTest::test_copy", "tests/test_settings/__init__.py::BaseSettingsTest::test_delete", "tests/test_settings/__init__.py::BaseSettingsTest::test_deprecated_attribute_defaults", "tests/test_settings/__init__.py::BaseSettingsTest::test_deprecated_attribute_overrides", "tests/test_settings/__init__.py::BaseSettingsTest::test_freeze", "tests/test_settings/__init__.py::BaseSettingsTest::test_frozencopy", "tests/test_settings/__init__.py::BaseSettingsTest::test_get", "tests/test_settings/__init__.py::BaseSettingsTest::test_getpriority", "tests/test_settings/__init__.py::BaseSettingsTest::test_getwithbase", "tests/test_settings/__init__.py::BaseSettingsTest::test_maxpriority", "tests/test_settings/__init__.py::BaseSettingsTest::test_set_calls_settings_attributes_methods_on_update", "tests/test_settings/__init__.py::BaseSettingsTest::test_set_instance_identity_on_update", "tests/test_settings/__init__.py::BaseSettingsTest::test_set_new_attribute", "tests/test_settings/__init__.py::BaseSettingsTest::test_set_settingsattribute", "tests/test_settings/__init__.py::BaseSettingsTest::test_setdict_alias", "tests/test_settings/__init__.py::BaseSettingsTest::test_setitem", "tests/test_settings/__init__.py::BaseSettingsTest::test_setmodule_alias", "tests/test_settings/__init__.py::BaseSettingsTest::test_setmodule_by_path", "tests/test_settings/__init__.py::BaseSettingsTest::test_setmodule_only_load_uppercase_vars", "tests/test_settings/__init__.py::BaseSettingsTest::test_update", "tests/test_settings/__init__.py::BaseSettingsTest::test_update_jsonstring", "tests/test_settings/__init__.py::SettingsTest::test_autopromote_dicts", "tests/test_settings/__init__.py::SettingsTest::test_getdict_autodegrade_basesettings", "tests/test_settings/__init__.py::SettingsTest::test_initial_defaults", "tests/test_settings/__init__.py::SettingsTest::test_initial_values", "tests/test_settings/__init__.py::CrawlerSettingsTest::test_deprecated_crawlersettings" ]
[]
BSD 3-Clause "New" or "Revised" License
402
networkx__networkx-1963
ec6dfae2aaebbbbf0a4620002ab795efa6430c25
2016-01-29 18:00:19
ec6dfae2aaebbbbf0a4620002ab795efa6430c25
diff --git a/networkx/algorithms/core.py b/networkx/algorithms/core.py index 2091bb97f..c98c7d77c 100644 --- a/networkx/algorithms/core.py +++ b/networkx/algorithms/core.py @@ -4,30 +4,41 @@ # Pieter Swart <[email protected]> # All rights reserved. # BSD license. +# +# Authors: Dan Schult ([email protected]) +# Jason Grout ([email protected]) +# Aric Hagberg ([email protected]) """ Find the k-cores of a graph. The k-core is found by recursively pruning nodes with degrees less than k. -See the following reference for details: +See the following references for details: An O(m) Algorithm for Cores Decomposition of Networks Vladimir Batagelj and Matjaz Zaversnik, 2003. http://arxiv.org/abs/cs.DS/0310049 -""" - -__author__ = "\n".join(['Dan Schult ([email protected])', - 'Jason Grout ([email protected])', - 'Aric Hagberg ([email protected])']) +Generalized Cores +Vladimir Batagelj and Matjaz Zaversnik, 2002. +http://arxiv.org/pdf/cs/0202039 -__all__ = ['core_number','k_core','k_shell','k_crust','k_corona','find_cores'] +For directed graphs a more general notion is that of D-cores which +looks at (k, l) restrictions on (in, out) degree. The (k, k) D-core +is the k-core. +D-cores: Measuring Collaboration of Directed Graphs Based on Degeneracy +Christos Giatsidis, Dimitrios M. Thilikos, Michalis Vazirgiannis, ICDM 2011. +http://www.graphdegeneracy.org/dcores_ICDM_2011.pdf +""" import networkx as nx -from networkx import all_neighbors from networkx.exception import NetworkXError from networkx.utils import not_implemented_for +__all__ = ['core_number', 'find_cores', 'k_core', + 'k_shell', 'k_crust', 'k_corona'] + + @not_implemented_for('multigraph') def core_number(G): """Return the core number for each vertex. @@ -50,7 +61,8 @@ def core_number(G): Raises ------ NetworkXError - The k-core is not defined for graphs with self loops or parallel edges. + The k-core is not implemented for graphs with self loops + or parallel edges. Notes ----- @@ -66,9 +78,9 @@ def core_number(G): http://arxiv.org/abs/cs.DS/0310049 """ if G.number_of_selfloops() > 0: - raise NetworkXError( - 'Input graph has self loops; the core number is not defined.' - ' Consider using G.remove_edges_from(G.selfloop_edges()).') + msg = ('Input graph has self loops which is not permitted; ' + 'Consider using G.remove_edges_from(G.selfloop_edges()).') + raise NetworkXError(msg) degrees = dict(G.degree()) # Sort nodes by degree. nodes = sorted(degrees, key=degrees.get) @@ -81,7 +93,7 @@ def core_number(G): node_pos = {v: pos for pos, v in enumerate(nodes)} # The initial guess for the core number of a node is its degree. core = degrees - nbrs = {v: set(all_neighbors(G, v)) for v in G} + nbrs = {v: list(nx.all_neighbors(G, v)) for v in G} for v in nodes: for u in nbrs[v]: if core[u] > core[v]: @@ -99,34 +111,34 @@ def core_number(G): find_cores = core_number -def _core_helper(G, func, k=None, core=None): - """Returns the subgraph induced by all nodes for which ``func`` - returns ``True``. - - ``G`` is a NetworkX graph. - - ``func`` is a function that takes three inputs: a node of ``G``, the - maximum core value, and the core number of the graph. The function - must return a Boolean value. +def _core_subgraph(G, k_filter, k=None, core=None): + """Returns the subgraph induced by nodes passing filter ``k_filter``. - ``k`` is the order of the core. If not specified, the maximum over - all core values will be returned. - - ``core`` is a dictionary mapping node to core numbers for that - node. 
If you have already computed it, you should provide it - here. If not specified, the core numbers will be computed from the - graph. + Parameters + ---------- + G : NetworkX graph + The graph or directed graph to process + k_filter : filter function + This function filters the nodes chosen. It takes three inputs: + A node of G, the filter's cutoff, and the core dict of the graph. + The function should return a Boolean value. + k : int, optional + The order of the core. If not specified use the max core number. + This value is used as the cutoff for the filter. + core : dict, optional + Precomputed core numbers keyed by node for the graph ``G``. + If not specified, the core numbers will be computed from ``G``. """ if core is None: core = core_number(G) if k is None: k = max(core.values()) - nodes = [v for v in core if func(v, k, core)] + nodes = (v for v in core if k_filter(v, k, core)) return G.subgraph(nodes).copy() -def k_core(G,k=None,core_number=None): +def k_core(G, k=None, core_number=None): """Return the k-core of G. A k-core is a maximal subgraph that contains nodes of degree k or more. @@ -171,21 +183,23 @@ def k_core(G,k=None,core_number=None): Vladimir Batagelj and Matjaz Zaversnik, 2003. http://arxiv.org/abs/cs.DS/0310049 """ - func = lambda v, k, core_number: core_number[v] >= k - return _core_helper(G, func, k, core_number) + def k_filter(v, k, c): + return c[v] >= k + return _core_subgraph(G, k_filter, k, core_number) -def k_shell(G,k=None,core_number=None): +def k_shell(G, k=None, core_number=None): """Return the k-shell of G. - The k-shell is the subgraph of nodes in the k-core but not in the (k+1)-core. + The k-shell is the subgraph induced by nodes with core number k. + That is, nodes in the k-core that are not in the (k+1)-core. Parameters ---------- G : NetworkX graph A graph or directed graph. k : int, optional - The order of the shell. If not specified return the main shell. + The order of the shell. If not specified return the outer shell. core_number : dictionary, optional Precomputed core numbers for the graph G. @@ -198,7 +212,8 @@ def k_shell(G,k=None,core_number=None): Raises ------ NetworkXError - The k-shell is not defined for graphs with self loops or parallel edges. + The k-shell is not implemented for graphs with self loops + or parallel edges. Notes ----- @@ -225,11 +240,12 @@ def k_shell(G,k=None,core_number=None): and Eran Shir, PNAS July 3, 2007 vol. 104 no. 27 11150-11154 http://www.pnas.org/content/104/27/11150.full """ - func = lambda v, k, core_number: core_number[v] == k - return _core_helper(G, func, k, core_number) + def k_filter(v, k, c): + return c[v] == k + return _core_subgraph(G, k_filter, k, core_number) -def k_crust(G,k=None,core_number=None): +def k_crust(G, k=None, core_number=None): """Return the k-crust of G. The k-crust is the graph G with the k-core removed. @@ -251,7 +267,8 @@ def k_crust(G,k=None,core_number=None): Raises ------ NetworkXError - The k-crust is not defined for graphs with self loops or parallel edges. + The k-crust is not implemented for graphs with self loops + or parallel edges. Notes ----- @@ -276,16 +293,14 @@ def k_crust(G,k=None,core_number=None): and Eran Shir, PNAS July 3, 2007 vol. 104 no. 27 11150-11154 http://www.pnas.org/content/104/27/11150.full """ - func = lambda v, k, core_number: core_number[v] <= k - # HACK These two checks are done in _core_helper, but this function - # requires k to be one less than the maximum core value instead of - # just the maximum. Therefore we duplicate the checks here. 
A better - # solution should exist... + # Default for k is one less than in _core_subgraph, so just inline. + # Filter is c[v] <= k if core_number is None: - core_number = nx.core_number(G) + core_number = find_cores(G) if k is None: k = max(core_number.values()) - 1 - return _core_helper(G, func, k, core_number) + nodes = (v for v in core_number if core_number[v] <= k) + return G.subgraph(nodes).copy() def k_corona(G, k, core_number=None): @@ -335,5 +350,6 @@ def k_corona(G, k, core_number=None): Phys. Rev. E 73, 056101 (2006) http://link.aps.org/doi/10.1103/PhysRevE.73.056101 """ - func = lambda v, k, c: c[v] == k and sum(1 for w in G[v] if c[w] >= k) == k - return _core_helper(G, func, k, core_number) + def func(v, k, c): + return c[v] == k and k == sum(1 for w in G[v] if c[w] >= k) + return _core_subgraph(G, func, k, core_number)
k-core algorithm produces incorrect output for DiGraph As per title, calling `nx.k_core(G, k = x)` does not return the x-core of a graph, if `G` is a `DiGraph`. See attached file. [6954_2011.txt](https://github.com/networkx/networkx/files/105086/6954_2011.txt) To reproduce, run: ```python import networkx as nx G = nx.DiGraph() with open("6954_2011.txt", 'r') as f: for line in f: fields = line.strip().split('\t') G.add_edge(fields[0], fields[1]) core = nx.k_core(G, k = 12) core.number_of_nodes() # Outputs "24"; expected output: "12" ``` There are only 12 nodes with (in+out) degree 12 once you remove recursively all those which don't qualify. These are: IND AUT CHE BEL USA ESP CHN FRA NLD GBR ITA DEU While ```python core.nodes() ``` says `['BEL', 'SWE', 'DEU', 'GBR', 'KOR', 'USA', 'SGP', 'MYS', 'POL', 'NLD', 'HKG', 'FRA', 'CHE', 'ESP', 'CHN', 'AUT', 'THA', 'JPN', 'TUR', 'ITA', 'IND', 'RUS', 'NOR', 'CZE']` The method seems to work ok for `nx.Graph`, or at least I've yet to find a counter-example.
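The regression test added for this issue (see the test patch below) pins down the expected behaviour on a small directed graph, where core numbers are computed from total (in + out) degree. A minimal sketch of that check:

```python
import networkx as nx

G = nx.DiGraph()
G.add_edges_from([(1, 2), (2, 1), (2, 3), (2, 4), (3, 4), (4, 3)])

# Every node survives 2-core pruning but not 3-core pruning,
# so all core numbers are 2.
assert nx.core_number(G) == {1: 2, 2: 2, 3: 2, 4: 2}
```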
networkx/networkx
diff --git a/networkx/algorithms/tests/test_core.py b/networkx/algorithms/tests/test_core.py index 48399aeed..7119159c8 100644 --- a/networkx/algorithms/tests/test_core.py +++ b/networkx/algorithms/tests/test_core.py @@ -2,8 +2,8 @@ from nose.tools import * import networkx as nx -class TestCore: +class TestCore: def setUp(self): # G is the example graph in Figure 1 from Batagelj and # Zaversnik's paper titled An O(m) Algorithm for Cores @@ -12,103 +12,114 @@ class TestCore: # shown, the 3-core is given by nodes 1-8, the 2-core by nodes # 9-16, the 1-core by nodes 17-20 and node 21 is in the # 0-core. - t1=nx.convert_node_labels_to_integers(nx.tetrahedral_graph(),1) - t2=nx.convert_node_labels_to_integers(t1,5) - G=nx.union(t1,t2) - G.add_edges_from( [(3,7), (2,11), (11,5), (11,12), (5,12), (12,19), - (12,18), (3,9), (7,9), (7,10), (9,10), (9,20), - (17,13), (13,14), (14,15), (15,16), (16,13)]) + t1 = nx.convert_node_labels_to_integers(nx.tetrahedral_graph(), 1) + t2 = nx.convert_node_labels_to_integers(t1, 5) + G = nx.union(t1, t2) + G.add_edges_from([(3, 7), (2, 11), (11, 5), (11, 12), (5, 12), + (12, 19), (12, 18), (3, 9), (7, 9), (7, 10), + (9, 10), (9, 20), (17, 13), (13, 14), (14, 15), + (15, 16), (16, 13)]) G.add_node(21) - self.G=G + self.G = G # Create the graph H resulting from the degree sequence - # [0,1,2,2,2,2,3] when using the Havel-Hakimi algorithm. + # [0, 1, 2, 2, 2, 2, 3] when using the Havel-Hakimi algorithm. - degseq=[0,1,2,2,2,2,3] + degseq = [0, 1, 2, 2, 2, 2, 3] H = nx.havel_hakimi_graph(degseq) - mapping = {6:0, 0:1, 4:3, 5:6, 3:4, 1:2, 2:5 } + mapping = {6: 0, 0: 1, 4: 3, 5: 6, 3: 4, 1: 2, 2: 5} self.H = nx.relabel_nodes(H, mapping) def test_trivial(self): """Empty graph""" G = nx.Graph() - assert_equal(nx.find_cores(G),{}) + assert_equal(nx.find_cores(G), {}) def test_find_cores(self): - cores=nx.find_cores(self.G) - nodes_by_core=[] - for val in [0,1,2,3]: - nodes_by_core.append( sorted([k for k in cores if cores[k]==val])) - assert_equal(nodes_by_core[0],[21]) - assert_equal(nodes_by_core[1],[17, 18, 19, 20]) - assert_equal(nodes_by_core[2],[9, 10, 11, 12, 13, 14, 15, 16]) + core = nx.find_cores(self.G) + nodes_by_core = [sorted([n for n in core if core[n] == val]) + for val in range(4)] + assert_equal(nodes_by_core[0], [21]) + assert_equal(nodes_by_core[1], [17, 18, 19, 20]) + assert_equal(nodes_by_core[2], [9, 10, 11, 12, 13, 14, 15, 16]) assert_equal(nodes_by_core[3], [1, 2, 3, 4, 5, 6, 7, 8]) def test_core_number(self): # smoke test real name - cores=nx.core_number(self.G) + cores = nx.core_number(self.G) def test_find_cores2(self): - cores=nx.find_cores(self.H) - nodes_by_core=[] - for val in [0,1,2]: - nodes_by_core.append( sorted([k for k in cores if cores[k]==val])) - assert_equal(nodes_by_core[0],[0]) - assert_equal(nodes_by_core[1],[1, 3]) - assert_equal(nodes_by_core[2],[2, 4, 5, 6]) + core = nx.find_cores(self.H) + nodes_by_core = [sorted([n for n in core if core[n] == val]) + for val in range(3)] + assert_equal(nodes_by_core[0], [0]) + assert_equal(nodes_by_core[1], [1, 3]) + assert_equal(nodes_by_core[2], [2, 4, 5, 6]) + + def test_directed_find_cores(Self): + '''core number had a bug for directed graphs found in issue #1959''' + # small example where too timid edge removal can make cn[2] = 3 + G = nx.DiGraph() + edges = [(1, 2), (2, 1), (2, 3), (2, 4), (3, 4), (4, 3)] + G.add_edges_from(edges) + assert_equal(nx.core_number(G), {1: 2, 2: 2, 3: 2, 4: 2}) + # small example where too aggressive edge removal can make cn[2] = 2 + more_edges = [(1, 5), 
(3, 5), (4, 5), (3, 6), (4, 6), (5, 6)] + G.add_edges_from(more_edges) + assert_equal(nx.core_number(G), {1: 3, 2: 3, 3: 3, 4: 3, 5: 3, 6: 3}) def test_main_core(self): - main_core_subgraph=nx.k_core(self.H) - assert_equal(sorted(main_core_subgraph.nodes()),[2,4,5,6]) + main_core_subgraph = nx.k_core(self.H) + assert_equal(sorted(main_core_subgraph.nodes()), [2, 4, 5, 6]) def test_k_core(self): # k=0 - k_core_subgraph=nx.k_core(self.H,k=0) - assert_equal(sorted(k_core_subgraph.nodes()),sorted(self.H.nodes())) + k_core_subgraph = nx.k_core(self.H, k=0) + assert_equal(sorted(k_core_subgraph.nodes()), sorted(self.H.nodes())) # k=1 - k_core_subgraph=nx.k_core(self.H,k=1) - assert_equal(sorted(k_core_subgraph.nodes()),[1,2,3,4,5,6]) - # k=2 - k_core_subgraph=nx.k_core(self.H,k=2) - assert_equal(sorted(k_core_subgraph.nodes()),[2,4,5,6]) + k_core_subgraph = nx.k_core(self.H, k=1) + assert_equal(sorted(k_core_subgraph.nodes()), [1, 2, 3, 4, 5, 6]) + # k = 2 + k_core_subgraph = nx.k_core(self.H, k=2) + assert_equal(sorted(k_core_subgraph.nodes()), [2, 4, 5, 6]) def test_main_crust(self): - main_crust_subgraph=nx.k_crust(self.H) - assert_equal(sorted(main_crust_subgraph.nodes()),[0,1,3]) + main_crust_subgraph = nx.k_crust(self.H) + assert_equal(sorted(main_crust_subgraph.nodes()), [0, 1, 3]) def test_k_crust(self): - # k=0 - k_crust_subgraph=nx.k_crust(self.H,k=2) - assert_equal(sorted(k_crust_subgraph.nodes()),sorted(self.H.nodes())) + # k = 0 + k_crust_subgraph = nx.k_crust(self.H, k=2) + assert_equal(sorted(k_crust_subgraph.nodes()), sorted(self.H.nodes())) # k=1 - k_crust_subgraph=nx.k_crust(self.H,k=1) - assert_equal(sorted(k_crust_subgraph.nodes()),[0,1,3]) + k_crust_subgraph = nx.k_crust(self.H, k=1) + assert_equal(sorted(k_crust_subgraph.nodes()), [0, 1, 3]) # k=2 - k_crust_subgraph=nx.k_crust(self.H,k=0) - assert_equal(sorted(k_crust_subgraph.nodes()),[0]) + k_crust_subgraph = nx.k_crust(self.H, k=0) + assert_equal(sorted(k_crust_subgraph.nodes()), [0]) def test_main_shell(self): - main_shell_subgraph=nx.k_shell(self.H) - assert_equal(sorted(main_shell_subgraph.nodes()),[2,4,5,6]) + main_shell_subgraph = nx.k_shell(self.H) + assert_equal(sorted(main_shell_subgraph.nodes()), [2, 4, 5, 6]) def test_k_shell(self): # k=0 - k_shell_subgraph=nx.k_shell(self.H,k=2) - assert_equal(sorted(k_shell_subgraph.nodes()),[2,4,5,6]) + k_shell_subgraph = nx.k_shell(self.H, k=2) + assert_equal(sorted(k_shell_subgraph.nodes()), [2, 4, 5, 6]) # k=1 - k_shell_subgraph=nx.k_shell(self.H,k=1) - assert_equal(sorted(k_shell_subgraph.nodes()),[1,3]) + k_shell_subgraph = nx.k_shell(self.H, k=1) + assert_equal(sorted(k_shell_subgraph.nodes()), [1, 3]) # k=2 - k_shell_subgraph=nx.k_shell(self.H,k=0) - assert_equal(sorted(k_shell_subgraph.nodes()),[0]) + k_shell_subgraph = nx.k_shell(self.H, k=0) + assert_equal(sorted(k_shell_subgraph.nodes()), [0]) def test_k_corona(self): # k=0 - k_corona_subgraph=nx.k_corona(self.H,k=2) - assert_equal(sorted(k_corona_subgraph.nodes()),[2,4,5,6]) + k_corona_subgraph = nx.k_corona(self.H, k=2) + assert_equal(sorted(k_corona_subgraph.nodes()), [2, 4, 5, 6]) # k=1 - k_corona_subgraph=nx.k_corona(self.H,k=1) - assert_equal(sorted(k_corona_subgraph.nodes()),[1]) + k_corona_subgraph = nx.k_corona(self.H, k=1) + assert_equal(sorted(k_corona_subgraph.nodes()), [1]) # k=2 - k_corona_subgraph=nx.k_corona(self.H,k=0) - assert_equal(sorted(k_corona_subgraph.nodes()),[0]) + k_corona_subgraph = nx.k_corona(self.H, k=0) + assert_equal(sorted(k_corona_subgraph.nodes()), [0])
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 1 }
1.112
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "nose", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y libgdal-dev graphviz" ], "python": "3.6", "reqs_path": [ "requirements/default.txt", "requirements/test.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 decorator==5.1.1 importlib-metadata==4.8.3 iniconfig==1.1.1 -e git+https://github.com/networkx/networkx.git@ec6dfae2aaebbbbf0a4620002ab795efa6430c25#egg=networkx nose==1.3.7 packaging==21.3 pluggy==1.0.0 py==1.11.0 pyparsing==3.1.4 pytest==7.0.1 tomli==1.2.3 typing_extensions==4.1.1 zipp==3.6.0
name: networkx channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - decorator==5.1.1 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - nose==1.3.7 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - tomli==1.2.3 - typing-extensions==4.1.1 - zipp==3.6.0 prefix: /opt/conda/envs/networkx
[ "networkx/algorithms/tests/test_core.py::TestCore::test_directed_find_cores" ]
[ "networkx/algorithms/tests/test_core.py::TestCore::test_find_cores", "networkx/algorithms/tests/test_core.py::TestCore::test_core_number", "networkx/algorithms/tests/test_core.py::TestCore::test_find_cores2", "networkx/algorithms/tests/test_core.py::TestCore::test_main_core", "networkx/algorithms/tests/test_core.py::TestCore::test_k_core", "networkx/algorithms/tests/test_core.py::TestCore::test_main_crust", "networkx/algorithms/tests/test_core.py::TestCore::test_k_crust", "networkx/algorithms/tests/test_core.py::TestCore::test_main_shell", "networkx/algorithms/tests/test_core.py::TestCore::test_k_shell", "networkx/algorithms/tests/test_core.py::TestCore::test_k_corona" ]
[ "networkx/algorithms/tests/test_core.py::TestCore::test_trivial" ]
[]
BSD 3-Clause
403
falconry__falcon-698
4f5d704c6f2ffa168846641afb8acad1101ee394
2016-01-29 23:52:19
b78ffaac7c412d3b3d6cd3c70dd05024d79d2cce
jmvrbanac: lgtm :+1:
diff --git a/doc/api/cookies.rst b/doc/api/cookies.rst index 84d8669..69c89d8 100644 --- a/doc/api/cookies.rst +++ b/doc/api/cookies.rst @@ -84,7 +84,7 @@ You can also instruct the client to remove a cookie with the # Clear the bad cookie resp.unset_cookie('bad_cookie') -.. _cookie-secure-atribute: +.. _cookie-secure-attribute: The Secure Attribute ~~~~~~~~~~~~~~~~~~~~ diff --git a/doc/api/index.rst b/doc/api/index.rst index b0f4b20..a389b91 100644 --- a/doc/api/index.rst +++ b/doc/api/index.rst @@ -14,3 +14,4 @@ Classes and Functions hooks routing util + testing diff --git a/doc/api/util.rst b/doc/api/util.rst index 46ab3b6..fa11b32 100644 --- a/doc/api/util.rst +++ b/doc/api/util.rst @@ -9,21 +9,6 @@ URI Functions .. automodule:: falcon.util.uri :members: -Testing -------- - -.. autoclass:: falcon.testing.TestBase - :members: - -.. autoclass:: falcon.testing.TestResource - :members: - -.. autoclass:: falcon.testing.StartResponseMock - :members: - -.. automodule:: falcon.testing - :members: rand_string, create_environ - Miscellaneous ------------- diff --git a/falcon/__init__.py b/falcon/__init__.py index c0214b8..f6256b2 100644 --- a/falcon/__init__.py +++ b/falcon/__init__.py @@ -24,7 +24,7 @@ HTTP_METHODS = ( 'TRACE', ) -DEFAULT_MEDIA_TYPE = 'application/json; charset=utf-8' +DEFAULT_MEDIA_TYPE = 'application/json; charset=UTF-8' # Hoist classes and functions into the falcon namespace diff --git a/falcon/api.py b/falcon/api.py index d05ec71..d10f452 100644 --- a/falcon/api.py +++ b/falcon/api.py @@ -230,15 +230,14 @@ class API(object): if length is not None: resp._headers['content-length'] = str(length) - # Set content type if needed - use_content_type = (body or - req.method == 'HEAD' or - resp.status == status.HTTP_416) - - if use_content_type: - media_type = self._media_type - else: + # NOTE(kgriffs): Based on wsgiref.validate's interpretation of + # RFC 2616, as commented in that module's source code. The + # presence of the Content-Length header is not similarly + # enforced. + if resp.status in (status.HTTP_204, status.HTTP_304): media_type = None + else: + media_type = self._media_type headers = resp._wsgi_headers(media_type) diff --git a/falcon/api_helpers.py b/falcon/api_helpers.py index 7fe1093..bf578b8 100644 --- a/falcon/api_helpers.py +++ b/falcon/api_helpers.py @@ -123,7 +123,7 @@ def default_serialize_error(req, resp, exception): representation = exception.to_xml() resp.body = representation - resp.content_type = preferred + resp.content_type = preferred + '; charset=UTF-8' def wrap_old_error_serializer(old_fn): diff --git a/falcon/response.py b/falcon/response.py index 6ff027d..dd9948e 100644 --- a/falcon/response.py +++ b/falcon/response.py @@ -581,7 +581,7 @@ class Response(object): # it isn't needed. items = headers.items() else: - items = list(headers.items()) # pragma: no cover + items = list(headers.items()) if self._cookies is not None: # PERF(tbug): diff --git a/falcon/util/structures.py b/falcon/util/structures.py index 82995e3..8d5347c 100644 --- a/falcon/util/structures.py +++ b/falcon/util/structures.py @@ -1,13 +1,13 @@ # Copied from the Requests library by Kenneth Reitz et al. - +# # Copyright 2013 Kenneth Reitz - +# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at - +# # http://www.apache.org/licenses/LICENSE-2.0 - +# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/falcon/util/uri.py b/falcon/util/uri.py index 2359672..2f68ec9 100644 --- a/falcon/util/uri.py +++ b/falcon/util/uri.py @@ -129,7 +129,6 @@ Returns: """ -# NOTE(kgriffs): This is actually covered, but not in py33; hence the pragma if six.PY2: # This map construction is based on urllib @@ -193,8 +192,6 @@ if six.PY2: return decoded_uri -# NOTE(kgriffs): This is actually covered, but not in py2x; hence the pragma - else: # This map construction is based on urllib diff --git a/tox.ini b/tox.ini index fbdeaa5..c7e30c0 100644 --- a/tox.ini +++ b/tox.ini @@ -1,8 +1,12 @@ [tox] -# NOTE(kgriffs): The py26, py27, and py34 evns are required when -# checking combined coverage. After running all three envs, execute -# "tools/combine_coverage.sh" to create a combined coverage report -# that can be viewed by opening ".coverage_html/index.html". +# NOTE(kgriffs): The py26, py27, and py34 envs are required when +# checking combined coverage. To check coverage: +# +# $ tox -e py26,py27,py34 && tools/combine_coverage.sh +# +# You can then drill down into coverage details by opening the HTML +# report at ".coverage_html/index.html". + envlist = py26, py27, py34,
Missing Content-Type on 404 Response Greetings, I've started [testing my webapps with wsgiref.validate](http://blog.dscpl.com.au/2015/05/returning-string-as-iterable-from-wsgi.html), and discovered that Falcon's 404 responses (and possible others) don't conform to the WSGI spec. The `Content-Type` header is required but not present. This script demonstrates: import falcon from webtest import TestApp import wsgiref.validate # the app -- note that it has no routes so all GETs will return 404 app = falcon.API() # wrap in validator middleware app = wsgiref.validate.validator(app) # wrap in test middleware app = TestApp(app) # provoke a 404 resp = app.get('/doesntexist', status=[404]) Output is: Traceback (most recent call last): <...snip...> File "/usr/local/lib/python2.7/dist-packages/falcon/api.py", line 247, in __call__ start_response(resp.status, headers) File "/usr/lib/python2.7/wsgiref/validate.py", line 167, in start_response_wrapper check_content_type(status, headers) File "/usr/lib/python2.7/wsgiref/validate.py", line 419, in check_content_type assert_(0, "No Content-Type header found in headers (%s)" % headers) File "/usr/lib/python2.7/wsgiref/validate.py", line 129, in assert_ raise AssertionError(*args) AssertionError: No Content-Type header found in headers ([('content-length', '0')]) FYI, as a sanity check, I ran the same test using Bottle in place of Falcon, and it conforms (sends Content-type with the 404 response.) I'm assuming that we want to be compliant with the WSGI spec, in which case this is a bug. Happy to submit a PR if you point me in a recommended direction to patch. (Set Content-type in `falcon/responders.py:path_not_found` ?)
falconry/falcon
diff --git a/doc/api/testing.rst b/doc/api/testing.rst new file mode 100644 index 0000000..5f4d4b5 --- /dev/null +++ b/doc/api/testing.rst @@ -0,0 +1,28 @@ +.. _testing: + +Testing +======= + +.. autoclass:: falcon.testing.TestCase + :members: + +.. autoclass:: falcon.testing.Result + :members: + +.. autoclass:: falcon.testing.SimpleTestResource + :members: + +.. autoclass:: falcon.testing.StartResponseMock + :members: + +.. automodule:: falcon.testing + :members: capture_responder_args, rand_string, create_environ + +Deprecated +---------- + +.. autoclass:: falcon.testing.TestBase + :members: + +.. autoclass:: falcon.testing.TestResource + :members: diff --git a/falcon/testing/__init__.py b/falcon/testing/__init__.py index 8990a87..966abe0 100644 --- a/falcon/testing/__init__.py +++ b/falcon/testing/__init__.py @@ -13,7 +13,9 @@ # limitations under the License. # Hoist classes and functions into the falcon.testing namespace +from falcon.testing.base import TestBase # NOQA from falcon.testing.helpers import * # NOQA +from falcon.testing.resource import capture_responder_args # NOQA +from falcon.testing.resource import SimpleTestResource, TestResource # NOQA from falcon.testing.srmock import StartResponseMock # NOQA -from falcon.testing.resource import TestResource # NOQA -from falcon.testing.base import TestBase # NOQA +from falcon.testing.test_case import Result, TestCase # NOQA diff --git a/falcon/testing/base.py b/falcon/testing/base.py index 31aeecc..5fd92b3 100644 --- a/falcon/testing/base.py +++ b/falcon/testing/base.py @@ -26,14 +26,22 @@ from falcon.testing.helpers import create_environ class TestBase(unittest.TestCase): - """Extends ``testtools.TestCase`` to support WSGI integration testing. + """Extends :py:mod:`unittest` to support WSGI functional testing. - ``TestBase`` provides a base class that provides some extra plumbing to - help simulate WSGI calls without having to actually host your API - in a server. + Warning: + This class has been deprecated and will be removed in a future + release. Please use :py:class:`~.TestCase` + instead. Note: - If ``testtools`` is not available, ``unittest`` is used instead. + If available, uses :py:mod:`testtools` in lieu of + :py:mod:`unittest`. + + This base class provides some extra plumbing for unittest-style + test cases, to help simulate WSGI calls without having to spin up + an actual web server. Simply inherit from this class in your test + case classes instead of :py:class:`unittest.TestCase` or + :py:class:`testtools.TestCase`. Attributes: api (falcon.API): An API instance to target when simulating @@ -46,6 +54,7 @@ class TestBase(unittest.TestCase): test_route (str): A simple, generated path that a test can use to add a route to the API. """ + api_class = falcon.API srmock_class = StartResponseMock diff --git a/falcon/testing/helpers.py b/falcon/testing/helpers.py index e6f8ca2..38d340e 100644 --- a/falcon/testing/helpers.py +++ b/falcon/testing/helpers.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import cgi import random import io import sys @@ -28,6 +29,33 @@ DEFAULT_HOST = 'falconframework.org' httpnow = http_now +# get_encoding_from_headers() is Copyright 2016 Kenneth Reitz, and is +# used here under the terms of the Apache License, Version 2.0. +def get_encoding_from_headers(headers): + """Returns encoding from given HTTP Header Dict. + + Args: + headers(dict): Dictionary from which to extract encoding. 
Header + names must either be lowercase or the dict must support + case-insensitive lookups. + """ + + content_type = headers.get('content-type') + + if not content_type: + return None + + content_type, params = cgi.parse_header(content_type) + + if 'charset' in params: + return params['charset'].strip("'\"") + + if 'text' in content_type: + return 'ISO-8859-1' + + return None + + def rand_string(min, max): """Returns a randomly-generated string, of a random length. @@ -40,7 +68,7 @@ def rand_string(min, max): int_gen = random.randint string_length = int_gen(min, max) return ''.join([chr(int_gen(ord(' '), ord('~'))) - for i in range(string_length)]) + for __ in range(string_length)]) def create_environ(path='/', query_string='', protocol='HTTP/1.1', @@ -50,26 +78,28 @@ def create_environ(path='/', query_string='', protocol='HTTP/1.1', """Creates a mock PEP-3333 environ ``dict`` for simulating WSGI requests. - Args: - path (str, optional): The path for the request (default '/') - query_string (str, optional): The query string to simulate, without a + Keyword Args: + path (str): The path for the request (default '/') + query_string (str): The query string to simulate, without a leading '?' (default '') - protocol (str, optional): The HTTP protocol to simulate + protocol (str): The HTTP protocol to simulate (default 'HTTP/1.1'). If set to 'HTTP/1.0', the Host header will not be added to the environment. scheme (str): URL scheme, either 'http' or 'https' (default 'http') host(str): Hostname for the request (default 'falconframework.org') - port (str or int, optional): The TCP port to simulate. Defaults to + port (str): The TCP port to simulate. Defaults to the standard port used by the given scheme (i.e., 80 for 'http' and 443 for 'https'). - headers (dict or list, optional): Headers as a ``dict`` or an - iterable collection of (*key*, *value*) ``tuple``'s + headers (dict): Headers as a ``dict`` or an iterable yielding + (*key*, *value*) ``tuple``'s app (str): Value for the ``SCRIPT_NAME`` environ variable, described in PEP-333: 'The initial portion of the request URL's "path" that corresponds to the application object, so that the application knows its virtual "location". This may be an empty string, if the application corresponds to the "root" of the server.' (default '') - body (str or unicode): The body of the request (default '') + body (str): The body of the request (default ''). Accepts both byte + strings and Unicode strings. Unicode strings are encoded as UTF-8 + in the request. method (str): The HTTP method to use (default 'GET') wsgierrors (io): The stream to use as *wsgierrors* (default ``sys.stderr``) @@ -108,6 +138,7 @@ def create_environ(path='/', query_string='', protocol='HTTP/1.1', 'SERVER_NAME': host, 'SERVER_PORT': port, + 'wsgi.version': (1, 0), 'wsgi.url_scheme': scheme, 'wsgi.input': body, 'wsgi.errors': wsgierrors or sys.stderr, @@ -135,7 +166,7 @@ def create_environ(path='/', query_string='', protocol='HTTP/1.1', body.seek(0) if content_length != 0: - env['CONTENT_LENGTH'] = content_length + env['CONTENT_LENGTH'] = str(content_length) if headers is not None: _add_headers_to_environ(env, headers) diff --git a/falcon/testing/resource.py b/falcon/testing/resource.py index 0e11f32..1f80ed9 100644 --- a/falcon/testing/resource.py +++ b/falcon/testing/resource.py @@ -12,12 +12,103 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from falcon import HTTP_200 +from json import dumps as json_dumps + +import falcon from .helpers import rand_string -class TestResource: - """Mock resource for integration testing. +def capture_responder_args(req, resp, resource, params): + """Before hook for capturing responder arguments. + + Adds the following attributes to the hooked responder's resource + class: + + * captured_req + * captured_resp + * captured_kwargs + """ + + resource.captured_req = req + resource.captured_resp = resp + resource.captured_kwargs = params + + +def set_resp_defaults(req, resp, resource, params): + """Before hook for setting default response properties.""" + + if resource._default_status is not None: + resp.status = resource._default_status + + if resource._default_body is not None: + resp.body = resource._default_body + + if resource._default_headers is not None: + resp.set_headers(resource._default_headers) + + +class SimpleTestResource(object): + """Mock resource for functional testing of framework components. + + This class implements a simple test resource that can be extended + as needed to test middleware, hooks, and the Falcon framework + itself. + + Only the ``on_get()`` responder is implemented; when adding + additional responders in child classes, they can be decorated + with the :py:meth:`falcon.testing.capture_responder_args` hook in + order to capture the *req*, *resp*, and *params* arguments that + are passed to the responder. Responders may also be decorated with + the :py:meth:`falcon.testing.set_resp_defaults` hook in order to + set *resp* properties to default *status*, *body*, and *header* + values. + + Keyword Arguments: + status (str): Default status string to use in responses + body (str): Default body string to use in responses + json (dict): Default JSON document to use in responses. Will + be serialized to a string and encoded as UTF-8. Either + *json* or *body* may be specified, but not both. + headers (dict): Default set of additional headers to include in + responses + + Attributes: + captured_req (falcon.Request): The last Request object passed + into any one of the responder methods. + captured_resp (falcon.Response): The last Response object passed + into any one of the responder methods. + captured_kwargs (dict): The last dictionary of kwargs, beyond + ``req`` and ``resp``, that were passed into any one of the + responder methods. + """ + + def __init__(self, status=None, body=None, json=None, headers=None): + self._default_status = status + self._default_headers = headers + + if json is not None: + if body is not None: + msg = 'Either json or body may be specified, but not both' + raise ValueError(msg) + + self._default_body = json_dumps(json, ensure_ascii=False) + + else: + self._default_body = body + + @falcon.before(capture_responder_args) + @falcon.before(set_resp_defaults) + def on_get(self, req, resp, **kwargs): + pass + + +class TestResource(object): + """Mock resource for functional testing. + + Warning: + This class is deprecated and will be removed in a future + release. Please use :py:class:`~.SimpleTestResource` + instead. This class implements the `on_get` responder, captures request data, and sets response body and headers. @@ -38,14 +129,12 @@ class TestResource: responder, if any. called (bool): ``True`` if `on_get` was ever called; ``False`` otherwise. 
- - """ sample_status = "200 OK" sample_body = rand_string(0, 128 * 1024) resp_headers = { - 'Content-Type': 'text/plain; charset=utf-8', + 'Content-Type': 'text/plain; charset=UTF-8', 'ETag': '10d4555ebeb53b30adf724ca198b32a2', 'X-Hello': 'OH HAI' } @@ -73,6 +162,6 @@ class TestResource: self.req, self.resp, self.kwargs = req, resp, kwargs self.called = True - resp.status = HTTP_200 + resp.status = falcon.HTTP_200 resp.body = self.sample_body resp.set_headers(self.resp_headers) diff --git a/falcon/testing/srmock.py b/falcon/testing/srmock.py index 21e7544..916c662 100644 --- a/falcon/testing/srmock.py +++ b/falcon/testing/srmock.py @@ -15,7 +15,7 @@ from falcon import util -class StartResponseMock: +class StartResponseMock(object): """Mock object representing a WSGI `start_response` callable. Attributes: diff --git a/falcon/testing/test_case.py b/falcon/testing/test_case.py new file mode 100644 index 0000000..e809b8b --- /dev/null +++ b/falcon/testing/test_case.py @@ -0,0 +1,332 @@ +# Copyright 2013 by Rackspace Hosting, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json +import wsgiref.validate + +try: + import testtools as unittest +except ImportError: # pragma: nocover + import unittest + +import falcon +import falcon.request +from falcon.util import CaseInsensitiveDict +from falcon.testing.srmock import StartResponseMock +from falcon.testing.helpers import create_environ, get_encoding_from_headers + + +class Result(object): + """Encapsulates the result of a simulated WSGI request. + + Args: + iterable (iterable): An iterable that yields zero or more + bytestrings, per PEP-3333 + status (str): An HTTP status string, including status code and + reason string + headers (list): A list of (header_name, header_value) tuples, + per PEP-3333 + + Attributes: + status (str): HTTP status string given in the response + status_code (int): The code portion of the HTTP status string + headers (CaseInsensitiveDict): A case-insensitive dictionary + containing all the headers in the response + encoding (str): Text encoding of the response body, or ``None`` + if the encoding can not be determined. + data (bytes): Raw response body, or ``bytes`` if the response + body was empty. + text (str): Decoded response body of type ``unicode`` + under Python 2.6 and 2.7, and of type ``str`` otherwise. + Raises an error if the response encoding can not be + determined. + json (dict): Deserialized JSON body. Raises an error if the + response is not JSON. 
+ """ + + def __init__(self, iterable, status, headers): + self._text = None + + self._data = b''.join(iterable) + if hasattr(iterable, 'close'): + iterable.close() + + self._status = status + self._status_code = int(status[:3]) + self._headers = CaseInsensitiveDict(headers) + + self._encoding = get_encoding_from_headers(self._headers) + + @property + def status(self): + return self._status + + @property + def status_code(self): + return self._status_code + + @property + def headers(self): + return self._headers + + @property + def encoding(self): + return self._encoding + + @property + def data(self): + return self._data + + @property + def text(self): + if self._text is None: + if not self.data: + self._text = u'' + else: + if self.encoding is None: + msg = 'Response did not specify a content encoding' + raise RuntimeError(msg) + + self._text = self.data.decode(self.encoding) + + return self._text + + @property + def json(self): + return json.loads(self.text) + + +class TestCase(unittest.TestCase): + """Extends :py:mod:`unittest` to support WSGI functional testing. + + Note: + If available, uses :py:mod:`testtools` in lieu of + :py:mod:`unittest`. + + This base class provides some extra plumbing for unittest-style + test cases, to help simulate WSGI calls without having to spin up + an actual web server. Simply inherit from this class in your test + case classes instead of :py:class:`unittest.TestCase` or + :py:class:`testtools.TestCase`. + + Attributes: + api_class (class): An API class to use when instantiating + the ``api`` instance (default: :py:class:`falcon.API`) + api (object): An API instance to target when simulating + requests (default: ``self.api_class()``) + """ + + api_class = None + + def setUp(self): + super(TestCase, self).setUp() + + if self.api_class is None: + self.api = falcon.API() + else: + self.api = self.api_class() # pylint: disable=not-callable + + # Reset to simulate "restarting" the WSGI container + falcon.request._maybe_wrap_wsgi_stream = True + + # NOTE(warsaw): Pythons earlier than 2.7 do not have a + # self.assertIn() method, so use this compatibility function + # instead. + if not hasattr(unittest.TestCase, 'assertIn'): # pragma: nocover + def assertIn(self, a, b): + self.assertTrue(a in b) + + def simulate_get(self, path='/', **kwargs): + """Simulates a GET request to a WSGI application. + + Equivalent to ``simulate_request('GET', ...)`` + + Args: + path (str): The URL path to request (default: '/') + + Keyword Args: + query_string (str): A raw query string to include in the + request (default: ``None``) + headers (dict): Additional headers to include in the request + (default: ``None``) + """ + return self.simulate_request('GET', path, **kwargs) + + def simulate_head(self, path='/', **kwargs): + """Simulates a HEAD request to a WSGI application. + + Equivalent to ``simulate_request('HEAD', ...)`` + + Args: + path (str): The URL path to request (default: '/') + + Keyword Args: + query_string (str): A raw query string to include in the + request (default: ``None``) + headers (dict): Additional headers to include in the request + (default: ``None``) + """ + return self.simulate_request('HEAD', path, **kwargs) + + def simulate_post(self, path='/', **kwargs): + """Simulates a POST request to a WSGI application. 
+ + Equivalent to ``simulate_request('POST', ...)`` + + Args: + path (str): The URL path to request (default: '/') + + Keyword Args: + query_string (str): A raw query string to include in the + request (default: ``None``) + headers (dict): Additional headers to include in the request + (default: ``None``) + body (str): A string to send as the body of the request. + Accepts both byte strings and Unicode strings + (default: ``None``). If a Unicode string is provided, + it will be encoded as UTF-8 in the request. + """ + return self.simulate_request('POST', path, **kwargs) + + def simulate_put(self, path='/', **kwargs): + """Simulates a PUT request to a WSGI application. + + Equivalent to ``simulate_request('PUT', ...)`` + + Args: + path (str): The URL path to request (default: '/') + + Keyword Args: + query_string (str): A raw query string to include in the + request (default: ``None``) + headers (dict): Additional headers to include in the request + (default: ``None``) + body (str): A string to send as the body of the request. + Accepts both byte strings and Unicode strings + (default: ``None``). If a Unicode string is provided, + it will be encoded as UTF-8 in the request. + """ + return self.simulate_request('PUT', path, **kwargs) + + def simulate_options(self, path='/', **kwargs): + """Simulates an OPTIONS request to a WSGI application. + + Equivalent to ``simulate_request('OPTIONS', ...)`` + + Args: + path (str): The URL path to request (default: '/') + + Keyword Args: + query_string (str): A raw query string to include in the + request (default: ``None``) + headers (dict): Additional headers to include in the request + (default: ``None``) + """ + return self.simulate_request('OPTIONS', path, **kwargs) + + def simulate_patch(self, path='/', **kwargs): + """Simulates a PATCH request to a WSGI application. + + Equivalent to ``simulate_request('PATCH', ...)`` + + Args: + path (str): The URL path to request (default: '/') + + Keyword Args: + query_string (str): A raw query string to include in the + request (default: ``None``) + headers (dict): Additional headers to include in the request + (default: ``None``) + body (str): A string to send as the body of the request. + Accepts both byte strings and Unicode strings + (default: ``None``). If a Unicode string is provided, + it will be encoded as UTF-8 in the request. + """ + return self.simulate_request('PATCH', path, **kwargs) + + def simulate_delete(self, path='/', **kwargs): + """Simulates a DELETE request to a WSGI application. + + Equivalent to ``simulate_request('DELETE', ...)`` + + Args: + path (str): The URL path to request (default: '/') + + Keyword Args: + query_string (str): A raw query string to include in the + request (default: ``None``) + headers (dict): Additional headers to include in the request + (default: ``None``) + """ + return self.simulate_request('DELETE', path, **kwargs) + + def simulate_request(self, method='GET', path='/', query_string=None, + headers=None, body=None, file_wrapper=None): + """Simulates a request to a WSGI application. + + Performs a WSGI request directly against ``self.api``. + + Keyword Args: + method (str): The HTTP method to use in the request + (default: 'GET') + path (str): The URL path to request (default: '/') + query_string (str): A raw query string to include in the + request (default: ``None``) + headers (dict): Additional headers to include in the request + (default: ``None``) + body (str): A string to send as the body of the request. 
+ Accepts both byte strings and Unicode strings + (default: ``None``). If a Unicode string is provided, + it will be encoded as UTF-8 in the request. + file_wrapper (callable): Callable that returns an iterable, + to be used as the value for *wsgi.file_wrapper* in the + environ (default: ``None``). + + Returns: + :py:class:`~.Result`: The result of the request + """ + + if not path.startswith('/'): + raise ValueError("path must start with '/'") + + if query_string and query_string.startswith('?'): + raise ValueError("query_string should not start with '?'") + + if '?' in path: + # NOTE(kgriffs): We could allow this, but then we'd need + # to define semantics regarding whether the path takes + # precedence over the query_string. Also, it would make + # tests less consistent, since there would be "more than + # one...way to do it." + raise ValueError( + 'path may not contain a query string. Please use the ' + 'query_string parameter instead.' + ) + + env = create_environ( + method=method, + path=path, + query_string=(query_string or ''), + headers=headers, + body=body, + file_wrapper=file_wrapper, + ) + + srmock = StartResponseMock() + validator = wsgiref.validate.validator(self.api) + iterable = validator(env, srmock) + + result = Result(iterable, srmock.status, srmock.headers) + + return result diff --git a/tests/test_after_hooks.py b/tests/test_after_hooks.py index b9b1307..c4b4ada 100644 --- a/tests/test_after_hooks.py +++ b/tests/test_after_hooks.py @@ -2,7 +2,12 @@ import functools import json import falcon -import falcon.testing as testing +from falcon import testing + + +# -------------------------------------------------------------------- +# Hooks +# -------------------------------------------------------------------- def validate_output(req, resp): @@ -71,6 +76,11 @@ def cuteness_in_the_head(req, resp): resp.set_header('X-Cuteness', 'cute') +# -------------------------------------------------------------------- +# Resources +# -------------------------------------------------------------------- + + class WrappedRespondersResource(object): @falcon.after(serialize_body) @@ -163,15 +173,18 @@ class FaultyResource(object): raise falcon.HTTPError(falcon.HTTP_743, 'Query failed') -class TestHooks(testing.TestBase): +# -------------------------------------------------------------------- +# Tests +# -------------------------------------------------------------------- - def simulate_request(self, *args, **kwargs): - return super(TestHooks, self).simulate_request( - *args, decode='utf-8', **kwargs) - def before(self): +class TestHooks(testing.TestCase): + + def setUp(self): + super(TestHooks, self).setUp() + self.resource = WrappedRespondersResource() - self.api.add_route(self.test_route, self.resource) + self.api.add_route('/', self.resource) self.wrapped_resource = WrappedClassResource() self.api.add_route('/wrapped', self.wrapped_resource) @@ -179,157 +192,145 @@ class TestHooks(testing.TestBase): self.wrapped_resource_aware = ClassResourceWithAwareHooks() self.api.add_route('/wrapped_aware', self.wrapped_resource_aware) - def test_global_hook(self): - self.assertRaises(TypeError, falcon.API, None, {}) - self.assertRaises(TypeError, falcon.API, None, 0) + def test_output_validator(self): + result = self.simulate_get() + self.assertEqual(result.status_code, 723) + self.assertEqual(result.text, '{\n "title": "Tricky"\n}') - self.api = falcon.API(after=fluffiness) - zoo_resource = ZooResource() + def test_serializer(self): + result = self.simulate_put() + self.assertEqual('{"animal": 
"falcon"}', result.text) - self.api.add_route(self.test_route, zoo_resource) + def test_hook_as_callable_class(self): + result = self.simulate_post() + self.assertEqual('smart', result.text) - result = self.simulate_request(self.test_route) - self.assertEqual(u'fluffy', result) + def test_wrapped_resource(self): + result = self.simulate_get('/wrapped') + self.assertEqual(result.status_code, 200) + self.assertEqual(result.text, 'fluffy and cute', ) - def test_global_hook_is_resource_aware(self): - self.assertRaises(TypeError, falcon.API, None, {}) - self.assertRaises(TypeError, falcon.API, None, 0) + result = self.simulate_head('/wrapped') + self.assertEqual(result.status_code, 200) - self.api = falcon.API(after=resource_aware_fluffiness) - zoo_resource = ZooResource() + result = self.simulate_post('/wrapped') + self.assertEqual(result.status_code, 405) - self.api.add_route(self.test_route, zoo_resource) + result = self.simulate_patch('/wrapped') + self.assertEqual(result.status_code, 405) - result = self.simulate_request(self.test_route) - self.assertEqual(u'fluffy', result) + # Decorator should not affect the default on_options responder + result = self.simulate_options('/wrapped') + self.assertEqual(result.status_code, 204) + self.assertFalse(result.text) + + def test_wrapped_resource_with_hooks_aware_of_resource(self): + expected = 'fluffy and cute' + + result = self.simulate_get('/wrapped_aware') + self.assertEqual(result.status_code, 200) + self.assertEqual(expected, result.text) + + for test in (self.simulate_head, self.simulate_put, self.simulate_post): + result = test('/wrapped_aware') + self.assertEqual(result.status_code, 200) + self.assertEqual(self.wrapped_resource_aware.resp.body, expected) + + result = self.simulate_patch('/wrapped_aware') + self.assertEqual(result.status_code, 405) + + # Decorator should not affect the default on_options responder + result = self.simulate_options('/wrapped_aware') + self.assertEqual(result.status_code, 204) + self.assertFalse(result.text) + + +class TestGlobalHooks(testing.TestCase): + + def test_invalid_type(self): + self.assertRaises(TypeError, falcon.API, after={}) + self.assertRaises(TypeError, falcon.API, after=0) + + def test_global_hook(self): + self.api = falcon.API(after=fluffiness) + self.api.add_route('/', ZooResource()) + + result = self.simulate_get() + self.assertEqual(result.text, 'fluffy') + + def test_global_hook_is_resource_aware(self): + self.api = falcon.API(after=resource_aware_fluffiness) + self.api.add_route('/', ZooResource()) + + result = self.simulate_get() + self.assertEqual(result.text, 'fluffy') def test_multiple_global_hook(self): self.api = falcon.API(after=[fluffiness, cuteness, Smartness()]) - zoo_resource = ZooResource() - - self.api.add_route(self.test_route, zoo_resource) + self.api.add_route('/', ZooResource()) - result = self.simulate_request(self.test_route) - self.assertEqual(u'fluffy and cute and smart', result) + result = self.simulate_get() + self.assertEqual(result.text, 'fluffy and cute and smart') def test_global_hook_wrap_default_on_options(self): self.api = falcon.API(after=fluffiness_in_the_head) - zoo_resource = ZooResource() + self.api.add_route('/', ZooResource()) - self.api.add_route(self.test_route, zoo_resource) + result = self.simulate_options() - self.simulate_request(self.test_route, method='OPTIONS') - - self.assertEqual(falcon.HTTP_204, self.srmock.status) - self.assertEqual('fluffy', self.srmock.headers_dict['X-Fluffiness']) + self.assertEqual(result.status_code, 204) + 
self.assertEqual(result.headers['X-Fluffiness'], 'fluffy') def test_global_hook_wrap_default_405(self): self.api = falcon.API(after=fluffiness_in_the_head) - zoo_resource = ZooResource() - - self.api.add_route(self.test_route, zoo_resource) + self.api.add_route('/', ZooResource()) - self.simulate_request(self.test_route, method='POST') + result = self.simulate_post() - self.assertEqual(falcon.HTTP_405, self.srmock.status) - self.assertEqual('fluffy', self.srmock.headers_dict['X-Fluffiness']) + self.assertEqual(result.status_code, 405) + self.assertEqual(result.headers['X-Fluffiness'], 'fluffy') def test_multiple_global_hooks_wrap_default_on_options(self): self.api = falcon.API(after=[fluffiness_in_the_head, cuteness_in_the_head]) - zoo_resource = ZooResource() - self.api.add_route(self.test_route, zoo_resource) + self.api.add_route('/', ZooResource()) - self.simulate_request(self.test_route, method='OPTIONS') + result = self.simulate_options() - self.assertEqual(falcon.HTTP_204, self.srmock.status) - self.assertEqual('fluffy', self.srmock.headers_dict['X-Fluffiness']) - self.assertEqual('cute', self.srmock.headers_dict['X-Cuteness']) + self.assertEqual(result.status_code, 204) + self.assertEqual(result.headers['X-Fluffiness'], 'fluffy') + self.assertEqual(result.headers['X-Cuteness'], 'cute') def test_multiple_global_hooks_wrap_default_405(self): self.api = falcon.API(after=[fluffiness_in_the_head, cuteness_in_the_head]) - zoo_resource = ZooResource() - self.api.add_route(self.test_route, zoo_resource) + self.api.add_route('/', ZooResource()) - self.simulate_request(self.test_route, method='POST') + result = self.simulate_post() - self.assertEqual(falcon.HTTP_405, self.srmock.status) - self.assertEqual('fluffy', self.srmock.headers_dict['X-Fluffiness']) - self.assertEqual('cute', self.srmock.headers_dict['X-Cuteness']) + self.assertEqual(result.status_code, 405) + self.assertEqual(result.headers['X-Fluffiness'], 'fluffy') + self.assertEqual(result.headers['X-Cuteness'], 'cute') def test_global_after_hooks_run_after_exception(self): self.api = falcon.API(after=[fluffiness, resource_aware_cuteness, Smartness()]) - self.api.add_route(self.test_route, FaultyResource()) - - actual_body = self.simulate_request(self.test_route) - self.assertEqual(falcon.HTTP_743, self.srmock.status) - self.assertEqual(u'fluffy and cute and smart', actual_body) + self.api.add_route('/', FaultyResource()) - def test_output_validator(self): - actual_body = self.simulate_request(self.test_route) - self.assertEqual(falcon.HTTP_723, self.srmock.status) - self.assertEqual(u'{\n "title": "Tricky"\n}', actual_body) - - def test_serializer(self): - actual_body = self.simulate_request(self.test_route, method='PUT') - - self.assertEqual(u'{"animal": "falcon"}', actual_body) - - def test_hook_as_callable_class(self): - actual_body = self.simulate_request(self.test_route, method='POST') - self.assertEqual(u'smart', actual_body) - - def test_wrapped_resource(self): - actual_body = self.simulate_request('/wrapped') - self.assertEqual(falcon.HTTP_200, self.srmock.status) - self.assertEqual(u'fluffy and cute', actual_body) - - self.simulate_request('/wrapped', method='HEAD') - self.assertEqual(falcon.HTTP_200, self.srmock.status) - - self.simulate_request('/wrapped', method='POST') - self.assertEqual(falcon.HTTP_405, self.srmock.status) - - self.simulate_request('/wrapped', method='PATCH') - self.assertEqual(falcon.HTTP_405, self.srmock.status) - - # decorator does not affect the default on_options - body = 
self.simulate_request('/wrapped', method='OPTIONS') - self.assertEqual(falcon.HTTP_204, self.srmock.status) - self.assertEqual(u'', body) - - def test_wrapped_resource_with_hooks_aware_of_resource(self): - expected = u'fluffy and cute' - - actual_body = self.simulate_request('/wrapped_aware') - self.assertEqual(falcon.HTTP_200, self.srmock.status) - self.assertEqual(expected, actual_body) - - for method in ('HEAD', 'PUT', 'POST'): - self.simulate_request('/wrapped_aware', method=method) - self.assertEqual(falcon.HTTP_200, self.srmock.status) - self.assertEqual(expected, self.wrapped_resource_aware.resp.body) - - self.simulate_request('/wrapped_aware', method='PATCH') - self.assertEqual(falcon.HTTP_405, self.srmock.status) - - # decorator does not affect the default on_options - body = self.simulate_request('/wrapped_aware', method='OPTIONS') - self.assertEqual(falcon.HTTP_204, self.srmock.status) - self.assertEqual(u'', body) + result = self.simulate_get() + self.assertEqual(result.status_code, 743) + self.assertEqual(result.text, 'fluffy and cute and smart') def test_customized_options(self): self.api = falcon.API(after=fluffiness) - self.api.add_route('/one', SingleResource()) - body = self.simulate_request('/one', method='OPTIONS') - self.assertEqual(falcon.HTTP_501, self.srmock.status) - self.assertEqual(u'fluffy', body) - self.assertNotIn('allow', self.srmock.headers_dict) + result = self.simulate_options('/one') + + self.assertEqual(result.status_code, 501) + self.assertEqual(result.text, 'fluffy') + self.assertNotIn(result.headers, 'allow') diff --git a/tests/test_error_handlers.py b/tests/test_error_handlers.py index c87500e..30d155a 100644 --- a/tests/test_error_handlers.py +++ b/tests/test_error_handlers.py @@ -1,7 +1,5 @@ -import json - import falcon -import falcon.testing as testing +from falcon import testing def capture_error(ex, req, resp, params): @@ -31,6 +29,7 @@ class CustomException(CustomBaseException): class ErroredClassResource(object): + def on_get(self, req, resp): raise Exception('Plain Exception') @@ -41,44 +40,35 @@ class ErroredClassResource(object): raise CustomException('CustomException') -class TestErrorHandler(testing.TestBase): +class TestErrorHandler(testing.TestCase): + + def setUp(self): + super(TestErrorHandler, self).setUp() + self.api.add_route('/', ErroredClassResource()) def test_caught_error(self): self.api.add_error_handler(Exception, capture_error) - self.api.add_route(self.test_route, ErroredClassResource()) + result = self.simulate_get() + self.assertEqual(result.text, 'error: Plain Exception') - body = self.simulate_request(self.test_route) - self.assertEqual([b'error: Plain Exception'], body) - - body = self.simulate_request(self.test_route, method='HEAD') - self.assertEqual(falcon.HTTP_723, self.srmock.status) - self.assertEqual([], body) + result = self.simulate_head() + self.assertEqual(result.status_code, 723) + self.assertFalse(result.data) def test_uncaught_error(self): self.api.add_error_handler(CustomException, capture_error) - - self.api.add_route(self.test_route, ErroredClassResource()) - - self.assertRaises(Exception, - self.simulate_request, self.test_route) + self.assertRaises(Exception, self.simulate_get) def test_uncaught_error_else(self): - self.api.add_route(self.test_route, ErroredClassResource()) - - self.assertRaises(Exception, - self.simulate_request, self.test_route) + self.assertRaises(Exception, self.simulate_get) def test_converted_error(self): self.api.add_error_handler(CustomException) - 
self.api.add_route(self.test_route, ErroredClassResource()) - - body = self.simulate_request(self.test_route, method='DELETE') - self.assertEqual(falcon.HTTP_792, self.srmock.status) - - info = json.loads(body[0].decode()) - self.assertEqual('Internet crashed!', info['title']) + result = self.simulate_delete() + self.assertEqual(result.status_code, 792) + self.assertEqual(result.json[u'title'], u'Internet crashed!') def test_handle_not_defined(self): self.assertRaises(AttributeError, @@ -87,17 +77,13 @@ class TestErrorHandler(testing.TestBase): def test_subclass_error(self): self.api.add_error_handler(CustomBaseException, capture_error) - self.api.add_route(self.test_route, ErroredClassResource()) - - body = self.simulate_request(self.test_route, method='DELETE') - self.assertEqual(falcon.HTTP_723, self.srmock.status) - self.assertEqual([b'error: CustomException'], body) + result = self.simulate_delete() + self.assertEqual(result.status_code, 723) + self.assertEqual(result.text, 'error: CustomException') def test_error_order(self): self.api.add_error_handler(Exception, capture_error) self.api.add_error_handler(Exception, handle_error_first) - self.api.add_route(self.test_route, ErroredClassResource()) - - body = self.simulate_request(self.test_route) - self.assertEqual([b'first error handler'], body) + result = self.simulate_get() + self.assertEqual(result.text, 'first error handler') diff --git a/tests/test_headers.py b/tests/test_headers.py index 7b2f82c..fda337c 100644 --- a/tests/test_headers.py +++ b/tests/test_headers.py @@ -1,25 +1,14 @@ from collections import defaultdict from datetime import datetime +import ddt import six -from testtools.matchers import Contains, Not import falcon -import falcon.testing as testing +from falcon import testing -class StatusTestResource: - sample_body = testing.rand_string(0, 128 * 1024) - - def __init__(self, status): - self.status = status - - def on_get(self, req, resp): - resp.status = self.status - resp.body = self.sample_body - - -class XmlResource: +class XmlResource(object): def __init__(self, content_type): self.content_type = content_type @@ -27,16 +16,7 @@ class XmlResource: resp.set_header('content-type', self.content_type) -class DefaultContentTypeResource: - def __init__(self, body=None): - self.body = body - - def on_get(self, req, resp): - if self.body is not None: - resp.body = self.body - - -class HeaderHelpersResource: +class HeaderHelpersResource(object): def __init__(self, last_modified=None): if last_modified is not None: @@ -106,7 +86,7 @@ class HeaderHelpersResource: self.resp = resp -class LocationHeaderUnicodeResource: +class LocationHeaderUnicodeResource(object): URL1 = u'/\u00e7runchy/bacon' URL2 = u'ab\u00e7' if six.PY3 else 'ab\xc3\xa7' @@ -120,18 +100,25 @@ class LocationHeaderUnicodeResource: resp.content_location = self.URL1 -class UnicodeHeaderResource: +class UnicodeHeaderResource(object): def on_get(self, req, resp): resp.set_headers([ (u'X-auTH-toKEN', 'toomanysecrets'), ('Content-TYpE', u'application/json'), - (u'X-symBOl', u'\u0040'), - (u'X-symb\u00F6l', u'\u00FF'), + (u'X-symBOl', u'@'), + + # TODO(kgriffs): This will cause the wsgiref validator + # to raise an error. Falcon itself does not currently + # check for non-ASCII chars to save some CPU cycles. The + # app is responsible for doing the right thing, and + # validating its own output as needed. 
+ # + # (u'X-symb\u00F6l', u'\u00FF'), ]) -class VaryHeaderResource: +class VaryHeaderResource(object): def __init__(self, vary): self.vary = vary @@ -141,7 +128,7 @@ class VaryHeaderResource: resp.vary = self.vary -class LinkHeaderResource: +class LinkHeaderResource(object): def __init__(self): self._links = [] @@ -156,7 +143,7 @@ class LinkHeaderResource: resp.add_link(*args, **kwargs) -class AppendHeaderResource: +class AppendHeaderResource(object): def on_get(self, req, resp): resp.append_header('X-Things', 'thing-1') @@ -172,33 +159,35 @@ class AppendHeaderResource: resp.append_header('X-Things', 'thing-1') -class TestHeaders(testing.TestBase): [email protected] +class TestHeaders(testing.TestCase): - def before(self): - self.resource = testing.TestResource() - self.api.add_route(self.test_route, self.resource) + def setUp(self): + super(TestHeaders, self).setUp() - def test_content_length(self): - self.simulate_request(self.test_route) + self.sample_body = testing.rand_string(0, 128 * 1024) + self.resource = testing.SimpleTestResource(body=self.sample_body) + self.api.add_route('/', self.resource) - headers = self.srmock.headers + def test_content_length(self): + result = self.simulate_get() - # Test Content-Length header set - content_length = str(len(self.resource.sample_body)) - content_length_header = ('content-length', content_length) - self.assertThat(headers, Contains(content_length_header)) + content_length = str(len(self.sample_body)) + self.assertEqual(result.headers['Content-Length'], content_length) def test_default_value(self): - self.simulate_request(self.test_route) + self.simulate_get() - value = self.resource.req.get_header('X-Not-Found') or '876' + req = self.resource.captured_req + value = req.get_header('X-Not-Found') or '876' self.assertEqual(value, '876') def test_required_header(self): - self.simulate_request(self.test_route) + self.simulate_get() try: - self.resource.req.get_header('X-Not-Found', required=True) + req = self.resource.captured_req + req.get_header('X-Not-Found', required=True) self.fail('falcon.HTTPMissingHeader not raised') except falcon.HTTPMissingHeader as ex: self.assertIsInstance(ex, falcon.HTTPBadRequest) @@ -206,45 +195,13 @@ class TestHeaders(testing.TestBase): expected_desc = 'The X-Not-Found header is required.' 
self.assertEqual(ex.description, expected_desc) - def test_no_body_on_100(self): - self.resource = StatusTestResource(falcon.HTTP_100) - self.api.add_route('/1xx', self.resource) - - body = self.simulate_request('/1xx') - self.assertThat(self.srmock.headers_dict, - Not(Contains('Content-Length'))) + @ddt.data(falcon.HTTP_204, falcon.HTTP_304) + def test_no_content_length(self, status): + self.api.add_route('/xxx', testing.SimpleTestResource(status=status)) - self.assertEqual(body, []) - - def test_no_body_on_101(self): - self.resource = StatusTestResource(falcon.HTTP_101) - self.api.add_route('/1xx', self.resource) - - body = self.simulate_request('/1xx') - self.assertThat(self.srmock.headers_dict, - Not(Contains('Content-Length'))) - - self.assertEqual(body, []) - - def test_no_body_on_204(self): - self.resource = StatusTestResource(falcon.HTTP_204) - self.api.add_route('/204', self.resource) - - body = self.simulate_request('/204') - self.assertThat(self.srmock.headers_dict, - Not(Contains('Content-Length'))) - - self.assertEqual(body, []) - - def test_no_body_on_304(self): - self.resource = StatusTestResource(falcon.HTTP_304) - self.api.add_route('/304', self.resource) - - body = self.simulate_request('/304') - self.assertThat(self.srmock.headers_dict, - Not(Contains('Content-Length'))) - - self.assertEqual(body, []) + result = self.simulate_get('/xxx') + self.assertNotIn('Content-Length', result.headers) + self.assertFalse(result.data) def test_content_header_missing(self): environ = testing.create_environ() @@ -252,248 +209,228 @@ class TestHeaders(testing.TestBase): for header in ('Content-Type', 'Content-Length'): self.assertIs(req.get_header(header), None) - def test_passthrough_req_headers(self): - req_headers = { + def test_passthrough_request_headers(self): + request_headers = { 'X-Auth-Token': 'Setec Astronomy', 'Content-Type': 'text/plain; charset=utf-8' } - self.simulate_request(self.test_route, headers=req_headers) + self.simulate_get(headers=request_headers) - for name, expected_value in req_headers.items(): - actual_value = self.resource.req.get_header(name) + for name, expected_value in request_headers.items(): + actual_value = self.resource.captured_req.get_header(name) self.assertEqual(actual_value, expected_value) - self.simulate_request(self.test_route, - headers=self.resource.req.headers) + self.simulate_get(headers=self.resource.captured_req.headers) # Compare the request HTTP headers with the original headers - for name, expected_value in req_headers.items(): - actual_value = self.resource.req.get_header(name) + for name, expected_value in request_headers.items(): + actual_value = self.resource.captured_req.get_header(name) self.assertEqual(actual_value, expected_value) - def test_get_raw_headers(self): + def test_headers_as_list(self): headers = [ ('Client-ID', '692ba466-74bb-11e3-bf3f-7567c531c7ca'), ('Accept', 'audio/*; q=0.2, audio/basic') ] + # Unit test environ = testing.create_environ(headers=headers) req = falcon.Request(environ) for name, value in headers: self.assertIn((name.upper(), value), req.headers.items()) - def test_passthrough_resp_headers(self): - self.simulate_request(self.test_route) + # Functional test + self.api.add_route('/', testing.SimpleTestResource(headers=headers)) + result = self.simulate_get() - resp_headers = self.srmock.headers - - for name, value in self.resource.resp_headers.items(): - expected = (name.lower(), value) - self.assertThat(resp_headers, Contains(expected)) + for name, value in headers: + 
self.assertEqual(result.headers[name], value) def test_default_media_type(self): - self.resource = DefaultContentTypeResource('Hello world!') - self.api.add_route(self.test_route, self.resource) - self.simulate_request(self.test_route) + resource = testing.SimpleTestResource(body='Hello world!') + self._check_header(resource, 'Content-Type', falcon.DEFAULT_MEDIA_TYPE) - content_type = falcon.DEFAULT_MEDIA_TYPE - self.assertIn(('content-type', content_type), self.srmock.headers) + @ddt.data( + ('text/plain; charset=UTF-8', u'Hello Unicode! \U0001F638'), - def test_custom_media_type(self): - self.resource = DefaultContentTypeResource('Hello world!') - self.api = falcon.API(media_type='application/atom+xml') - self.api.add_route(self.test_route, self.resource) - self.simulate_request(self.test_route) + # NOTE(kgriffs): This only works because the client defaults to + # ISO-8859-1 IFF the media type is 'text'. + ('text/plain', 'Hello ISO-8859-1!'), + ) + @ddt.unpack + def test_override_default_media_type(self, content_type, body): + self.api = falcon.API(media_type=content_type) + self.api.add_route('/', testing.SimpleTestResource(body=body)) + result = self.simulate_get() - content_type = 'application/atom+xml' - self.assertIn(('content-type', content_type), self.srmock.headers) + self.assertEqual(result.text, body) + self.assertEqual(result.headers['Content-Type'], content_type) + + def test_override_default_media_type_missing_encoding(self): + body = b'{}' + + self.api = falcon.API(media_type='application/json') + self.api.add_route('/', testing.SimpleTestResource(body=body)) + result = self.simulate_get() + + self.assertEqual(result.data, body) + self.assertRaises(RuntimeError, lambda: result.text) + self.assertRaises(RuntimeError, lambda: result.json) def test_response_header_helpers_on_get(self): last_modified = datetime(2013, 1, 1, 10, 30, 30) - self.resource = HeaderHelpersResource(last_modified) - self.api.add_route(self.test_route, self.resource) - self.simulate_request(self.test_route) + resource = HeaderHelpersResource(last_modified) + self.api.add_route('/', resource) + result = self.simulate_get() - resp = self.resource.resp + resp = resource.resp content_type = 'x-falcon/peregrine' - self.assertEqual(content_type, resp.content_type) - self.assertIn(('content-type', content_type), self.srmock.headers) + self.assertEqual(resp.content_type, content_type) + self.assertEqual(result.headers['Content-Type'], content_type) cache_control = ('public, private, no-cache, no-store, ' 'must-revalidate, proxy-revalidate, max-age=3600, ' 's-maxage=60, no-transform') - self.assertEqual(cache_control, resp.cache_control) - self.assertIn(('cache-control', cache_control), self.srmock.headers) + self.assertEqual(resp.cache_control, cache_control) + self.assertEqual(result.headers['Cache-Control'], cache_control) etag = 'fa0d1a60ef6616bb28038515c8ea4cb2' - self.assertEqual(etag, resp.etag) - self.assertIn(('etag', etag), self.srmock.headers) + self.assertEqual(resp.etag, etag) + self.assertEqual(result.headers['Etag'], etag) - last_modified_http_date = 'Tue, 01 Jan 2013 10:30:30 GMT' - self.assertEqual(last_modified_http_date, resp.last_modified) - self.assertIn(('last-modified', last_modified_http_date), - self.srmock.headers) + lm_date = 'Tue, 01 Jan 2013 10:30:30 GMT' + self.assertEqual(resp.last_modified, lm_date) + self.assertEqual(result.headers['Last-Modified'], lm_date) - self.assertEqual('3601', resp.retry_after) - self.assertIn(('retry-after', '3601'), self.srmock.headers) + 
self.assertEqual(resp.retry_after, '3601') + self.assertEqual(result.headers['Retry-After'], '3601') - self.assertEqual('/things/87', resp.location) - self.assertIn(('location', '/things/87'), self.srmock.headers) + self.assertEqual(resp.location, '/things/87') + self.assertEqual(result.headers['Location'], '/things/87') - self.assertEqual('/things/78', resp.content_location) - self.assertIn(('content-location', '/things/78'), self.srmock.headers) + self.assertEqual(resp.content_location, '/things/78') + self.assertEqual(result.headers['Content-Location'], '/things/78') - self.assertEqual('bytes 0-499/10240', resp.content_range) - self.assertIn(('content-range', 'bytes 0-499/10240'), - self.srmock.headers) + content_range = 'bytes 0-499/10240' + self.assertEqual(resp.content_range, content_range) + self.assertEqual(result.headers['Content-Range'], content_range) - resp.content_range = (0, 499, 10 * 1024, 'bytes') - self.assertEqual('bytes 0-499/10240', resp.content_range) - self.assertIn(('content-range', 'bytes 0-499/10240'), - self.srmock.headers) + resp.content_range = (1, 499, 10 * 1024, 'bytes') + self.assertEqual(resp.content_range, 'bytes 1-499/10240') - req_headers = { - 'Range': 'items=0-25', - } - self.simulate_request(self.test_route, headers=req_headers) - - resp.content_range = (0, 25, 100, 'items') - self.assertEqual('items 0-25/100', resp.content_range) - self.assertIn(('content-range', 'items 0-25/100'), - self.srmock.headers) + req_headers = {'Range': 'items=0-25'} + result = self.simulate_get(headers=req_headers) + self.assertEqual(result.headers['Content-Range'], 'items 0-25/100') # Check for duplicate headers hist = defaultdict(lambda: 0) - for name, value in self.srmock.headers: + for name, value in result.headers.items(): hist[name] += 1 self.assertEqual(1, hist[name]) def test_unicode_location_headers(self): - self.api.add_route(self.test_route, LocationHeaderUnicodeResource()) - self.simulate_request(self.test_route) - - location = ('location', '/%C3%A7runchy/bacon') - self.assertIn(location, self.srmock.headers) + self.api.add_route('/', LocationHeaderUnicodeResource()) - content_location = ('content-location', 'ab%C3%A7') - self.assertIn(content_location, self.srmock.headers) + result = self.simulate_get() + self.assertEqual(result.headers['Location'], '/%C3%A7runchy/bacon') + self.assertEqual(result.headers['Content-Location'], 'ab%C3%A7') # Test with the values swapped - self.simulate_request(self.test_route, method='HEAD') - - location = ('location', 'ab%C3%A7') - self.assertIn(location, self.srmock.headers) - - content_location = ('content-location', '/%C3%A7runchy/bacon') - self.assertIn(content_location, self.srmock.headers) + result = self.simulate_head() + self.assertEqual(result.headers['Content-Location'], + '/%C3%A7runchy/bacon') + self.assertEqual(result.headers['Location'], 'ab%C3%A7') def test_unicode_headers(self): - self.api.add_route(self.test_route, UnicodeHeaderResource()) - self.simulate_request(self.test_route) - - expect = ('x-auth-token', 'toomanysecrets') - self.assertIn(expect, self.srmock.headers) + self.api.add_route('/', UnicodeHeaderResource()) - expect = ('content-type', 'application/json') - self.assertIn(expect, self.srmock.headers) + result = self.simulate_get('/') - expect = ('x-symbol', '@') - self.assertIn(expect, self.srmock.headers) - - expect = ('x-symb\xF6l', '\xFF') - self.assertIn(expect, self.srmock.headers) + self.assertEqual(result.headers['Content-Type'], 'application/json') + 
self.assertEqual(result.headers['X-Auth-Token'], 'toomanysecrets') + self.assertEqual(result.headers['X-Symbol'], '@') def test_response_set_and_get_header(self): - self.resource = HeaderHelpersResource() - self.api.add_route(self.test_route, self.resource) + resource = HeaderHelpersResource() + self.api.add_route('/', resource) for method in ('HEAD', 'POST', 'PUT'): - self.simulate_request(self.test_route, method=method) + result = self.simulate_request(method=method) content_type = 'x-falcon/peregrine' - self.assertIn(('content-type', content_type), self.srmock.headers) - self.assertEquals(self.resource.resp.get_header('content-TyPe'), content_type) - self.assertIn(('cache-control', 'no-store'), self.srmock.headers) - self.assertIn(('x-auth-token', 'toomanysecrets'), - self.srmock.headers) + self.assertEqual(result.headers['Content-Type'], content_type) + self.assertEqual(resource.resp.get_header('content-TyPe'), + content_type) + + self.assertEqual(result.headers['Cache-Control'], 'no-store') + self.assertEqual(result.headers['X-Auth-Token'], 'toomanysecrets') - self.assertEqual(None, self.resource.resp.location) - self.assertEquals(self.resource.resp.get_header('not-real'), None) + self.assertEqual(resource.resp.location, None) + self.assertEqual(resource.resp.get_header('not-real'), None) # Check for duplicate headers - hist = defaultdict(lambda: 0) - for name, value in self.srmock.headers: + hist = defaultdict(int) + for name, value in result.headers.items(): hist[name] += 1 - self.assertEqual(1, hist[name]) + self.assertEqual(hist[name], 1) def test_response_append_header(self): - self.resource = AppendHeaderResource() - self.api.add_route(self.test_route, self.resource) + self.api.add_route('/', AppendHeaderResource()) for method in ('HEAD', 'GET'): - self.simulate_request(self.test_route, method=method) - value = self.srmock.headers_dict['x-things'] - self.assertEqual('thing-1,thing-2,thing-3', value) + result = self.simulate_request(method=method) + value = result.headers['x-things'] + self.assertEqual(value, 'thing-1,thing-2,thing-3') - self.simulate_request(self.test_route, method='POST') - value = self.srmock.headers_dict['x-things'] - self.assertEqual('thing-1', value) + result = self.simulate_request(method='POST') + self.assertEqual(result.headers['x-things'], 'thing-1') def test_vary_star(self): - self.resource = VaryHeaderResource(['*']) - self.api.add_route(self.test_route, self.resource) - self.simulate_request(self.test_route) - - self.assertIn(('vary', '*'), self.srmock.headers) - - def test_vary_header(self): - self.resource = VaryHeaderResource(['accept-encoding']) - self.api.add_route(self.test_route, self.resource) - self.simulate_request(self.test_route) - - self.assertIn(('vary', 'accept-encoding'), self.srmock.headers) - - def test_vary_headers(self): - self.resource = VaryHeaderResource(['accept-encoding', 'x-auth-token']) - self.api.add_route(self.test_route, self.resource) - self.simulate_request(self.test_route) - - vary = 'accept-encoding, x-auth-token' - self.assertIn(('vary', vary), self.srmock.headers) - - def test_vary_headers_tuple(self): - self.resource = VaryHeaderResource(('accept-encoding', 'x-auth-token')) - self.api.add_route(self.test_route, self.resource) - self.simulate_request(self.test_route) - - vary = 'accept-encoding, x-auth-token' - self.assertIn(('vary', vary), self.srmock.headers) - - def test_no_content_type(self): - self.resource = DefaultContentTypeResource() - self.api.add_route(self.test_route, self.resource) - 
self.simulate_request(self.test_route) - - self.assertNotIn('content-type', self.srmock.headers_dict) + self.api.add_route('/', VaryHeaderResource(['*'])) + result = self.simulate_get() + self.assertEqual(result.headers['vary'], '*') + + @ddt.data( + (['accept-encoding'], 'accept-encoding'), + (['accept-encoding', 'x-auth-token'], 'accept-encoding, x-auth-token'), + (('accept-encoding', 'x-auth-token'), 'accept-encoding, x-auth-token'), + ) + @ddt.unpack + def test_vary_header(self, vary, expected_value): + resource = VaryHeaderResource(vary) + self._check_header(resource, 'Vary', expected_value) + + def test_content_type_no_body(self): + self.api.add_route('/', testing.SimpleTestResource()) + result = self.simulate_get() + + # NOTE(kgriffs): Even when there is no body, Content-Type + # should still be included per wsgiref.validate + self.assertIn('Content-Type', result.headers) + self.assertEqual(result.headers['Content-Length'], '0') + + @ddt.data(falcon.HTTP_204, falcon.HTTP_304) + def test_no_content_type(self, status): + self.api.add_route('/', testing.SimpleTestResource(status=status)) + + result = self.simulate_get() + self.assertNotIn('Content-Type', result.headers) def test_custom_content_type(self): content_type = 'application/xml; charset=utf-8' - self.resource = XmlResource(content_type) - self.api.add_route(self.test_route, self.resource) - - self.simulate_request(self.test_route) - self.assertIn(('content-type', content_type), self.srmock.headers) + resource = XmlResource(content_type) + self._check_header(resource, 'Content-Type', content_type) def test_add_link_single(self): expected_value = '</things/2842>; rel=next' - self.resource = LinkHeaderResource() - self.resource.add_link('/things/2842', 'next') + resource = LinkHeaderResource() + resource.add_link('/things/2842', 'next') - self._check_link_header(expected_value) + self._check_link_header(resource, expected_value) def test_add_link_multiple(self): expected_value = ( @@ -506,26 +443,26 @@ class TestHeaders(testing.TestBase): uri = u'ab\u00e7' if six.PY3 else 'ab\xc3\xa7' - self.resource = LinkHeaderResource() - self.resource.add_link('/things/2842', 'next') - self.resource.add_link(u'http://\u00e7runchy/bacon', 'contents') - self.resource.add_link(uri, 'http://example.com/ext-type') - self.resource.add_link(uri, u'http://example.com/\u00e7runchy') - self.resource.add_link(uri, u'https://example.com/too-\u00e7runchy') - self.resource.add_link('/alt-thing', - u'alternate http://example.com/\u00e7runchy') + resource = LinkHeaderResource() + resource.add_link('/things/2842', 'next') + resource.add_link(u'http://\u00e7runchy/bacon', 'contents') + resource.add_link(uri, 'http://example.com/ext-type') + resource.add_link(uri, u'http://example.com/\u00e7runchy') + resource.add_link(uri, u'https://example.com/too-\u00e7runchy') + resource.add_link('/alt-thing', + u'alternate http://example.com/\u00e7runchy') - self._check_link_header(expected_value) + self._check_link_header(resource, expected_value) def test_add_link_with_title(self): expected_value = ('</related/thing>; rel=item; ' 'title="A related thing"') - self.resource = LinkHeaderResource() - self.resource.add_link('/related/thing', 'item', - title='A related thing') + resource = LinkHeaderResource() + resource.add_link('/related/thing', 'item', + title='A related thing') - self._check_link_header(expected_value) + self._check_link_header(resource, expected_value) def test_add_link_with_title_star(self): expected_value = ('</related/thing>; rel=item; ' @@ -533,54 
+470,53 @@ class TestHeaders(testing.TestBase): '</%C3%A7runchy/thing>; rel=item; ' "title*=UTF-8'en'A%20%C3%A7runchy%20thing") - self.resource = LinkHeaderResource() - self.resource.add_link('/related/thing', 'item', - title_star=('', 'A related thing')) + resource = LinkHeaderResource() + resource.add_link('/related/thing', 'item', + title_star=('', 'A related thing')) - self.resource.add_link(u'/\u00e7runchy/thing', 'item', - title_star=('en', u'A \u00e7runchy thing')) + resource.add_link(u'/\u00e7runchy/thing', 'item', + title_star=('en', u'A \u00e7runchy thing')) - self._check_link_header(expected_value) + self._check_link_header(resource, expected_value) def test_add_link_with_anchor(self): expected_value = ('</related/thing>; rel=item; ' 'anchor="/some%20thing/or-other"') - self.resource = LinkHeaderResource() - self.resource.add_link('/related/thing', 'item', - anchor='/some thing/or-other') + resource = LinkHeaderResource() + resource.add_link('/related/thing', 'item', + anchor='/some thing/or-other') - self._check_link_header(expected_value) + self._check_link_header(resource, expected_value) def test_add_link_with_hreflang(self): expected_value = ('</related/thing>; rel=about; ' 'hreflang=en') - self.resource = LinkHeaderResource() - self.resource.add_link('/related/thing', 'about', - hreflang='en') + resource = LinkHeaderResource() + resource.add_link('/related/thing', 'about', hreflang='en') - self._check_link_header(expected_value) + self._check_link_header(resource, expected_value) def test_add_link_with_hreflang_multi(self): expected_value = ('</related/thing>; rel=about; ' 'hreflang=en-GB; hreflang=de') - self.resource = LinkHeaderResource() - self.resource.add_link('/related/thing', 'about', - hreflang=('en-GB', 'de')) + resource = LinkHeaderResource() + resource.add_link('/related/thing', 'about', + hreflang=('en-GB', 'de')) - self._check_link_header(expected_value) + self._check_link_header(resource, expected_value) def test_add_link_with_type_hint(self): expected_value = ('</related/thing>; rel=alternate; ' 'type="video/mp4; codecs=avc1.640028"') - self.resource = LinkHeaderResource() - self.resource.add_link('/related/thing', 'alternate', - type_hint='video/mp4; codecs=avc1.640028') + resource = LinkHeaderResource() + resource.add_link('/related/thing', 'alternate', + type_hint='video/mp4; codecs=avc1.640028') - self._check_link_header(expected_value) + self._check_link_header(resource, expected_value) def test_add_link_complex(self): expected_value = ('</related/thing>; rel=alternate; ' @@ -589,21 +525,24 @@ class TestHeaders(testing.TestBase): 'type="application/json"; ' 'hreflang=en-GB; hreflang=de') - self.resource = LinkHeaderResource() - self.resource.add_link('/related/thing', 'alternate', - title='A related thing', - hreflang=('en-GB', 'de'), - type_hint='application/json', - title_star=('en', u'A \u00e7runchy thing')) + resource = LinkHeaderResource() + resource.add_link('/related/thing', 'alternate', + title='A related thing', + hreflang=('en-GB', 'de'), + type_hint='application/json', + title_star=('en', u'A \u00e7runchy thing')) - self._check_link_header(expected_value) + self._check_link_header(resource, expected_value) # ---------------------------------------------------------------------- # Helpers # ---------------------------------------------------------------------- - def _check_link_header(self, expected_value): - self.api.add_route(self.test_route, self.resource) + def _check_link_header(self, resource, expected_value): + 
self._check_header(resource, 'Link', expected_value) + + def _check_header(self, resource, header, expected_value): + self.api.add_route('/', resource) - self.simulate_request(self.test_route) - self.assertEqual(expected_value, self.srmock.headers_dict['link']) + result = self.simulate_get() + self.assertEqual(result.headers[header], expected_value) diff --git a/tests/test_hello.py b/tests/test_hello.py index daff9b9..f191916 100644 --- a/tests/test_hello.py +++ b/tests/test_hello.py @@ -1,4 +1,4 @@ -from testtools.matchers import Contains +import ddt import falcon import io @@ -74,156 +74,106 @@ class NoStatusResource(object): pass -class TestHelloWorld(testing.TestBase): [email protected] +class TestHelloWorld(testing.TestCase): - def before(self): - self.resource = HelloResource('body') - self.api.add_route(self.test_route, self.resource) - - self.bytes_resource = HelloResource('body, bytes') - self.api.add_route('/bytes', self.bytes_resource) - - self.data_resource = HelloResource('data') - self.api.add_route('/data', self.data_resource) - - self.chunked_resource = HelloResource('stream') - self.api.add_route('/chunked-stream', self.chunked_resource) - - self.stream_resource = HelloResource('stream, stream_len') - self.api.add_route('/stream', self.stream_resource) - - self.filelike_resource = HelloResource('stream, stream_len, filelike') - self.api.add_route('/filelike', self.filelike_resource) - - self.filelike_helper_resource = HelloResource( - 'stream, stream_len, filelike, use_helper') - self.api.add_route('/filelike-helper', self.filelike_helper_resource) - - self.no_status_resource = NoStatusResource() - self.api.add_route('/nostatus', self.no_status_resource) - - self.root_resource = testing.TestResource() - self.api.add_route('/', self.root_resource) - - def after(self): - pass + def setUp(self): + super(TestHelloWorld, self).setUp() def test_env_headers_list_of_tuples(self): env = testing.create_environ(headers=[('User-Agent', 'Falcon-Test')]) self.assertEqual(env['HTTP_USER_AGENT'], 'Falcon-Test') - def test_empty_route(self): - self.simulate_request('') - self.assertTrue(self.root_resource.called) - - def test_route_negative(self): - bogus_route = self.test_route + 'x' - self.simulate_request(bogus_route) + def test_root_route(self): + doc = {u"message": u"Hello world!"} + resource = testing.SimpleTestResource(json=doc) + self.api.add_route('/', resource) - # Ensure the request was NOT routed to resource - self.assertFalse(self.resource.called) - self.assertEqual(self.srmock.status, falcon.HTTP_404) + result = self.simulate_get() + self.assertEqual(result.json, doc) - def test_body(self): - body = self.simulate_request(self.test_route) - resp = self.resource.resp + def test_no_route(self): + result = self.simulate_get('/seenoevil') + self.assertEqual(result.status_code, 404) - content_length = int(self.srmock.headers_dict['content-length']) - self.assertEqual(content_length, len(self.resource.sample_utf8)) + @ddt.data( + ('/body', HelloResource('body'), lambda r: r.body.encode('utf-8')), + ('/bytes', HelloResource('body, bytes'), lambda r: r.body), + ('/data', HelloResource('data'), lambda r: r.data), + ) + @ddt.unpack + def test_body(self, path, resource, get_body): + self.api.add_route(path, resource) - self.assertEqual(self.srmock.status, self.resource.sample_status) - self.assertEqual(resp.status, self.resource.sample_status) - self.assertEqual(resp.body.encode('utf-8'), self.resource.sample_utf8) - self.assertEqual(body, [self.resource.sample_utf8]) + result = 
self.simulate_get(path) + resp = resource.resp - def test_body_bytes(self): - body = self.simulate_request('/bytes') - resp = self.bytes_resource.resp + content_length = int(result.headers['content-length']) + self.assertEqual(content_length, len(resource.sample_utf8)) - content_length = int(self.srmock.headers_dict['content-length']) - self.assertEqual(content_length, len(self.resource.sample_utf8)) - - self.assertEqual(self.srmock.status, self.resource.sample_status) - self.assertEqual(resp.status, self.resource.sample_status) - self.assertEqual(resp.body, self.resource.sample_utf8) - self.assertEqual(body, [self.resource.sample_utf8]) - - def test_data(self): - body = self.simulate_request('/data') - resp = self.data_resource.resp - - content_length = int(self.srmock.headers_dict['content-length']) - self.assertEqual(content_length, len(self.resource.sample_utf8)) - - self.assertEqual(self.srmock.status, self.resource.sample_status) - self.assertEqual(resp.status, self.resource.sample_status) - self.assertEqual(resp.data, self.resource.sample_utf8) - self.assertEqual(body, [self.resource.sample_utf8]) + self.assertEqual(result.status, resource.sample_status) + self.assertEqual(resp.status, resource.sample_status) + self.assertEqual(get_body(resp), resource.sample_utf8) + self.assertEqual(result.data, resource.sample_utf8) def test_no_body_on_head(self): - body = self.simulate_request(self.test_route, method='HEAD') - self.assertEqual(body, []) - self.assertEqual(self.srmock.status, falcon.HTTP_200) + self.api.add_route('/body', HelloResource('body')) + result = self.simulate_head('/body') - def test_stream_chunked(self): - src = self.simulate_request('/chunked-stream') + self.assertFalse(result.data) + self.assertEqual(result.status_code, 200) - dest = io.BytesIO() - for chunk in src: - dest.write(chunk) + def test_stream_chunked(self): + resource = HelloResource('stream') + self.api.add_route('/chunked-stream', resource) - self.assertEqual(dest.getvalue(), self.chunked_resource.sample_utf8) + result = self.simulate_get('/chunked-stream') - for header in self.srmock.headers: - self.assertNotEqual(header[0].lower(), 'content-length') + self.assertEqual(result.data, resource.sample_utf8) + self.assertNotIn('content-length', result.headers) def test_stream_known_len(self): - src = self.simulate_request('/stream') - self.assertTrue(self.stream_resource.called) + resource = HelloResource('stream, stream_len') + self.api.add_route('/stream', resource) - dest = io.BytesIO() - for chunk in src: - dest.write(chunk) + result = self.simulate_get('/stream') + self.assertTrue(resource.called) - expected_len = self.stream_resource.resp.stream_len - content_length = ('content-length', str(expected_len)) - self.assertThat(self.srmock.headers, Contains(content_length)) - self.assertEqual(dest.tell(), expected_len) - - self.assertEqual(dest.getvalue(), - self.chunked_resource.sample_utf8) + expected_len = resource.resp.stream_len + actual_len = int(result.headers['content-length']) + self.assertEqual(actual_len, expected_len) + self.assertEqual(len(result.data), expected_len) + self.assertEqual(result.data, resource.sample_utf8) def test_filelike(self): - for file_wrapper in (None, FileWrapper): - url = '/filelike' - - src = self.simulate_request(url, file_wrapper=file_wrapper) - self.assertTrue(self.filelike_resource.called) + resource = HelloResource('stream, stream_len, filelike') + self.api.add_route('/filelike', resource) - dest = io.BytesIO() - for chunk in src: - dest.write(chunk) + for 
file_wrapper in (None, FileWrapper): + result = self.simulate_get('/filelike', file_wrapper=file_wrapper) + self.assertTrue(resource.called) - expected_len = self.filelike_resource.resp.stream_len - content_length = ('content-length', str(expected_len)) - self.assertThat(self.srmock.headers, Contains(content_length)) - self.assertEqual(dest.tell(), expected_len) + expected_len = resource.resp.stream_len + actual_len = int(result.headers['content-length']) + self.assertEqual(actual_len, expected_len) + self.assertEqual(len(result.data), expected_len) def test_filelike_using_helper(self): - src = self.simulate_request('/filelike-helper') - self.assertTrue(self.filelike_helper_resource.called) + resource = HelloResource('stream, stream_len, filelike, use_helper') + self.api.add_route('/filelike-helper', resource) - dest = io.BytesIO() - for chunk in src: - dest.write(chunk) + result = self.simulate_get('/filelike-helper') + self.assertTrue(resource.called) - expected_len = self.filelike_helper_resource.resp.stream_len - content_length = ('content-length', str(expected_len)) - self.assertThat(self.srmock.headers, Contains(content_length)) - self.assertEqual(dest.tell(), expected_len) + expected_len = resource.resp.stream_len + actual_len = int(result.headers['content-length']) + self.assertEqual(actual_len, expected_len) + self.assertEqual(len(result.data), expected_len) def test_status_not_set(self): - body = self.simulate_request('/nostatus') + self.api.add_route('/nostatus', NoStatusResource()) + + result = self.simulate_get('/nostatus') - self.assertEqual(body, []) - self.assertEqual(self.srmock.status, falcon.HTTP_200) + self.assertFalse(result.data) + self.assertEqual(result.status_code, 200) diff --git a/tests/test_utils.py b/tests/test_utils.py index b082c38..9ba7e0c 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -3,6 +3,7 @@ from datetime import datetime import functools import io +import json import random import sys @@ -10,7 +11,7 @@ import testtools import six import falcon -import falcon.testing +from falcon import testing from falcon import util from falcon.util import uri @@ -297,25 +298,25 @@ class TestFalconUtils(testtools.TestCase): ('falcon.example.com', 42)) -class TestFalconTesting(falcon.testing.TestBase): +class TestFalconTesting(testing.TestBase): """Catch some uncommon branches not covered elsewhere.""" def test_path_escape_chars_in_create_environ(self): - env = falcon.testing.create_environ('/hello%20world%21') + env = testing.create_environ('/hello%20world%21') self.assertEqual(env['PATH_INFO'], '/hello world!') def test_unicode_path_in_create_environ(self): if six.PY3: self.skip('Test does not apply to Py3K') - env = falcon.testing.create_environ(u'/fancy/unícode') + env = testing.create_environ(u'/fancy/unícode') self.assertEqual(env['PATH_INFO'], '/fancy/un\xc3\xadcode') - env = falcon.testing.create_environ(u'/simple') + env = testing.create_environ(u'/simple') self.assertEqual(env['PATH_INFO'], '/simple') def test_none_header_value_in_create_environ(self): - env = falcon.testing.create_environ('/', headers={'X-Foo': None}) + env = testing.create_environ('/', headers={'X-Foo': None}) self.assertEqual(env['HTTP_X_FOO'], '') def test_decode_empty_result(self): @@ -323,4 +324,69 @@ class TestFalconTesting(falcon.testing.TestBase): self.assertEqual(body, '') def test_httpnow_alias_for_backwards_compat(self): - self.assertIs(falcon.testing.httpnow, util.http_now) + self.assertIs(testing.httpnow, util.http_now) + + +class 
TestFalconTestCase(testing.TestCase): + """Verify some branches not covered elsewhere.""" + + def test_status(self): + resource = testing.SimpleTestResource(status=falcon.HTTP_702) + self.api.add_route('/', resource) + + result = self.simulate_get() + self.assertEqual(result.status, falcon.HTTP_702) + + def test_wsgi_iterable_not_closeable(self): + result = testing.Result([], falcon.HTTP_200, []) + self.assertFalse(result.data) + + def test_path_must_start_with_slash(self): + self.assertRaises(ValueError, self.simulate_get, 'foo') + + def test_cached_text_in_result(self): + self.api.add_route('/', testing.SimpleTestResource(body='test')) + + result = self.simulate_get() + self.assertEqual(result.text, result.text) + + def test_simple_resource_body_json_xor(self): + self.assertRaises( + ValueError, + testing.SimpleTestResource, + body='', + json={}, + ) + + def test_query_string(self): + class SomeResource(object): + def on_get(self, req, resp): + doc = {} + + doc['oid'] = req.get_param_as_int('oid') + doc['detailed'] = req.get_param_as_bool('detailed') + + resp.body = json.dumps(doc) + + self.api.add_route('/', SomeResource()) + + result = self.simulate_get(query_string='oid=42&detailed=no') + self.assertEqual(result.json['oid'], 42) + self.assertFalse(result.json['detailed']) + + def test_query_string_no_question(self): + self.assertRaises(ValueError, self.simulate_get, query_string='?x=1') + + def test_query_string_in_path(self): + self.assertRaises(ValueError, self.simulate_get, path='/thing?x=1') + + +class FancyAPI(falcon.API): + pass + + +class FancyTestCase(testing.TestCase): + api_class = FancyAPI + + def test_something(self): + self.assertTrue(isinstance(self.api, FancyAPI))
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks", "has_many_modified_files", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 1 }, "num_modified_files": 10 }
0.3
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "nose", "coverage", "ddt", "pyyaml", "requests", "testtools", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "tools/test-requires" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi==2025.1.31 charset-normalizer==3.4.1 coverage==7.8.0 ddt==1.7.2 exceptiongroup==1.2.2 -e git+https://github.com/falconry/falcon.git@4f5d704c6f2ffa168846641afb8acad1101ee394#egg=falcon idna==3.10 iniconfig==2.1.0 nose==1.3.7 packaging==24.2 pluggy==1.5.0 pytest==8.3.5 python-mimeparse==2.0.0 PyYAML==6.0.2 requests==2.32.3 six==1.17.0 testtools==2.7.2 tomli==2.2.1 urllib3==2.3.0
name: falcon channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - certifi==2025.1.31 - charset-normalizer==3.4.1 - coverage==7.8.0 - ddt==1.7.2 - exceptiongroup==1.2.2 - idna==3.10 - iniconfig==2.1.0 - nose==1.3.7 - packaging==24.2 - pluggy==1.5.0 - pytest==8.3.5 - python-mimeparse==2.0.0 - pyyaml==6.0.2 - requests==2.32.3 - six==1.17.0 - testtools==2.7.2 - tomli==2.2.1 - urllib3==2.3.0 prefix: /opt/conda/envs/falcon
[ "tests/test_after_hooks.py::TestHooks::test_output_validator", "tests/test_after_hooks.py::TestHooks::test_wrapped_resource", "tests/test_after_hooks.py::TestHooks::test_wrapped_resource_with_hooks_aware_of_resource", "tests/test_after_hooks.py::TestGlobalHooks::test_global_after_hooks_run_after_exception", "tests/test_after_hooks.py::TestGlobalHooks::test_global_hook_wrap_default_405", "tests/test_after_hooks.py::TestGlobalHooks::test_multiple_global_hooks_wrap_default_405", "tests/test_error_handlers.py::TestErrorHandler::test_converted_error", "tests/test_headers.py::TestHeaders::test_content_type_no_body", "tests/test_headers.py::TestHeaders::test_headers_as_list", "tests/test_headers.py::TestHeaders::test_response_append_header", "tests/test_headers.py::TestHeaders::test_unicode_location_headers", "tests/test_hello.py::TestHelloWorld::test_no_route", "tests/test_hello.py::TestHelloWorld::test_status_not_set", "tests/test_utils.py::TestFalconTestCase::test_status" ]
[ "tests/test_utils.py::TestFalconUtils::test_deprecated_decorator" ]
[ "tests/test_after_hooks.py::TestHooks::test_hook_as_callable_class", "tests/test_after_hooks.py::TestHooks::test_serializer", "tests/test_after_hooks.py::TestGlobalHooks::test_customized_options", "tests/test_after_hooks.py::TestGlobalHooks::test_global_hook", "tests/test_after_hooks.py::TestGlobalHooks::test_global_hook_is_resource_aware", "tests/test_after_hooks.py::TestGlobalHooks::test_global_hook_wrap_default_on_options", "tests/test_after_hooks.py::TestGlobalHooks::test_invalid_type", "tests/test_after_hooks.py::TestGlobalHooks::test_multiple_global_hook", "tests/test_after_hooks.py::TestGlobalHooks::test_multiple_global_hooks_wrap_default_on_options", "tests/test_error_handlers.py::TestErrorHandler::test_caught_error", "tests/test_error_handlers.py::TestErrorHandler::test_error_order", "tests/test_error_handlers.py::TestErrorHandler::test_handle_not_defined", "tests/test_error_handlers.py::TestErrorHandler::test_subclass_error", "tests/test_error_handlers.py::TestErrorHandler::test_uncaught_error", "tests/test_error_handlers.py::TestErrorHandler::test_uncaught_error_else", "tests/test_headers.py::TestHeaders::test_add_link_complex", "tests/test_headers.py::TestHeaders::test_add_link_multiple", "tests/test_headers.py::TestHeaders::test_add_link_single", "tests/test_headers.py::TestHeaders::test_add_link_with_anchor", "tests/test_headers.py::TestHeaders::test_add_link_with_hreflang", "tests/test_headers.py::TestHeaders::test_add_link_with_hreflang_multi", "tests/test_headers.py::TestHeaders::test_add_link_with_title", "tests/test_headers.py::TestHeaders::test_add_link_with_title_star", "tests/test_headers.py::TestHeaders::test_add_link_with_type_hint", "tests/test_headers.py::TestHeaders::test_content_header_missing", "tests/test_headers.py::TestHeaders::test_content_length", "tests/test_headers.py::TestHeaders::test_custom_content_type", "tests/test_headers.py::TestHeaders::test_default_media_type", "tests/test_headers.py::TestHeaders::test_default_value", "tests/test_headers.py::TestHeaders::test_no_content_length_1_204_No_Content", "tests/test_headers.py::TestHeaders::test_no_content_length_2_304_Not_Modified", "tests/test_headers.py::TestHeaders::test_no_content_type_1_204_No_Content", "tests/test_headers.py::TestHeaders::test_no_content_type_2_304_Not_Modified", "tests/test_headers.py::TestHeaders::test_override_default_media_type_1___text_plain__charset_UTF_8____Hello_Unicode_____", "tests/test_headers.py::TestHeaders::test_override_default_media_type_2___text_plain____Hello_ISO_8859_1___", "tests/test_headers.py::TestHeaders::test_override_default_media_type_missing_encoding", "tests/test_headers.py::TestHeaders::test_passthrough_request_headers", "tests/test_headers.py::TestHeaders::test_required_header", "tests/test_headers.py::TestHeaders::test_response_header_helpers_on_get", "tests/test_headers.py::TestHeaders::test_response_set_and_get_header", "tests/test_headers.py::TestHeaders::test_unicode_headers", "tests/test_headers.py::TestHeaders::test_vary_header_1____accept_encoding_____accept_encoding__", "tests/test_headers.py::TestHeaders::test_vary_header_2____accept_encoding____x_auth_token_____accept_encoding__x_auth_token__", "tests/test_headers.py::TestHeaders::test_vary_header_3____accept_encoding____x_auth_token_____accept_encoding__x_auth_token__", "tests/test_headers.py::TestHeaders::test_vary_star", "tests/test_hello.py::TestHelloWorld::test_body_1", "tests/test_hello.py::TestHelloWorld::test_body_2", "tests/test_hello.py::TestHelloWorld::test_body_3", 
"tests/test_hello.py::TestHelloWorld::test_env_headers_list_of_tuples", "tests/test_hello.py::TestHelloWorld::test_filelike", "tests/test_hello.py::TestHelloWorld::test_filelike_using_helper", "tests/test_hello.py::TestHelloWorld::test_no_body_on_head", "tests/test_hello.py::TestHelloWorld::test_root_route", "tests/test_hello.py::TestHelloWorld::test_stream_chunked", "tests/test_hello.py::TestHelloWorld::test_stream_known_len", "tests/test_utils.py::TestFalconUtils::test_dt_to_http", "tests/test_utils.py::TestFalconUtils::test_http_date_to_dt", "tests/test_utils.py::TestFalconUtils::test_http_now", "tests/test_utils.py::TestFalconUtils::test_pack_query_params_none", "tests/test_utils.py::TestFalconUtils::test_pack_query_params_one", "tests/test_utils.py::TestFalconUtils::test_pack_query_params_several", "tests/test_utils.py::TestFalconUtils::test_parse_host", "tests/test_utils.py::TestFalconUtils::test_parse_query_string", "tests/test_utils.py::TestFalconUtils::test_prop_uri_decode_models_stdlib_unquote_plus", "tests/test_utils.py::TestFalconUtils::test_prop_uri_encode_models_stdlib_quote", "tests/test_utils.py::TestFalconUtils::test_prop_uri_encode_value_models_stdlib_quote_safe_tilde", "tests/test_utils.py::TestFalconUtils::test_uri_decode", "tests/test_utils.py::TestFalconUtils::test_uri_encode", "tests/test_utils.py::TestFalconUtils::test_uri_encode_value", "tests/test_utils.py::TestFalconTesting::test_decode_empty_result", "tests/test_utils.py::TestFalconTesting::test_httpnow_alias_for_backwards_compat", "tests/test_utils.py::TestFalconTesting::test_none_header_value_in_create_environ", "tests/test_utils.py::TestFalconTesting::test_path_escape_chars_in_create_environ", "tests/test_utils.py::TestFalconTestCase::test_cached_text_in_result", "tests/test_utils.py::TestFalconTestCase::test_path_must_start_with_slash", "tests/test_utils.py::TestFalconTestCase::test_query_string", "tests/test_utils.py::TestFalconTestCase::test_query_string_in_path", "tests/test_utils.py::TestFalconTestCase::test_query_string_no_question", "tests/test_utils.py::TestFalconTestCase::test_simple_resource_body_json_xor", "tests/test_utils.py::TestFalconTestCase::test_wsgi_iterable_not_closeable", "tests/test_utils.py::FancyTestCase::test_something" ]
[]
Apache License 2.0
404
ifosch__accloudtant-52
71d752b9244ff84978a83413be84035a92d5c077
2016-01-30 11:16:12
71d752b9244ff84978a83413be84035a92d5c077
diff --git a/accloudtant/aws/reports.py b/accloudtant/aws/reports.py index 26af2d1..905018e 100644 --- a/accloudtant/aws/reports.py +++ b/accloudtant/aws/reports.py @@ -16,10 +16,13 @@ class Reports(object): def find_reserved_instance(self): for instance in self.instances: - instance.current = float(self.prices.prices[instance.key][instance.region][instance.size]['od']) + instance_region = self.prices.prices[instance.key][instance.region] + instance_size = instance_region[instance.size] + instance.current = float(instance_size['od']) if instance.state == 'stopped': instance.current = 0.0 - instance.best = float(self.prices.prices[instance.key][instance.region][instance.size]['ri']['yrTerm3']['allUpfront']['effectiveHourly']) + instance_allUpfront = instance_size['ri']['yrTerm3']['allUpfront'] + instance.best = float(instance_allUpfront['effectiveHourly']) for reserved in self.reserved_instances['ReservedInstances']: if 'InstancesLeft' not in reserved.keys(): reserved['InstancesLeft'] = reserved['InstanceCount'] @@ -39,6 +42,8 @@ class Reports(object): 'State', 'Launch time', 'Reserved', + 'Current hourly price', + 'Renewed hourly price', ] table = [] for instance in self.instances: @@ -51,6 +56,8 @@ class Reports(object): instance.state, instance.launch_time.strftime('%Y-%m-%d %H:%M:%S'), instance.reserved, + instance.current, + instance.best, ] table.append(row) return tabulate(table, headers)
Add price information to EC2 instances information
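
To make the requested change concrete, here is a minimal sketch of how the two new report columns can be derived from the nested pricing dictionary the patch above navigates; the sample `prices` structure is hypothetical and only mirrors the keys the patch uses (`od` for the on-demand price and `ri`/`yrTerm3`/`allUpfront`/`effectiveHourly` for the renewed price).

```python
from tabulate import tabulate

# Hypothetical slice of the pricing data; in accloudtant this comes from the
# Prices object keyed by OS, region and instance size.
prices = {
    'linux': {
        'us-east-1': {
            'r2.8xlarge': {
                'od': '0.611',
                'ri': {'yrTerm3': {'allUpfront': {'effectiveHourly': '0.379'}}},
            },
        },
    },
}

size_prices = prices['linux']['us-east-1']['r2.8xlarge']
current = float(size_prices['od'])  # current (on-demand) hourly price
best = float(size_prices['ri']['yrTerm3']['allUpfront']['effectiveHourly'])  # 3-year all-upfront RI

headers = ['Id', 'Current hourly price', 'Renewed hourly price']
print(tabulate([['i-1840273c', current, best]], headers))
```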
ifosch/accloudtant
diff --git a/tests/aws/report_expected.txt b/tests/aws/report_expected.txt index 928efa2..f2bbb8b 100644 --- a/tests/aws/report_expected.txt +++ b/tests/aws/report_expected.txt @@ -1,9 +1,9 @@ -Id Name Type AZ OS State Launch time Reserved ----------- --------- ---------- ---------- ------------------------ ------- ------------------- ---------- -i-912a4392 web1 c3.8xlarge us-east-1c Windows running 2015-10-22 14:15:10 Yes -i-1840273e app1 r2.8xlarge us-east-1b Red Hat Enterprise Linux running 2015-10-22 14:15:10 Yes -i-9840273d app2 r2.8xlarge us-east-1c SUSE Linux running 2015-10-22 14:15:10 Yes -i-1840273d database1 r2.8xlarge us-east-1c Linux/UNIX stopped 2015-10-22 14:15:10 No -i-1840273c database2 r2.8xlarge us-east-1c Linux/UNIX running 2015-10-22 14:15:10 Yes -i-1840273b database3 r2.8xlarge us-east-1c Linux/UNIX running 2015-10-22 14:15:10 Yes -i-912a4393 test t1.micro us-east-1c Linux/UNIX running 2015-10-22 14:15:10 No +Id Name Type AZ OS State Launch time Reserved Current hourly price Renewed hourly price +---------- --------- ---------- ---------- ------------------------ ------- ------------------- ---------- ---------------------- ---------------------- +i-912a4392 web1 c3.8xlarge us-east-1c Windows running 2015-10-22 14:15:10 Yes 0.5121 0.3894 +i-1840273e app1 r2.8xlarge us-east-1b Red Hat Enterprise Linux running 2015-10-22 14:15:10 Yes 0.3894 0.3794 +i-9840273d app2 r2.8xlarge us-east-1c SUSE Linux running 2015-10-22 14:15:10 Yes 0.5225 0.389 +i-1840273d database1 r2.8xlarge us-east-1c Linux/UNIX stopped 2015-10-22 14:15:10 No 0 0.379 +i-1840273c database2 r2.8xlarge us-east-1c Linux/UNIX running 2015-10-22 14:15:10 Yes 0.611 0.379 +i-1840273b database3 r2.8xlarge us-east-1c Linux/UNIX running 2015-10-22 14:15:10 Yes 0.611 0.379 +i-912a4393 test t1.micro us-east-1c Linux/UNIX running 2015-10-22 14:15:10 No 0.767 0.3892
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 3, "test_score": 2 }, "num_modified_files": 1 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
-e git+https://github.com/ifosch/accloudtant.git@71d752b9244ff84978a83413be84035a92d5c077#egg=accloudtant boto3==1.37.23 botocore==1.37.23 certifi==2025.1.31 charset-normalizer==3.4.1 click==8.1.8 exceptiongroup==1.2.2 idna==3.10 iniconfig==2.1.0 jmespath==1.0.1 packaging==24.2 pluggy==1.5.0 pytest==8.3.5 python-dateutil==2.9.0.post0 requests==2.32.3 s3transfer==0.11.4 six==1.17.0 tabulate==0.9.0 tomli==2.2.1 urllib3==1.26.20
name: accloudtant channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - boto3==1.37.23 - botocore==1.37.23 - certifi==2025.1.31 - charset-normalizer==3.4.1 - click==8.1.8 - exceptiongroup==1.2.2 - idna==3.10 - iniconfig==2.1.0 - jmespath==1.0.1 - packaging==24.2 - pluggy==1.5.0 - pytest==8.3.5 - python-dateutil==2.9.0.post0 - requests==2.32.3 - s3transfer==0.11.4 - six==1.17.0 - tabulate==0.9.0 - tomli==2.2.1 - urllib3==1.26.20 prefix: /opt/conda/envs/accloudtant
[ "tests/aws/test_reports.py::test_reports" ]
[]
[ "tests/aws/test_instance.py::test_instance", "tests/aws/test_instance.py::test_guess_os", "tests/aws/test_instance.py::test_match_reserved_instance", "tests/aws/test_prices.py::test_process_ec2", "tests/aws/test_prices.py::test_process_model", "tests/aws/test_prices.py::test_process_generic", "tests/aws/test_prices.py::test_process_on_demand", "tests/aws/test_prices.py::test_process_reserved", "tests/aws/test_prices.py::test_process_data_transfer", "tests/aws/test_prices.py::test_process_ebs", "tests/aws/test_prices.py::test_process_eip", "tests/aws/test_prices.py::test_process_cw", "tests/aws/test_prices.py::test_process_elb", "tests/aws/test_prices.py::test_print_prices", "tests/aws/test_prices.py::test_prices", "tests/aws/test_prices.py::test_prices_with_warning", "tests/test_utils.py::test_fix_lazy_json" ]
[]
null
405
sympy__sympy-10499
8b01598119b063766667cfdb643452680a5356cd
2016-01-30 23:11:54
0241b35cae5f2adc04dc37b25f7dc9c5f00bd746
diff --git a/setup.py b/setup.py index d171a6301a..9a8755d543 100755 --- a/setup.py +++ b/setup.py @@ -292,7 +292,6 @@ def run(self): 'sympy.polys.tests', 'sympy.printing.pretty.tests', 'sympy.printing.tests', - 'sympy.sandbox.tests', 'sympy.series.tests', 'sympy.sets.tests', 'sympy.simplify.tests', @@ -300,12 +299,11 @@ def run(self): 'sympy.stats.tests', 'sympy.strategies.branch.tests', 'sympy.strategies.tests', - 'sympy.tensor.array.tests', 'sympy.tensor.tests', 'sympy.unify.tests', 'sympy.utilities.tests', 'sympy.vector.tests', -] + ] long_description = '''SymPy is a Python library for symbolic mathematics. It aims to become a full-featured computer algebra system (CAS) while keeping the code diff --git a/sympy/functions/combinatorial/factorials.py b/sympy/functions/combinatorial/factorials.py index 48ea175555..ea9e5a5ec0 100644 --- a/sympy/functions/combinatorial/factorials.py +++ b/sympy/functions/combinatorial/factorials.py @@ -330,8 +330,8 @@ def eval(cls, arg): # TODO: extend this to complex numbers? if arg.is_Number: - if not arg.is_Integer: - raise ValueError("argument must be nonnegative integer or negative odd integer") + if arg.is_infinite: + return # This implementation is faster than the recursive one # It also avoids "maximum recursion depth exceeded" runtime error @@ -339,13 +339,13 @@ def eval(cls, arg): if arg.is_even: k = arg / 2 return 2 ** k * factorial(k) - return factorial(arg) / factorial2(arg - 1) + return factorial(arg) / factorial2(arg - 1) - if arg.is_odd: - return arg * (S.NegativeOne) ** ((1 - arg) / 2) / factorial2(-arg) - raise ValueError("argument must be nonnegative integer or negative odd integer") + if arg.is_even: + raise ValueError("argument must be nonnegative or odd") + return arg * (S.NegativeOne) ** ((1 - arg) / 2) / factorial2(-arg) def _eval_is_even(self): # Double factorial is even for every positive even input diff --git a/sympy/printing/lambdarepr.py b/sympy/printing/lambdarepr.py index bc8756a1f0..e0d80fbcc2 100644 --- a/sympy/printing/lambdarepr.py +++ b/sympy/printing/lambdarepr.py @@ -244,7 +244,7 @@ def blacklisted(self, expr): def doprint(self, expr): lstr = super(NumExprPrinter, self).doprint(expr) - return "evaluate('%s')" % lstr + return "evaluate('%s', truediv=True)" % lstr def lambdarepr(expr, **settings): """ diff --git a/sympy/utilities/lambdify.py b/sympy/utilities/lambdify.py index aae43e458e..f282738ff5 100644 --- a/sympy/utilities/lambdify.py +++ b/sympy/utilities/lambdify.py @@ -31,7 +31,6 @@ # Mappings between sympy and other modules function names. MATH_TRANSLATIONS = { - "Abs": "fabs", "ceiling": "ceil", "E": "e", "ln": "log", @@ -65,7 +64,6 @@ } NUMPY_TRANSLATIONS = { - "Abs": "abs", "acos": "arccos", "acosh": "arccosh", "arg": "angle", @@ -151,10 +149,20 @@ def _import(module, reload="False"): for sympyname, translation in translations.items(): namespace[sympyname] = namespace[translation] + # For computing the modulus of a sympy expression we use the builtin abs + # function, instead of the previously used fabs function for all + # translation modules. This is because the fabs function in the math + # module does not accept complex valued arguments. (see issue 9474). The + # only exception, where we don't use the builtin abs function is the + # mpmath translation module, because mpmath.fabs returns mpf objects in + # contrast to abs(). 
+ if 'Abs' not in namespace: + namespace['Abs'] = abs + @doctest_depends_on(modules=('numpy')) def lambdify(args, expr, modules=None, printer=None, use_imps=True, - dummify=True): + dummify=True): """ Returns a lambda function for fast calculation of numerical values.
lambdify throws error with abs This is from #7767 and there is a potential fix in #7779. Just found a bug in lambdify ```py k = symbols('k', complex=True) lambdify([k], abs(k**2))(10.01+0.1j) ``` throws ``` --------------------------------------------------------------------------- TypeError Traceback (most recent call last) <ipython-input-144-4ad0dd2e940e> in <module>() 1 k = symbols('k', complex=True) ----> 2 lambdify([k], abs(k**2))(10.01+0.1j) /usr/lib64/python2.7/site-packages/numpy/__init__.pyc in <lambda>(_Dummy_82) TypeError: can't convert complex to float ``` using ``sympy.Abs`` leads to the same exception.
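
A small sketch based on the fix and the tests it adds (test_issue9474): once `Abs` falls back to the builtin `abs` instead of `math.fabs`, the lambdified function accepts complex arguments. The input value is the one from the report above.

```python
from sympy import symbols, Abs, lambdify

k = symbols('k', complex=True)

# With Abs mapped to the builtin abs, complex input no longer raises
# "can't convert complex to float".
f = lambdify(k, Abs(k**2), modules='math')
print(f(10.01 + 0.1j))  # modulus of (10.01+0.1j)**2, roughly 100.21
```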
sympy/sympy
diff --git a/sympy/functions/combinatorial/tests/test_comb_factorials.py b/sympy/functions/combinatorial/tests/test_comb_factorials.py index 1886a053ba..9b54ea1019 100644 --- a/sympy/functions/combinatorial/tests/test_comb_factorials.py +++ b/sympy/functions/combinatorial/tests/test_comb_factorials.py @@ -3,9 +3,8 @@ oo, zoo, simplify, expand_func, Product, I, Piecewise, Mod, Eq, sqrt) from sympy.functions.combinatorial.factorials import subfactorial from sympy.functions.special.gamma_functions import uppergamma -from sympy.utilities.pytest import XFAIL, raises +from sympy.utilities.pytest import XFAIL -#Solves and Fixes Issue #10388 - This is the updated test for the same solved issue def test_rf_eval_apply(): x, y = symbols('x,y') @@ -195,9 +194,7 @@ def test_factorial2(): nt = Symbol('nt', nonnegative=True) nf = Symbol('nf', nonnegative=False) nn = Symbol('nn') - #Solves and Fixes Issue #10388 - This is the updated test for the same solved issue - raises (ValueError, lambda: factorial2(oo)) - raises (ValueError, lambda: factorial2(S(5)/2)) + assert factorial2(n).is_integer is None assert factorial2(tt - 1).is_integer assert factorial2(tte - 1).is_integer diff --git a/sympy/utilities/tests/test_lambdify.py b/sympy/utilities/tests/test_lambdify.py index 7953638841..199a160b08 100644 --- a/sympy/utilities/tests/test_lambdify.py +++ b/sympy/utilities/tests/test_lambdify.py @@ -1,15 +1,18 @@ +from itertools import product +import math + +import mpmath + from sympy.utilities.pytest import XFAIL, raises from sympy import ( symbols, lambdify, sqrt, sin, cos, tan, pi, acos, acosh, Rational, Float, Matrix, Lambda, Piecewise, exp, Integral, oo, I, Abs, Function, - true, false, And, Or, Not, ITE, Min, Max) + true, false, And, Or, Not, ITE, Min, Max, floor, diff) from sympy.printing.lambdarepr import LambdaPrinter -import mpmath from sympy.utilities.lambdify import implemented_function from sympy.utilities.pytest import skip from sympy.utilities.decorator import conserve_mpmath_dps from sympy.external import import_module -import math import sympy @@ -328,6 +331,49 @@ def test_numpy_old_matrix(): numpy.testing.assert_allclose(f(1, 2, 3), sol_arr) assert isinstance(f(1, 2, 3), numpy.matrix) + +def test_issue9474(): + mods = [None, 'math'] + if numpy: + mods.append('numpy') + if mpmath: + mods.append('mpmath') + for mod in mods: + f = lambdify(x, sympy.S(1)/x, modules=mod) + assert f(2) == 0.5 + f = lambdify(x, floor(sympy.S(1)/x), modules=mod) + assert f(2) == 0 + + if mpmath: + f = lambdify(x, sympy.S(1)/sympy.Abs(x), modules=['mpmath']) + assert isinstance(f(2), mpmath.mpf) + + for absfunc, modules in product([Abs, abs], mods): + f = lambdify(x, absfunc(x), modules=modules) + assert f(-1) == 1 + assert f(1) == 1 + assert f(3+4j) == 5 + + +def test_issue_9871(): + if not numexpr: + skip("numexpr not installed.") + if not numpy: + skip("numpy not installed.") + + r = sqrt(x**2 + y**2) + expr = diff(1/r, x) + + xn = yn = numpy.linspace(1, 10, 16) + # expr(xn, xn) = -xn/(sqrt(2)*xn)^3 + fv_exact = -numpy.sqrt(2.)**-3 * xn**-2 + + fv_numpy = lambdify((x, y), expr, modules='numpy')(xn, yn) + fv_numexpr = lambdify((x, y), expr, modules='numexpr')(xn, yn) + numpy.testing.assert_allclose(fv_numpy, fv_exact, rtol=1e-10) + numpy.testing.assert_allclose(fv_numexpr, fv_exact, rtol=1e-10) + + def test_numpy_piecewise(): if not numpy: skip("numpy not installed.")
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 1, "test_score": 3 }, "num_modified_files": 4 }
0.7
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work mpmath==1.3.0 packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work py @ file:///opt/conda/conda-bld/py_1644396412707/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 -e git+https://github.com/sympy/sympy.git@8b01598119b063766667cfdb643452680a5356cd#egg=sympy toml @ file:///tmp/build/80754af9/toml_1616166611790/work typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: sympy channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - mpmath==1.3.0 prefix: /opt/conda/envs/sympy
[ "sympy/utilities/tests/test_lambdify.py::test_issue9474" ]
[]
[ "sympy/functions/combinatorial/tests/test_comb_factorials.py::test_rf_eval_apply", "sympy/functions/combinatorial/tests/test_comb_factorials.py::test_ff_eval_apply", "sympy/functions/combinatorial/tests/test_comb_factorials.py::test_factorial", "sympy/functions/combinatorial/tests/test_comb_factorials.py::test_factorial_diff", "sympy/functions/combinatorial/tests/test_comb_factorials.py::test_factorial_series", "sympy/functions/combinatorial/tests/test_comb_factorials.py::test_factorial_rewrite", "sympy/functions/combinatorial/tests/test_comb_factorials.py::test_factorial2", "sympy/functions/combinatorial/tests/test_comb_factorials.py::test_factorial2_rewrite", "sympy/functions/combinatorial/tests/test_comb_factorials.py::test_binomial", "sympy/functions/combinatorial/tests/test_comb_factorials.py::test_binomial_diff", "sympy/functions/combinatorial/tests/test_comb_factorials.py::test_binomial_rewrite", "sympy/functions/combinatorial/tests/test_comb_factorials.py::test_subfactorial", "sympy/utilities/tests/test_lambdify.py::test_no_args", "sympy/utilities/tests/test_lambdify.py::test_single_arg", "sympy/utilities/tests/test_lambdify.py::test_list_args", "sympy/utilities/tests/test_lambdify.py::test_str_args", "sympy/utilities/tests/test_lambdify.py::test_own_namespace", "sympy/utilities/tests/test_lambdify.py::test_own_module", "sympy/utilities/tests/test_lambdify.py::test_bad_args", "sympy/utilities/tests/test_lambdify.py::test_atoms", "sympy/utilities/tests/test_lambdify.py::test_sympy_lambda", "sympy/utilities/tests/test_lambdify.py::test_math_lambda", "sympy/utilities/tests/test_lambdify.py::test_mpmath_lambda", "sympy/utilities/tests/test_lambdify.py::test_math_transl", "sympy/utilities/tests/test_lambdify.py::test_mpmath_transl", "sympy/utilities/tests/test_lambdify.py::test_exponentiation", "sympy/utilities/tests/test_lambdify.py::test_sqrt", "sympy/utilities/tests/test_lambdify.py::test_trig", "sympy/utilities/tests/test_lambdify.py::test_vector_simple", "sympy/utilities/tests/test_lambdify.py::test_vector_discontinuous", "sympy/utilities/tests/test_lambdify.py::test_trig_symbolic", "sympy/utilities/tests/test_lambdify.py::test_trig_float", "sympy/utilities/tests/test_lambdify.py::test_docs", "sympy/utilities/tests/test_lambdify.py::test_math", "sympy/utilities/tests/test_lambdify.py::test_sin", "sympy/utilities/tests/test_lambdify.py::test_matrix", "sympy/utilities/tests/test_lambdify.py::test_integral", "sympy/utilities/tests/test_lambdify.py::test_sym_single_arg", "sympy/utilities/tests/test_lambdify.py::test_sym_list_args", "sympy/utilities/tests/test_lambdify.py::test_sym_integral", "sympy/utilities/tests/test_lambdify.py::test_namespace_order", "sympy/utilities/tests/test_lambdify.py::test_imps", "sympy/utilities/tests/test_lambdify.py::test_imps_wrong_args", "sympy/utilities/tests/test_lambdify.py::test_lambdify_imps", "sympy/utilities/tests/test_lambdify.py::test_dummification", "sympy/utilities/tests/test_lambdify.py::test_python_keywords", "sympy/utilities/tests/test_lambdify.py::test_lambdify_docstring", "sympy/utilities/tests/test_lambdify.py::test_special_printers", "sympy/utilities/tests/test_lambdify.py::test_true_false", "sympy/utilities/tests/test_lambdify.py::test_issue_2790", "sympy/utilities/tests/test_lambdify.py::test_ITE", "sympy/utilities/tests/test_lambdify.py::test_Min_Max" ]
[]
BSD
406
guykisel__inline-plz-28
59cb7f9721ca3390fa31c48583e786e3728e8f1a
2016-01-31 22:54:38
59cb7f9721ca3390fa31c48583e786e3728e8f1a
diff --git a/inlineplz/linters/__init__.py b/inlineplz/linters/__init__.py index f4b0a73..420508e 100644 --- a/inlineplz/linters/__init__.py +++ b/inlineplz/linters/__init__.py @@ -13,7 +13,7 @@ from inlineplz import parsers LINTERS = { 'prospector': { 'install': ['pip', 'install', 'prospector'], - 'run': ['prospector', '--zero-exit'], + 'run': ['prospector', '--zero-exit', '-o', 'json'], 'dotfiles': ['.prospector.yaml'], 'parser': parsers.ProspectorParser }, diff --git a/inlineplz/parsers/prospector.py b/inlineplz/parsers/prospector.py index 68acb07..8146c2a 100644 --- a/inlineplz/parsers/prospector.py +++ b/inlineplz/parsers/prospector.py @@ -1,43 +1,31 @@ # -*- coding: utf-8 -*- from __future__ import absolute_import +from collections import OrderedDict +import json + from inlineplz.parsers.base import ParserBase from inlineplz.message import Message class ProspectorParser(ParserBase): - """Parse default prospector output.""" + """Parse json prospector output.""" def parse(self, lint_data): messages = [] - current_message = None - current_filename = '' - current_line = '' - - messages_found = False - - for line in lint_data.split('\n'): - # check for message block - if not line.strip(): - continue - if not messages_found: - if line.strip() == 'Messages': - messages_found = True - continue - # check for end of message block - elif line.strip() == 'Check Information': - break - # new filename - if not line.startswith(' '): - current_filename = line.strip() - continue - # new line number - elif not line.startswith(' '): - current_line = int(line.replace(' Line: ', '').strip()) - current_message = Message(current_filename, current_line) - messages.append(current_message) - continue - # new content - current_message.comments.append(line.lstrip()) - + for msgdata in json.loads( + lint_data, + object_pairs_hook=OrderedDict + ).get('messages'): + msg = Message( + msgdata['location']['path'], + msgdata['location']['line'] + ) + msgbody = '{0}: {1} ({2})'.format( + msgdata['source'], + msgdata['message'], + msgdata['code'] + ) + msg.comments.append(msgbody) + messages.append(msg) return messages
refactor prospector parser to use json formatter
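
To illustrate the approach taken in the patch above, a minimal sketch of parsing prospector's `-o json` output: each entry under `messages` carries `source`, `code`, `message` and a `location` with `path` and `line`. The payload below is a trimmed, hypothetical stand-in for the real test fixture.

```python
import json
from collections import OrderedDict

sample = '''
{
  "messages": [
    {
      "source": "pep257",
      "location": {"path": "inlineplz/util/__init__.py", "line": 1},
      "code": "D104",
      "message": "Missing docstring in public package"
    }
  ]
}
'''

for msgdata in json.loads(sample, object_pairs_hook=OrderedDict).get('messages'):
    body = '{0}: {1} ({2})'.format(msgdata['source'], msgdata['message'], msgdata['code'])
    print(msgdata['location']['path'], msgdata['location']['line'], body)
```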
guykisel/inline-plz
diff --git a/tests/parsers/test_prospector.py b/tests/parsers/test_prospector.py index f673b81..fbba037 100644 --- a/tests/parsers/test_prospector.py +++ b/tests/parsers/test_prospector.py @@ -17,14 +17,12 @@ prospector_path = os.path.join( def test_prospector(): with open(prospector_path) as inputfile: messages = prospector.ProspectorParser().parse(inputfile.read()) - assert messages[0].content == '`pylint: syntax-error / invalid syntax`' - assert messages[0].line_number == 34 - assert messages[0].path == 'docs/conf.py' - assert messages[1].content == '`pylint: unused-import / Unused Message imported from message`' - assert messages[1].line_number == 4 - assert messages[1].path == 'inline-plz/parsers/base.py' - assert messages[9].content == ('```\npylint: misplaced-comparison-constant / Comparison ' - 'should be __name__ == \'__main__\' (col 3)\npylint: ' - 'pretend this is a real message\n```') - assert messages[9].line_number == 113 - assert len(messages) == 11 + assert messages[0].content == '`pep257: Missing docstring in public package (D104)`' + assert messages[0].line_number == 1 + assert messages[0].path == 'inlineplz/util/__init__.py' + assert messages[1].content == '`pep257: Missing docstring in public package (D104)`' + assert messages[1].line_number == 1 + assert messages[1].path == 'inlineplz/parsers/__init__.py' + assert messages[9].content == ('`pep257: One-line docstring should fit on one line with quotes (found 2) (D200)`') + assert messages[9].line_number == 1 + assert len(messages) == 32 diff --git a/tests/testdata/parsers/prospector.txt b/tests/testdata/parsers/prospector.txt index 7c9ed99..c6ec9f7 100644 --- a/tests/testdata/parsers/prospector.txt +++ b/tests/testdata/parsers/prospector.txt @@ -1,48 +1,407 @@ -Messages -======== - -docs\conf.py - Line: 34 - pylint: syntax-error / invalid syntax - -inline-plz\parsers\base.py - Line: 4 - pylint: unused-import / Unused Message imported from message - Line: 9 - pylint: redefined-builtin / Redefining built-in 'input' (col 20) - -inline-plz\parsers\prospector.py - Line: 5 - pylint: unused-import / Unused Message imported from message - Line: 8 - pylint: redefined-builtin / Redefining built-in 'input' (col 20) - Line: 17 - pylint: undefined-variable / Undefined variable 'message' (col 32) - Line: 24 - pylint: undefined-variable / Undefined variable 'message' (col 32) - Line: 25 - pylint: redefined-variable-type / Redefinition of current_line type from str to int (col 16) - -travis_pypi_setup.py - Line: 20 - pylint: bare-except / No exception type(s) specified - Line: 113 - pylint: misplaced-comparison-constant / Comparison should be __name__ == '__main__' (col 3) - pylint: pretend this is a real message - Line: 114 - pylint: wrong-import-position / Import "import argparse" should be placed at the top of the module (col 4) - - - -Check Information -================= - Started: 2016-01-09 12:50:17.649090 - Finished: 2016-01-09 12:50:19.027461 - Time Taken: 1.38 seconds - Formatter: grouped - Profiles: default, no_doc_warnings, no_test_warnings, strictness_medium, strictness_high, strictness_veryhigh, no_member_warnings - Strictness: None - Libraries Used: - Tools Run: dodgy, mccabe, pep8, profile-validator, pyflakes, pylint - Messages Found: 11 - +{ + "messages": [ + { + "source": "pep257", + "location": { + "character": 0, + "line": 1, + "path": "inlineplz\\util\\__init__.py", + "module": null, + "function": null + }, + "code": "D104", + "message": "Missing docstring in public package" + }, + { + "source": "pep257", + 
"location": { + "character": 0, + "line": 1, + "path": "inlineplz\\parsers\\__init__.py", + "module": null, + "function": null + }, + "code": "D104", + "message": "Missing docstring in public package" + }, + { + "source": "pylint", + "location": { + "character": 0, + "line": 9, + "path": "inlineplz\\main.py", + "module": "inlineplz.main", + "function": null + }, + "code": "unused-import", + "message": "Unused parsers imported from inlineplz" + }, + { + "source": "pep257", + "location": { + "character": 0, + "line": 11, + "path": "inlineplz\\message.py", + "module": null, + "function": null + }, + "code": "D105", + "message": "Missing docstring in magic method" + }, + { + "source": "pylint", + "location": { + "character": 4, + "line": 8, + "path": "setup.py", + "module": "setup", + "function": null + }, + "code": "import-error", + "message": "Unable to import 'distutils.core'" + }, + { + "source": "pylint", + "location": { + "character": 0, + "line": 17, + "path": "setup.py", + "module": "setup", + "function": null + }, + "code": "invalid-name", + "message": "Invalid constant name \"requirements\"" + }, + { + "source": "pylint", + "location": { + "character": 0, + "line": 23, + "path": "setup.py", + "module": "setup", + "function": null + }, + "code": "invalid-name", + "message": "Invalid constant name \"test_requirements\"" + }, + { + "source": "pylint", + "location": { + "character": 4, + "line": 12, + "path": "setup.py", + "module": "setup", + "function": null + }, + "code": "invalid-name", + "message": "Invalid constant name \"readme\"" + }, + { + "source": "pylint", + "location": { + "character": 4, + "line": 15, + "path": "setup.py", + "module": "setup", + "function": null + }, + "code": "invalid-name", + "message": "Invalid constant name \"history\"" + }, + { + "source": "pep257", + "location": { + "character": 0, + "line": 1, + "path": "travis_pypi_setup.py", + "module": null, + "function": null + }, + "code": "D200", + "message": "One-line docstring should fit on one line with quotes (found 2)" + }, + { + "source": "pylint", + "location": { + "character": 4, + "line": 115, + "path": "travis_pypi_setup.py", + "module": "travis_pypi_setup", + "function": null + }, + "code": "invalid-name", + "message": "Invalid constant name \"parser\"" + }, + { + "source": "pep257", + "location": { + "character": 0, + "line": 69, + "path": "travis_pypi_setup.py", + "module": null, + "function": null + }, + "code": "D200", + "message": "One-line docstring should fit on one line with quotes (found 2)" + }, + { + "source": "pylint", + "location": { + "character": 27, + "line": 72, + "path": "travis_pypi_setup.py", + "module": "travis_pypi_setup", + "function": "prepend_line" + }, + "code": "invalid-name", + "message": "Invalid variable name \"f\"" + }, + { + "source": "pylint", + "location": { + "character": 9, + "line": 106, + "path": "travis_pypi_setup.py", + "module": "travis_pypi_setup", + "function": "main" + }, + "code": "redefined-outer-name", + "message": "Redefining name 'args' from outer scope (line 121)" + }, + { + "source": "pylint", + "location": { + "character": 32, + "line": 77, + "path": "travis_pypi_setup.py", + "module": "travis_pypi_setup", + "function": "prepend_line" + }, + "code": "invalid-name", + "message": "Invalid variable name \"f\"" + }, + { + "source": "pylint", + "location": { + "character": 3, + "line": 113, + "path": "travis_pypi_setup.py", + "module": "travis_pypi_setup", + "function": null + }, + "code": "misplaced-comparison-constant", + "message": "Comparison should 
be __name__ == '__main__'" + }, + { + "source": "pylint", + "location": { + "character": 27, + "line": 82, + "path": "travis_pypi_setup.py", + "module": "travis_pypi_setup", + "function": "load_yaml_config" + }, + "code": "invalid-name", + "message": "Invalid variable name \"f\"" + }, + { + "source": "pylint", + "location": { + "character": 4, + "line": 19, + "path": "travis_pypi_setup.py", + "module": "travis_pypi_setup", + "function": null + }, + "code": "wrong-import-order", + "message": "standard import \"from urllib import urlopen\" comes before \"import yaml\"" + }, + { + "source": "pylint", + "location": { + "character": 0, + "line": 20, + "path": "travis_pypi_setup.py", + "module": "travis_pypi_setup", + "function": null + }, + "code": "bare-except", + "message": "No exception type(s) specified" + }, + { + "source": "pylint", + "location": { + "character": 4, + "line": 21, + "path": "travis_pypi_setup.py", + "module": "travis_pypi_setup", + "function": null + }, + "code": "wrong-import-order", + "message": "standard import \"from urllib.request import urlopen\" comes before \"import yaml\"" + }, + { + "source": "pylint", + "location": { + "character": 32, + "line": 87, + "path": "travis_pypi_setup.py", + "module": "travis_pypi_setup", + "function": "save_yaml_config" + }, + "code": "invalid-name", + "message": "Invalid variable name \"f\"" + }, + { + "source": "pylint", + "location": { + "character": 4, + "line": 121, + "path": "travis_pypi_setup.py", + "module": "travis_pypi_setup", + "function": null + }, + "code": "invalid-name", + "message": "Invalid constant name \"args\"" + }, + { + "source": "pylint", + "location": { + "character": 4, + "line": 114, + "path": "travis_pypi_setup.py", + "module": "travis_pypi_setup", + "function": null + }, + "code": "wrong-import-order", + "message": "standard import \"import argparse\" comes before \"import yaml\"" + }, + { + "source": "pylint", + "location": { + "character": 4, + "line": 114, + "path": "travis_pypi_setup.py", + "module": "travis_pypi_setup", + "function": null + }, + "code": "wrong-import-position", + "message": "Import \"import argparse\" should be placed at the top of the module" + }, + { + "source": "pep257", + "location": { + "character": 0, + "line": 1, + "path": "inlineplz\\interfaces\\__init__.py", + "module": null, + "function": null + }, + "code": "D104", + "message": "Missing docstring in public package" + }, + { + "source": "pylint", + "location": { + "character": 16, + "line": 36, + "path": "inlineplz\\parsers\\prospector.py", + "module": "inlineplz.parsers.prospector", + "function": "ProspectorParser.parse" + }, + "code": "redefined-variable-type", + "message": "Redefinition of current_line type from str to int" + }, + { + "source": "pylint", + "location": { + "character": 12, + "line": 16, + "path": "inlineplz\\interfaces\\github.py", + "module": "inlineplz.interfaces.github", + "function": "GitHubInterface.__init__" + }, + "code": "invalid-name", + "message": "Invalid attribute name \"gh\"" + }, + { + "source": "pylint", + "location": { + "character": 12, + "line": 18, + "path": "inlineplz\\interfaces\\github.py", + "module": "inlineplz.interfaces.github", + "function": "GitHubInterface.__init__" + }, + "code": "redefined-variable-type", + "message": "Redefinition of self.gh type from github3.github.GitHub to github3.github.GitHubEnterprise" + }, + { + "source": "pylint", + "location": { + "character": 0, + "line": 4, + "path": "inlineplz\\interfaces\\github.py", + "module": "inlineplz.interfaces.github", + 
"function": null + }, + "code": "unused-import", + "message": "Unused import os.path" + }, + { + "source": "pylint", + "location": { + "character": 20, + "line": 58, + "path": "inlineplz\\interfaces\\github.py", + "module": "inlineplz.interfaces.github", + "function": "GitHubInterface.position" + }, + "code": "unused-variable", + "message": "Unused variable 'hunk_no'" + }, + { + "source": "pylint", + "location": { + "character": 4, + "line": 14, + "path": "inlineplz\\interfaces\\github.py", + "module": "inlineplz.interfaces.github", + "function": "GitHubInterface.__init__" + }, + "code": "too-many-arguments", + "message": "Too many arguments (6/5)" + }, + { + "source": "pep257", + "location": { + "character": 0, + "line": 1, + "path": "inlineplz\\__init__.py", + "module": null, + "function": null + }, + "code": "D104", + "message": "Missing docstring in public package" + } + ], + "summary": { + "strictness": "from profile", + "tools": [ + "dodgy", + "mccabe", + "pep257", + "pep8", + "profile-validator", + "pyflakes", + "pylint" + ], + "formatter": "json", + "started": "2016-01-31 14:43:39.317922", + "profiles": ".prospector.yaml, full_pep8, doc_warnings, no_test_warnings, strictness_veryhigh, no_member_warnings", + "time_taken": "3.94", + "completed": "2016-01-31 14:43:43.256803", + "libraries": [], + "message_count": 32 + } +}
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 2, "test_score": 2 }, "num_modified_files": 2 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "numpy>=1.16.0", "pandas>=1.0.0", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi==2025.1.31 cffi==1.17.1 charset-normalizer==3.4.1 cryptography==44.0.2 exceptiongroup==1.2.2 github3.py==4.0.1 idna==3.10 iniconfig==2.1.0 -e git+https://github.com/guykisel/inline-plz.git@59cb7f9721ca3390fa31c48583e786e3728e8f1a#egg=inlineplz numpy==2.0.2 packaging==24.2 pandas==2.2.3 pluggy==1.5.0 pycparser==2.22 PyJWT==2.10.1 pytest==8.3.5 python-dateutil==2.9.0.post0 pytz==2025.2 requests==2.32.3 six==1.17.0 tomli==2.2.1 tzdata==2025.2 unidiff==0.7.5 uritemplate==4.1.1 urllib3==2.3.0 xmltodict==0.14.2
name: inline-plz channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - certifi==2025.1.31 - cffi==1.17.1 - charset-normalizer==3.4.1 - cryptography==44.0.2 - exceptiongroup==1.2.2 - github3-py==4.0.1 - idna==3.10 - iniconfig==2.1.0 - numpy==2.0.2 - packaging==24.2 - pandas==2.2.3 - pluggy==1.5.0 - pycparser==2.22 - pyjwt==2.10.1 - pytest==8.3.5 - python-dateutil==2.9.0.post0 - pytz==2025.2 - requests==2.32.3 - six==1.17.0 - tomli==2.2.1 - tzdata==2025.2 - unidiff==0.7.5 - uritemplate==4.1.1 - urllib3==2.3.0 - xmltodict==0.14.2 prefix: /opt/conda/envs/inline-plz
[ "tests/parsers/test_prospector.py::test_prospector" ]
[]
[]
[]
ISC License
407
guykisel__inline-plz-29
3cfa8f2c3f0fd814c105ca4d51f0727659a45fa8
2016-01-31 23:15:58
3cfa8f2c3f0fd814c105ca4d51f0727659a45fa8
diff --git a/inlineplz/interfaces/github.py b/inlineplz/interfaces/github.py index 005b8ff..4dd4397 100644 --- a/inlineplz/interfaces/github.py +++ b/inlineplz/interfaces/github.py @@ -26,14 +26,14 @@ class GitHubInterface(InterfaceBase): def post_messages(self, messages): messages_to_post = 0 for msg in messages: - if not msg.content: + if not msg.comments: continue msg_position = self.position(msg) if msg_position: messages_to_post += 1 if not self.is_duplicate(msg, msg_position): self.pull_request.create_review_comment( - msg.content, + self.format_message(msg), self.last_sha, msg.path, msg_position @@ -44,10 +44,22 @@ class GitHubInterface(InterfaceBase): for comment in self.pull_request.review_comments(): if (comment.position == position and comment.path == message.path and - comment.body.strip() == message.content.strip()): + comment.body.strip() == self.format_message(message).strip()): return True return False + @staticmethod + def format_message(message): + if not message.comments: + return '' + if len(message.comments) > 1: + return ( + '```\n' + + '\n'.join(sorted(list(message.comments))) + + '\n```' + ) + return '`{0}`'.format(list(message.comments)[0].strip()) + def position(self, message): """Calculate position within the PR, which is not the line number""" patch = unidiff.PatchSet(self.diff.split('\n')) diff --git a/inlineplz/linters/__init__.py b/inlineplz/linters/__init__.py index 420508e..458fd7c 100644 --- a/inlineplz/linters/__init__.py +++ b/inlineplz/linters/__init__.py @@ -9,6 +9,7 @@ import subprocess import traceback from inlineplz import parsers +from inlineplz import message LINTERS = { 'prospector': { @@ -44,7 +45,7 @@ LINTERS = { def lint(install=False): - messages = [] + messages = message.Messages() for config in LINTERS.values(): if any(dotfile in os.listdir(os.getcwd()) for dotfile in config.get('dotfiles')): @@ -52,7 +53,7 @@ def lint(install=False): if install and config.get('install'): subprocess.check_call(config.get('install')) output = subprocess.check_output(config.get('run')).decode('utf-8') - messages.extend(config.get('parser')().parse(output)) + messages.add_messages(config.get('parser')().parse(output)) except subprocess.CalledProcessError: traceback.print_exc() - return messages + return messages.get_messages() diff --git a/inlineplz/message.py b/inlineplz/message.py index 61011a4..da2c722 100644 --- a/inlineplz/message.py +++ b/inlineplz/message.py @@ -1,12 +1,32 @@ # -*- coding: utf-8 -*- +import os + + +class Messages(object): + + def __init__(self): + self.messages = {} + + def add_message(self, path, line, message): + if (path, line) not in self.messages: + self.messages[(path, line)] = Message(path, line) + self.messages[(path, line)].append(message) + + def add_messages(self, messages): + for message in messages: + self.add_message(*message) + + def get_messages(self): + return self.messages.values() + class Message(object): def __init__(self, path, line_number): - self.path = path.replace('\\', '/') + self.path = os.path.relpath(path).replace('\\', '/') self.line_number = line_number - self.comments = [] + self.comments = set() def __str__(self): return """ @@ -16,10 +36,5 @@ Message: Content: {2} """.format(self.path, self.line_number, self.content).strip() - @property - def content(self): - if not self.comments: - return '' - if len(self.comments) > 1: - return '```\n' + '\n'.join(self.comments) + '\n```' - return '`{0}`'.format(self.comments[0].strip()) + def append(self, message): + self.comments.add(message) diff --git 
a/inlineplz/parsers/eslint.py b/inlineplz/parsers/eslint.py index 6a0c211..f30d7b2 100644 --- a/inlineplz/parsers/eslint.py +++ b/inlineplz/parsers/eslint.py @@ -5,24 +5,21 @@ from collections import OrderedDict import json from inlineplz.parsers.base import ParserBase -from inlineplz.message import Message class ESLintParser(ParserBase): """Parse json eslint output.""" def parse(self, lint_data): - messages = [] + messages = set() for filedata in json.loads( lint_data, object_pairs_hook=OrderedDict ): if filedata.get('messages'): for msgdata in filedata['messages']: - msg = Message( - filedata.get('filePath'), - msgdata.get('line') - ) - msg.comments.append(msgdata.get('message')) - messages.append(msg) + path = filedata['filePath'] + line = msgdata['line'] + msgbody = msgdata['message'] + messages.add((path, line, msgbody)) return messages diff --git a/inlineplz/parsers/jscs.py b/inlineplz/parsers/jscs.py index e3fd72c..b80d8db 100644 --- a/inlineplz/parsers/jscs.py +++ b/inlineplz/parsers/jscs.py @@ -5,24 +5,21 @@ from collections import OrderedDict import json from inlineplz.parsers.base import ParserBase -from inlineplz.message import Message class JSCSParser(ParserBase): """Parse json jscs output.""" def parse(self, lint_data): - messages = [] + messages = set() for filename, msgs in json.loads( lint_data, object_pairs_hook=OrderedDict ).items(): if msgs: for msgdata in msgs: - msg = Message( - filename, - msgdata.get('line') - ) - msg.comments.append(msgdata.get('message')) - messages.append(msg) + path = filename + line = msgdata['line'] + msgbody = msgdata['message'] + messages.add((path, line, msgbody)) return messages diff --git a/inlineplz/parsers/jshint.py b/inlineplz/parsers/jshint.py index 9c8beee..7e8863f 100644 --- a/inlineplz/parsers/jshint.py +++ b/inlineplz/parsers/jshint.py @@ -4,24 +4,21 @@ from __future__ import absolute_import import xmltodict from inlineplz.parsers.base import ParserBase -from inlineplz.message import Message class JSHintParser(ParserBase): """Parse json jshint output.""" def parse(self, lint_data): - messages = [] + messages = set() obj = xmltodict.parse(lint_data) for filedata in obj['checkstyle']['file']: for errordata in filedata['error']: try: - msg = Message( - filedata.get('@name'), - int(errordata.get('@line')) - ) - msg.comments.append(errordata.get('@message')) - messages.append(msg) - except AttributeError: + path = filedata['@name'] + line = int(errordata['@line']) + msgbody = errordata['@message'] + messages.add((path, line, msgbody)) + except (AttributeError, TypeError): pass return messages diff --git a/inlineplz/parsers/prospector.py b/inlineplz/parsers/prospector.py index 8146c2a..43b8eb6 100644 --- a/inlineplz/parsers/prospector.py +++ b/inlineplz/parsers/prospector.py @@ -5,27 +5,23 @@ from collections import OrderedDict import json from inlineplz.parsers.base import ParserBase -from inlineplz.message import Message class ProspectorParser(ParserBase): """Parse json prospector output.""" def parse(self, lint_data): - messages = [] + messages = set() for msgdata in json.loads( lint_data, object_pairs_hook=OrderedDict ).get('messages'): - msg = Message( - msgdata['location']['path'], - msgdata['location']['line'] - ) + path = msgdata['location']['path'] + line = msgdata['location']['line'] msgbody = '{0}: {1} ({2})'.format( msgdata['source'], msgdata['message'], msgdata['code'] ) - msg.comments.append(msgbody) - messages.append(msg) + messages.add((path, line, msgbody)) return messages
move message formatting logic from message module into interface modules
guykisel/inline-plz
diff --git a/tests/parsers/test_eslint.py b/tests/parsers/test_eslint.py index d8e765b..289099d 100644 --- a/tests/parsers/test_eslint.py +++ b/tests/parsers/test_eslint.py @@ -15,7 +15,7 @@ eslint_path = os.path.join( def test_eslint(): with open(eslint_path) as inputfile: - messages = eslint.ESLintParser().parse(inputfile.read()) - assert messages[0].content == '`Parsing error: Illegal return statement`' - assert messages[0].line_number == 17 - assert messages[0].path == 'C:/Users/Guy/Documents/jshint/tests/unit/fixtures/asi.js' + messages = sorted(list(eslint.ESLintParser().parse(inputfile.read()))) + assert messages[0][2] == 'Parsing error: Illegal return statement' + assert messages[0][1] == 17 + assert messages[0][0] == 'C:\\Users\\Guy\\Documents\\jshint\\tests\\unit\\fixtures\\asi.js' diff --git a/tests/parsers/test_jscs.py b/tests/parsers/test_jscs.py index e0577f6..d71c08f 100644 --- a/tests/parsers/test_jscs.py +++ b/tests/parsers/test_jscs.py @@ -15,7 +15,7 @@ jscs_path = os.path.join( def test_jscs(): with open(jscs_path) as inputfile: - messages = jscs.JSCSParser().parse(inputfile.read()) - assert messages[0].content == '`maximumLineLength: Line must be at most 100 characters`' - assert messages[0].line_number == 1 - assert messages[0].path == './data/non-ascii-identifier-part-only.js' + messages = sorted(list(jscs.JSCSParser().parse(inputfile.read()))) + assert messages[0][2] == 'maximumLineLength: Line must be at most 100 characters' + assert messages[0][1] == 1 + assert messages[0][0] == './data/non-ascii-identifier-part-only.js' diff --git a/tests/parsers/test_jshint.py b/tests/parsers/test_jshint.py index fb4c4c6..f1d8c79 100644 --- a/tests/parsers/test_jshint.py +++ b/tests/parsers/test_jshint.py @@ -15,7 +15,7 @@ jshint_path = os.path.join( def test_jshint(): with open(jshint_path) as inputfile: - messages = jshint.JSHintParser().parse(inputfile.read()) - assert messages[0].content == '`Use the function form of "use strict". (W097)`' - assert messages[0].line_number == 8 - assert messages[0].path == 'conf/cli-options.js' + messages = sorted(list(jshint.JSHintParser().parse(inputfile.read()))) + assert messages[0][2] == 'Use the function form of "use strict". 
(W097)' + assert messages[0][1] == 7 + assert messages[0][0] == 'Makefile.js' diff --git a/tests/parsers/test_prospector.py b/tests/parsers/test_prospector.py index fbba037..4b3e569 100644 --- a/tests/parsers/test_prospector.py +++ b/tests/parsers/test_prospector.py @@ -16,13 +16,15 @@ prospector_path = os.path.join( def test_prospector(): with open(prospector_path) as inputfile: - messages = prospector.ProspectorParser().parse(inputfile.read()) - assert messages[0].content == '`pep257: Missing docstring in public package (D104)`' - assert messages[0].line_number == 1 - assert messages[0].path == 'inlineplz/util/__init__.py' - assert messages[1].content == '`pep257: Missing docstring in public package (D104)`' - assert messages[1].line_number == 1 - assert messages[1].path == 'inlineplz/parsers/__init__.py' - assert messages[9].content == ('`pep257: One-line docstring should fit on one line with quotes (found 2) (D200)`') - assert messages[9].line_number == 1 + messages = sorted(list(prospector.ProspectorParser().parse(inputfile.read()))) + assert messages[0][2] == 'pep257: Missing docstring in public package (D104)' + assert messages[0][1] == 1 + assert messages[0][0] == 'inlineplz\__init__.py' + + assert messages[1][2] == 'pep257: Missing docstring in public package (D104)' + assert messages[1][1] == 1 + assert messages[1][0] == 'inlineplz\interfaces\__init__.py' + + assert messages[9][2] == 'pep257: Missing docstring in public package (D104)' + assert messages[9][1] == 1 assert len(messages) == 32
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 3, "test_score": 2 }, "num_modified_files": 7 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.5", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 cffi==1.15.1 charset-normalizer==2.0.12 cryptography==40.0.2 github3.py==3.2.0 idna==3.10 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work -e git+https://github.com/guykisel/inline-plz.git@3cfa8f2c3f0fd814c105ca4d51f0727659a45fa8#egg=inlineplz more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work py @ file:///opt/conda/conda-bld/py_1644396412707/work pycparser==2.21 PyJWT==2.4.0 pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 python-dateutil==2.9.0.post0 requests==2.27.1 six==1.17.0 toml @ file:///tmp/build/80754af9/toml_1616166611790/work typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work unidiff==0.7.5 uritemplate==4.1.1 urllib3==1.26.20 xmltodict==0.14.2 zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: inline-plz channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - cffi==1.15.1 - charset-normalizer==2.0.12 - cryptography==40.0.2 - github3-py==3.2.0 - idna==3.10 - pycparser==2.21 - pyjwt==2.4.0 - python-dateutil==2.9.0.post0 - requests==2.27.1 - six==1.17.0 - unidiff==0.7.5 - uritemplate==4.1.1 - urllib3==1.26.20 - xmltodict==0.14.2 prefix: /opt/conda/envs/inline-plz
[ "tests/parsers/test_eslint.py::test_eslint", "tests/parsers/test_jscs.py::test_jscs", "tests/parsers/test_jshint.py::test_jshint", "tests/parsers/test_prospector.py::test_prospector" ]
[]
[]
[]
ISC License
408
JonathonReinhart__scuba-42
9aa705d7d0419f1930ae034e2210d69f66f5bf2a
2016-02-01 13:01:13
9aa705d7d0419f1930ae034e2210d69f66f5bf2a
diff --git a/scuba/__main__.py b/scuba/__main__.py index 57f8671..4a54fae 100755 --- a/scuba/__main__.py +++ b/scuba/__main__.py @@ -217,7 +217,7 @@ def main(argv=None): if g_verbose or scuba_args.dry_run: appmsg('Docker command line:') - print(format_cmdline(run_args)) + print('$ ' + format_cmdline(run_args)) if scuba_args.dry_run: appmsg('Exiting for dry run. Temporary files not removed:') diff --git a/scuba/utils.py b/scuba/utils.py index f4d742b..0bd2e0e 100644 --- a/scuba/utils.py +++ b/scuba/utils.py @@ -3,13 +3,33 @@ try: except ImportError: from pipes import quote as shell_quote + def format_cmdline(args, maxwidth=80): + '''Format args into a shell-quoted command line. + + The result will be wrapped to maxwidth characters where possible, + not breaking a single long argument. + ''' + + # Leave room for the space and backslash at the end of each line + maxwidth -= 2 + def lines(): line = '' for a in (shell_quote(a) for a in args): - if len(line) + len(a) > maxwidth: + # If adding this argument will make the line too long, + # yield the current line, and start a new one. + if len(line) + len(a) + 1 > maxwidth: yield line line = '' - line += ' ' + a - return '$' + ' \\\n'.join(lines()) + # Append this argument to the current line, separating + # it by a space from the existing arguments. + if line: + line += ' ' + a + else: + line = a + + yield line + + return ' \\\n'.join(lines())
utils.format_cmdline misses last line It appears that utils.format_cmdline fails to yield the last accumulated line. This also means that a better test could be written, which splits the result back out, and compares it to the list of input arguments.
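The round-trip check suggested in the issue above can be sketched independently of scuba's own test suite. The helper below is illustrative only: it assumes a `format_cmdline(args)` function that shell-quotes its arguments and wraps them with trailing-backslash line continuations, which is the behaviour this issue describes.

```python
import shlex
from itertools import chain

def roundtrip_check(format_cmdline, args):
    """Format args, then split the result back out and compare."""
    formatted = format_cmdline(args)
    # Drop the trailing backslash and surrounding whitespace from each
    # wrapped line, then re-tokenize with shlex to undo the quoting.
    lines = [line.rstrip('\\').strip() for line in formatted.splitlines()]
    out_args = list(chain.from_iterable(shlex.split(line) for line in lines))
    assert out_args == list(args), (out_args, list(args))
```

A generator-based implementation that only yields when the width limit is exceeded fails this check: whatever is accumulated after the last overflow never makes it into the output.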
JonathonReinhart/scuba
diff --git a/tests/test_utils.py b/tests/test_utils.py new file mode 100644 index 0000000..ff9ad97 --- /dev/null +++ b/tests/test_utils.py @@ -0,0 +1,49 @@ +from __future__ import print_function + +from nose.tools import * +from unittest import TestCase + +import logging +import shlex +from itertools import chain + +from .utils import * + +import scuba.utils + + +class TestUtils(TestCase): + + def _parse_cmdline(self, cmdline): + # Strip the formatting and whitespace + lines = [l.rstrip('\\').strip() for l in cmdline.splitlines()] + + # Split each line, and return a flattened list of arguments + return chain.from_iterable(map(shlex.split, lines)) + + def _test_format_cmdline(self, args): + + # Call the unit-under-test to get the formatted command line + result = scuba.utils.format_cmdline(args) + + # Parse the result back out to a list of arguments + out_args = self._parse_cmdline(result) + + # Verify that they match + assert_seq_equal(out_args, args) + + + def test_basic(self): + '''format_cmdline works as expected''' + + self._test_format_cmdline([ + 'something', + '-a', + '-b', + '--long', 'option text', + '-s', 'hort', + 'a very long argument here that will end up on its own line because it is so wide and nothing else will fit at the default width', + 'and now', + 'some', 'more', 'stuff', + 'and even more stuff', + ])
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 1 }, "num_modified_files": 2 }
1.4
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "nose", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.5", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 coverage==3.7.1 importlib-metadata==4.8.3 iniconfig==1.1.1 nose==1.3.7 packaging==21.3 pluggy==1.0.0 py==1.11.0 pyparsing==3.1.4 pytest==7.0.1 PyYAML==6.0.1 -e git+https://github.com/JonathonReinhart/scuba.git@9aa705d7d0419f1930ae034e2210d69f66f5bf2a#egg=SCUBA tomli==1.2.3 typing_extensions==4.1.1 zipp==3.6.0
name: scuba channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - coverage==3.7.1 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - nose==1.3.7 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - pyyaml==6.0.1 - tomli==1.2.3 - typing-extensions==4.1.1 - zipp==3.6.0 prefix: /opt/conda/envs/scuba
[ "tests/test_utils.py::TestUtils::test_basic" ]
[]
[]
[]
MIT License
409
geowurster__tinymr-17
5dbf46845a8caba995916f76d1e681860f1e198f
2016-02-02 06:14:38
5dbf46845a8caba995916f76d1e681860f1e198f
diff --git a/.travis.yml b/.travis.yml index 8813f7b..04f68af 100644 --- a/.travis.yml +++ b/.travis.yml @@ -15,6 +15,7 @@ python: - pypy3 install: + - pip install pip setuptools --upgrade - pip install -e .\[dev\] script: diff --git a/tinymr/_mrtools.py b/tinymr/_mrtools.py index 1a200db..ce3f9ef 100644 --- a/tinymr/_mrtools.py +++ b/tinymr/_mrtools.py @@ -82,7 +82,8 @@ def sort_partitioned_values(kv_stream): return ((k, sorter(v, key=lambda x: x[0])) for k, v in kv_stream) -class ReduceJob(namedtuple('ReduceJob', ['reducer', 'sort', 'jobs', 'chunksize'])): +class ReduceJobConf( + namedtuple('ReduceJob', ['reducer', 'sort', 'jobs', 'chunksize'])): """ Describes a reduce job. Makes keeping track of multiple reducers easier. diff --git a/tinymr/base.py b/tinymr/base.py index eba8905..c29bbb6 100644 --- a/tinymr/base.py +++ b/tinymr/base.py @@ -3,7 +3,9 @@ Base classes. Subclass away! """ +import inspect from itertools import chain +import logging import six @@ -201,7 +203,8 @@ class BaseMapReduce(object): Sort the output from each `reducer()` before executing the next or before being passed to `output()`. - Define one property per reducer, so `reducer2()` would be `sort_reduce2`. + Define one property per reducer, so `reducer2()` would be + `sort_reduce2`. Returns ------- @@ -210,6 +213,16 @@ class BaseMapReduce(object): return self.sort + @property + def logger(self): + + """ + Each MapReduce task gets its own logger with a name like + `tinymr-ClassName`. + """ + + return logging.getLogger('tinymr-{}'.format(self.__class__.__name__)) + def close(self): """ @@ -318,18 +331,43 @@ class BaseMapReduce(object): return ((key, tuple(values)) for key, values in pairs) @property - def _reduce_jobs(self): - - reducers = tools.sorter(filter( - lambda x: not x.startswith('_') and 'reducer' in x, - dir(self))) - - for r in reducers: - yield _mrtools.ReduceJob( - reducer=getattr(self, r), - sort=getattr(self, 'sort_{}'.format(r.replace('reducer', 'reduce'))), - jobs=getattr(self, '{}_jobs'.format(r.replace('reducer', 'reduce'))), - chunksize=getattr(self, '{}_jobs'.format(r.replace('reducer', 'reduce')))) + def _reduce_job_confs(self): + + """ + The user can define multiple reduce operations, each with their own + independent job configuration, to be executed in a specified order. + This method produces one `_mrtools.ReduceJobConf()` per reduce + operation in execution order. + + Returns + ------- + tuple + """ + + # We encourage user's to add their own properties and methods, so + # we want to be really confident that we are _only_ grabbing the + # reducer methods, otherwise difficult to debug failures might pop up. + # Can't assume the reducers were defined in order. 
+ reducers = {} + for method in (m for m in dir(self) if m != '_reduce_job_confs'): + + if method.startswith('reducer') and \ + inspect.ismethod(getattr(self, method)): + + str_idx = method.lstrip('reducer') or '-1' + + sort_method = 'sort_reduce{}'.format(str_idx) + jobs_method = 'reduce{}_jobs'.format(str_idx) + chunksize_method = 'reduce{}_chunksize'.format(str_idx) + + reducers[int(str_idx)] = _mrtools.ReduceJobConf( + reducer=getattr(self, method), + sort=getattr(self, sort_method, self.sort_reduce), + jobs=getattr(self, jobs_method, self.reduce_jobs), + chunksize=getattr( + self, chunksize_method, self.reduce_chunksize)) + + return [reducers.pop(i) for i in sorted(reducers.keys())] def _map_combine_partition(self, stream): diff --git a/tinymr/memory.py b/tinymr/memory.py index f48b4a9..97bf1d6 100644 --- a/tinymr/memory.py +++ b/tinymr/memory.py @@ -12,38 +12,54 @@ from tinymr import tools from tinymr.tools import runner -logger = logging.getLogger('tinymr') -logger.setLevel(logging.DEBUG) - - class MemMapReduce(base.BaseMapReduce): - def __call__(self, stream): + def __call__(self, stream, log_level=logging.NOTSET): + + original_log_level = self.logger.level + self.logger.setLevel(log_level) sliced = tools.slicer(stream, self.map_chunksize) # Map, partition, combine, partition + self.logger.info( + "Running map, combine, and partition phase with %s jobs, chunksize " + "%s, sort_map=%s, and sort_combine=%s", + self.map_jobs, self.map_chunksize, self.sort_map, self.sort_combine) + with runner(self._map_combine_partition, sliced, self.map_jobs) as mcp: partitioned = tools.merge_partitions(*mcp, sort=self.sort_combine) + self.logger.info("Finished map with %s keys", len(partitioned)) + self.logger.info("Initializing reduce phase") self.init_reduce() # Run all partition jobs reducer_input = partitioned - for rj in self._reduce_jobs: + for rj in self._reduce_job_confs: + self.logger.info("Running reduce job %s", rj) + + # Pin the reduce job so we can treat it like a lambda func = functools.partial( self._reduce_partition, reducer=rj.reducer, sort=rj.sort) reducer_input = _mrtools.strip_sort_key(reducer_input) sliced = tools.slicer(reducer_input, rj.chunksize) - with runner(func, sliced, rj.jobs) as reduced: partitioned = tools.merge_partitions(*reduced, sort=rj.sort) + self.logger.info( + "Finished reduce job %s with %s keys", rj, len(partitioned)) + partitioned = _mrtools.strip_sort_key(partitioned) + self.logger.info("Sorting output data by key") if self.sort_output: partitioned = self._output_sorter(partitioned) - return self.output(partitioned) + try: + self.logger.info("Producing output dataset") + return self.output(partitioned) + finally: + self.logger.setLevel(original_log_level)
ReduceJob().chunksize is populated with reduce_jobs
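Stripped of the surrounding machinery, the bug named in the title comes down to the chunksize field being looked up from the `*_jobs` attribute instead of `*_chunksize` when the reduce job description is built. The snippet below is a simplified, hypothetical illustration of that mix-up, not the project's actual code.

```python
from collections import namedtuple

ReduceJob = namedtuple('ReduceJob', ['jobs', 'chunksize'])

class Task(object):
    reduce_jobs = 4
    reduce_chunksize = 10

task = Task()

# Buggy construction: both fields read the "jobs" attribute.
buggy = ReduceJob(
    jobs=getattr(task, 'reduce_jobs'),
    chunksize=getattr(task, 'reduce_jobs'))

# Intended construction: chunksize reads its own attribute.
fixed = ReduceJob(
    jobs=getattr(task, 'reduce_jobs'),
    chunksize=getattr(task, 'reduce_chunksize'))

assert buggy.chunksize == 4   # silently takes the jobs value
assert fixed.chunksize == 10
```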
geowurster/tinymr
diff --git a/tests/test_base.py b/tests/test_base.py index 7468888..47309ac 100644 --- a/tests/test_base.py +++ b/tests/test_base.py @@ -5,6 +5,7 @@ Unittests for tinymr.base import pytest +from tinymr import _mrtools from tinymr import base from tinymr import errors @@ -36,6 +37,48 @@ def test_default_methods(): assert list(mr.output(expected)) == expected +def test_reduce_job_confs(): + # Make sure attributes are coming from the correct location + class MR(base.BaseMapReduce): + + jobs = 4 + reduce2_chunksize = 10 + reduce10_jobs = 2 + sort = False + sort_reduce2 = True + + # Define out of order to test sorting + def reducer10(self, key, values): + pass + + def reducer(self, key, values): + pass + + def reducer2(self, key, values): + pass + + mr = MR() + + rj = _mrtools.ReduceJobConf( + reducer=mr.reducer, + sort=False, + jobs=4, + chunksize=1) + rj2 = _mrtools.ReduceJobConf( + reducer=mr.reducer2, + sort=True, + jobs=4, + chunksize=10) + rj10 = _mrtools.ReduceJobConf( + reducer=mr.reducer10, + sort=False, + jobs=2, + chunksize=1) + + assert mr._reduce_job_confs == [rj, rj2, rj10] + + + # def test_context_manager(): # # class MapReduce(base.BaseMapReduce):
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 3, "test_score": 2 }, "num_modified_files": 4 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.5", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 charset-normalizer==2.0.12 coverage==6.2 coveralls==3.3.1 docopt==0.6.2 idna==3.10 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work py @ file:///opt/conda/conda-bld/py_1644396412707/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 pytest-cov==4.0.0 requests==2.27.1 six==1.17.0 -e git+https://github.com/geowurster/tinymr.git@5dbf46845a8caba995916f76d1e681860f1e198f#egg=tinymr toml @ file:///tmp/build/80754af9/toml_1616166611790/work tomli==1.2.3 typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work urllib3==1.26.20 zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: tinymr channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - charset-normalizer==2.0.12 - coverage==6.2 - coveralls==3.3.1 - docopt==0.6.2 - idna==3.10 - pytest-cov==4.0.0 - requests==2.27.1 - six==1.17.0 - tomli==1.2.3 - urllib3==1.26.20 prefix: /opt/conda/envs/tinymr
[ "tests/test_base.py::test_reduce_job_confs" ]
[]
[ "tests/test_base.py::test_not_implemented_methods", "tests/test_base.py::test_default_settings", "tests/test_base.py::test_default_methods" ]
[]
New BSD License
410
mmerickel__pyramid_services-12
85f423102ec14195698b1e44b12fccf30650679d
2016-02-03 20:48:38
85f423102ec14195698b1e44b12fccf30650679d
diff --git a/CHANGES.txt b/CHANGES.txt index 8d05981..6240712 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,8 +1,21 @@ unreleased ========== +Backward Incompatibilities +-------------------------- + - Drop Python 3.2 support. +- Use the original service context interface as the cache key instead + of the current context. This means the service will be properly created + only once for any context satisfying the original interface. + + Previously, if you requested the same service from 2 different contexts + in the same request you would receive 2 service objects, instead of + a cached version of the original service, assuming the service was + registered to satisfy both contexts. + See https://github.com/mmerickel/pyramid_services/pull/12 + 0.3 (2015-12-13) ================ diff --git a/pyramid_services/__init__.py b/pyramid_services/__init__.py index 9b80908..197e94a 100644 --- a/pyramid_services/__init__.py +++ b/pyramid_services/__init__.py @@ -25,6 +25,11 @@ def includeme(config): config.add_directive('register_service_factory', register_service_factory) config.add_directive('find_service_factory', find_service_factory) +class ServiceInfo(object): + def __init__(self, factory, context_iface): + self.factory = factory + self.context_iface = context_iface + class SingletonServiceWrapper(object): def __init__(self, service): self.service = service @@ -64,13 +69,15 @@ def register_service_factory( else: context_iface = context + info = ServiceInfo(service_factory, context_iface) + def register(): adapters = config.registry.adapters adapters.register( (IServiceClassifier, context_iface), iface, name, - service_factory, + info, ) discriminator = ('service factories', (iface, context, name)) @@ -101,11 +108,16 @@ def find_service(request, iface=Interface, context=_marker, name=''): svc = cache.lookup(svc_types, iface, name=name, default=None) if svc is None: adapters = request.registry.adapters - svc_factory = adapters.lookup(svc_types, iface, name=name) - if svc_factory is None: + info = adapters.lookup(svc_types, iface, name=name) + if info is None: raise ValueError('could not find registered service') - svc = svc_factory(context, request) - cache.register(svc_types, iface, name, svc) + svc = info.factory(context, request) + cache.register( + (IServiceClassifier, info.context_iface), + iface, + name, + svc, + ) return svc def find_service_factory( @@ -118,7 +130,7 @@ def find_service_factory( svc_types = (IServiceClassifier, context_iface) adapters = config_or_request.registry.adapters - svc_factory = adapters.lookup(svc_types, iface, name=name) - if svc_factory is None: + info = adapters.lookup(svc_types, iface, name=name) + if info is None: raise ValueError('could not find registered service') - return svc_factory + return info.factory
Singleton per request object

One issue I ran into with your **dbsession** service example (that uses a service factory) is the following: When looking for the service different sessions are returned depending on the context. This is by design. Citing the documentation:

> The factory will be used at most once per request/context/name combination.

Having different DB session in one request is quite ugly and I can think of no use case for it.

How did I end up with different contexts? By using the *request* object outside a view function invoked directly with *pyramid.threadlocal.get_current_request*. In that case the context is None.

This example by [Jon Rosebaugh - What the Zope Transaction Manager Means To Me (and you)](https://metaclassical.com/what-the-zope-transaction-manager-means-to-me-and-you/) would work but I wanted to use *pyramid_services* (for consistency, because I'm already using it in other parts of the application).

I'm aware that this might not be the place for this since it classifies more as a question than an issue, but you could at least fix the example to be more clear that

```python
request.find_service(name='db')
```

will return different sessions depending on the request context.

My workaround is to always pass context=None:

```python
request.find_service(name='db', context=None)
```

Is there a simpler solution (e.g. a singleton per request)? *register_service()* would register a singleton object for the whole application, but I do want a different DB session for every request.
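The workaround described above can be sketched as follows. This is only an illustration: `Session` is a stand-in for a real per-request resource (e.g. a SQLAlchemy session), and the only pyramid_services calls used are `register_service_factory` and `find_service`, both of which appear in the patch for this issue.

```python
class Session(object):
    """Placeholder for a real per-request resource."""

def dbsession_factory(context, request):
    # Invoked at most once per request/context/name combination.
    return Session()

def includeme(config):
    config.include('pyramid_services')
    config.register_service_factory(dbsession_factory, name='db')

# In code that runs outside a view (where the context may be None),
# pinning context=None keeps the cache key stable, so every lookup in
# the same request returns the same session object:
#
#     dbsession = request.find_service(name='db', context=None)
```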
mmerickel/pyramid_services
diff --git a/pyramid_services/tests/test_it.py b/pyramid_services/tests/test_it.py index 1e482a3..5abeec3 100644 --- a/pyramid_services/tests/test_it.py +++ b/pyramid_services/tests/test_it.py @@ -256,6 +256,25 @@ class TestIntegration_register_service_factory(unittest.TestCase): self.assertEqual(resp.body, b'foo') self.assertEqual(called, [True]) + def test_context_does_not_affect_cache(self): + config = self.config + + config.register_service_factory( + lambda ctx, req: DummyService('foo'), name='foo') + + def dummy_view(context, request): + s1 = request.find_service(name='foo', context=Root()) + s2 = request.find_service(name='foo', context=Leaf()) + self.assertTrue(s1 is s2) + return s1.result + + config.add_view(dummy_view, renderer='string') + + app = self._makeApp() + resp = app.get('/') + self.assertEqual(resp.body, b'foo') + + class TestIntegration_find_service_factory(unittest.TestCase): def setUp(self): self.config = pyramid.testing.setUp() @@ -305,12 +324,12 @@ class DummyService(object): class DummyServiceFactory(object): def __init__(self, result): - self.result = DummyService(result) + self.result = result def __call__(self, context, request): self.context = context self.request = request - return self.result + return DummyService(self.result) class DummyView(object): def __init__(self, *a, **kw): @@ -318,5 +337,5 @@ class DummyView(object): self.kw = kw def __call__(self, request): - svc = request.find_service(*self.a, **self.kw) - return svc() + self.svc = request.find_service(*self.a, **self.kw) + return self.svc()
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 1 }, "num_modified_files": 2 }
0.3
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[testing]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.5", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work beautifulsoup4==4.12.3 certifi==2021.5.30 coverage==6.2 hupper==1.10.3 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work nose==1.3.7 packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work PasteDeploy==2.1.1 plaster==1.0 plaster-pastedeploy==0.7 pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work py @ file:///opt/conda/conda-bld/py_1644396412707/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pyramid==2.0.2 -e git+https://github.com/mmerickel/pyramid_services.git@85f423102ec14195698b1e44b12fccf30650679d#egg=pyramid_services pytest==6.2.4 soupsieve==2.3.2.post1 toml @ file:///tmp/build/80754af9/toml_1616166611790/work translationstring==1.4 typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work venusian==3.0.0 waitress==2.0.0 WebOb==1.8.9 WebTest==3.0.0 zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work zope.deprecation==4.4.0 zope.interface==5.5.2
name: pyramid_services channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - beautifulsoup4==4.12.3 - coverage==6.2 - hupper==1.10.3 - nose==1.3.7 - pastedeploy==2.1.1 - plaster==1.0 - plaster-pastedeploy==0.7 - pyramid==2.0.2 - soupsieve==2.3.2.post1 - translationstring==1.4 - venusian==3.0.0 - waitress==2.0.0 - webob==1.8.9 - webtest==3.0.0 - zope-deprecation==4.4.0 - zope-interface==5.5.2 prefix: /opt/conda/envs/pyramid_services
[ "pyramid_services/tests/test_it.py::TestIntegration_register_service_factory::test_context_does_not_affect_cache" ]
[]
[ "pyramid_services/tests/test_it.py::TestIntegration_register_service::test_context_sensitive", "pyramid_services/tests/test_it.py::TestIntegration_register_service::test_iface", "pyramid_services/tests/test_it.py::TestIntegration_register_service::test_introspectable", "pyramid_services/tests/test_it.py::TestIntegration_register_service::test_name", "pyramid_services/tests/test_it.py::TestIntegration_register_service_factory::test_context_sensitive", "pyramid_services/tests/test_it.py::TestIntegration_register_service_factory::test_iface", "pyramid_services/tests/test_it.py::TestIntegration_register_service_factory::test_introspectable", "pyramid_services/tests/test_it.py::TestIntegration_register_service_factory::test_name", "pyramid_services/tests/test_it.py::TestIntegration_register_service_factory::test_with_no_context", "pyramid_services/tests/test_it.py::TestIntegration_find_service_factory::test_find_service_factory", "pyramid_services/tests/test_it.py::TestIntegration_find_service_factory::test_find_service_factory_fail", "pyramid_services/tests/test_it.py::TestIntegration_find_service_factory::test_find_service_factory_service" ]
[]
MIT License
411
networkx__networkx-1976
293632863d5e895691b56dff4b12c937b2ea77dc
2016-02-05 18:54:35
3f4fd85765bf2d88188cfd4c84d0707152e6cd1e
diff --git a/networkx/algorithms/connectivity/kcutsets.py b/networkx/algorithms/connectivity/kcutsets.py index 2ad8bdfb4..c3457139a 100644 --- a/networkx/algorithms/connectivity/kcutsets.py +++ b/networkx/algorithms/connectivity/kcutsets.py @@ -3,6 +3,7 @@ Kanevsky all minimum node k cutsets algorithm. """ from operator import itemgetter +from itertools import combinations import networkx as nx from .utils import build_auxiliary_node_connectivity @@ -86,6 +87,22 @@ def all_node_cuts(G, k=None, flow_func=None): if not nx.is_connected(G): raise nx.NetworkXError('Input graph is disconnected.') + # Addess some corner cases first. + # For cycle graphs + if G.order() == G.size(): + if all(2 == d for n, d in G.degree()): + seen = set() + for u in G: + for v in nx.non_neighbors(G, u): + if (u, v) not in seen and (v, u) not in seen: + yield {v, u} + seen.add((v, u)) + return + # For complete Graphs + if nx.density(G) == 1: + for cut_set in combinations(G, len(G)-1): + yield set(cut_set) + return # Initialize data structures. # Keep track of the cuts already computed so we do not repeat them. seen = []
all_node_cuts returns too few and incorrect cuts.

This could be a documentation issue, a bug or a user understanding issue. Are these cases pathological for the algorithm?

Given a square graph:

    a -- b
    |    |
    c -- d

Based on a cursory reading of the documentation, I would have expected all_node_cuts() to return: [{'a','d'}, {'c','b'}]

I get `[{'a','c'}]` but if this is a valid node cut then surely {a,b}, {b,d}, {c,d} are also equally valid and a function called "all node cuts" should return them.

    G = nx.Graph([('a','b'), ('a','c'), ('c','d'), ('b','d')])
    print( G.nodes() )
    print( G.edges() )
    print( list(nx.all_node_cuts(G)) )

    >>> ['a', 'c', 'b', 'd']
    >>> [('a', 'c'), ('a', 'b'), ('c', 'd'), ('b', 'd')]
    >>> [{'a', 'c'}]

Expanding to a hexagon, we see similar pattern of node cuts. There are many isometric node cuts omitted from the results list. Two of the proposed cuts fail to create "two or more connected components" as the documentation suggests.

    G = nx.Graph([('a','b'), ('b','c'), ('c','d'),('d','e'), ('e','f'),('f','a')])
    list(nx.all_node_cuts(G))

    >>> [{'a', 'c'}, {'a', 'b'}, {'b', 'c'}]
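As a sanity check that is independent of `all_node_cuts`, the minimum vertex cuts of the square graph above can be enumerated by brute force. The snippet below simply removes every pair of vertices and tests connectivity; it illustrates the expected answer, not the library's algorithm.

```python
import itertools
import networkx as nx

G = nx.Graph([('a', 'b'), ('a', 'c'), ('c', 'd'), ('b', 'd')])

cuts = []
for pair in itertools.combinations(G.nodes(), 2):
    H = G.copy()
    H.remove_nodes_from(pair)
    # A pair is a cut if the remaining graph falls apart.
    if H.number_of_nodes() > 0 and not nx.is_connected(H):
        cuts.append(set(pair))

# Only the two "diagonal" pairs disconnect the square:
# {'a', 'd'} and {'b', 'c'}.
print(cuts)
```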
networkx/networkx
diff --git a/networkx/algorithms/connectivity/tests/test_kcutsets.py b/networkx/algorithms/connectivity/tests/test_kcutsets.py index e11547faf..9ca49698a 100644 --- a/networkx/algorithms/connectivity/tests/test_kcutsets.py +++ b/networkx/algorithms/connectivity/tests/test_kcutsets.py @@ -241,3 +241,27 @@ def test_non_repeated_cuts(): assert_true(len(solution) == len(cuts)) for cut in cuts: assert_true(cut in solution) + + +def test_cycle_graph(): + G = nx.cycle_graph(5) + solution = [{0, 2}, {0, 3}, {1, 3}, {1, 4}, {2, 4}] + cuts = list(nx.all_node_cuts(G)) + assert_true(len(solution) == len(cuts)) + for cut in cuts: + assert_true(cut in solution) + + +def test_complete_graph(): + G = nx.complete_graph(5) + solution = [ + {0, 1, 2, 3}, + {0, 1, 2, 4}, + {0, 1, 3, 4}, + {0, 2, 3, 4}, + {1, 2, 3, 4}, + ] + cuts = list(nx.all_node_cuts(G)) + assert_true(len(solution) == len(cuts)) + for cut in cuts: + assert_true(cut in solution)
{ "commit_name": "head_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 2, "issue_text_score": 2, "test_score": 1 }, "num_modified_files": 1 }
help
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "nose", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y libgdal-dev graphviz" ], "python": "3.6", "reqs_path": [ "requirements/default.txt", "requirements/test.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 decorator==5.1.1 importlib-metadata==4.8.3 iniconfig==1.1.1 -e git+https://github.com/networkx/networkx.git@293632863d5e895691b56dff4b12c937b2ea77dc#egg=networkx nose==1.3.7 packaging==21.3 pluggy==1.0.0 py==1.11.0 pyparsing==3.1.4 pytest==7.0.1 tomli==1.2.3 typing_extensions==4.1.1 zipp==3.6.0
name: networkx channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - decorator==5.1.1 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - nose==1.3.7 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - tomli==1.2.3 - typing-extensions==4.1.1 - zipp==3.6.0 prefix: /opt/conda/envs/networkx
[ "networkx/algorithms/connectivity/tests/test_kcutsets.py::test_cycle_graph", "networkx/algorithms/connectivity/tests/test_kcutsets.py::test_complete_graph" ]
[]
[ "networkx/algorithms/connectivity/tests/test_kcutsets.py::test_torrents_and_ferraro_graph", "networkx/algorithms/connectivity/tests/test_kcutsets.py::test_example_1", "networkx/algorithms/connectivity/tests/test_kcutsets.py::test_random_gnp", "networkx/algorithms/connectivity/tests/test_kcutsets.py::test_shell", "networkx/algorithms/connectivity/tests/test_kcutsets.py::test_configuration", "networkx/algorithms/connectivity/tests/test_kcutsets.py::test_karate", "networkx/algorithms/connectivity/tests/test_kcutsets.py::test_articulation_points", "networkx/algorithms/connectivity/tests/test_kcutsets.py::test_grid_2d_graph", "networkx/algorithms/connectivity/tests/test_kcutsets.py::test_disconnected_graph", "networkx/algorithms/connectivity/tests/test_kcutsets.py::test_alternative_flow_functions", "networkx/algorithms/connectivity/tests/test_kcutsets.py::test_is_separating_set_complete_graph", "networkx/algorithms/connectivity/tests/test_kcutsets.py::test_is_separating_set", "networkx/algorithms/connectivity/tests/test_kcutsets.py::test_non_repeated_cuts" ]
[]
BSD 3-Clause
412
unified-font-object__ufoNormalizer-23
f14d55967d4621114ff598a4407ee839f5387ff4
2016-02-06 21:19:33
f14d55967d4621114ff598a4407ee839f5387ff4
anthrotype: thank you Denis! This looks much better than my hotfix :+1: can you remind me why we can't read/write plist files as unicode strings and we need to use bytes?
diff --git a/normalization/ufonormalizer.py b/normalization/ufonormalizer.py index 7d0fa56..c4f15fb 100644 --- a/normalization/ufonormalizer.py +++ b/normalization/ufonormalizer.py @@ -1,6 +1,6 @@ #! /usr/bin/env python # -*- coding: utf-8 -*- -from __future__ import print_function +from __future__ import print_function, unicode_literals import time import os @@ -11,6 +11,7 @@ import textwrap import datetime import glob from collections import OrderedDict +from io import open """ - filter out unknown attributes and subelements @@ -89,22 +90,22 @@ except NameError: # plistlib.readPlistFromString instead. if hasattr(plistlib, "loads"): - def _readPlistFromBytes(data): + def _loads(data): return plistlib.loads(data) - def _writePlistToBytes(plist): + def _dumps(plist): return plistlib.dumps(plist) elif hasattr(plistlib, "readPlistFromBytes"): - def _readPlistFromBytes(data): + def _loads(data): return plistlib.readPlistFromBytes(tobytes(data)) - def _writePlistToBytes(plist): + def _dumps(plist): return plistlib.writePlistToBytes(plist) else: - def _readPlistFromBytes(data): + def _loads(data): return plistlib.readPlistFromString(data) - def _writePlistToBytes(plist): + def _dumps(plist): return plistlib.writePlistToString(plist) @@ -1334,9 +1335,8 @@ def subpathReadFile(ufoPath, *subpath): Read the contents of a file. """ path = subpathJoin(ufoPath, *subpath) - f = open(path, "rb") - text = f.read() - f.close() + with open(path, "r", encoding="utf-8") as f: + text = f.read() return text def subpathReadPlist(ufoPath, *subpath): @@ -1344,12 +1344,14 @@ def subpathReadPlist(ufoPath, *subpath): Read the contents of a property list and convert it into a Python object. """ - text = subpathReadFile(ufoPath, *subpath) - return _readPlistFromBytes(text) + path = subpathJoin(ufoPath, *subpath) + with open(path, "rb") as f: + data = f.read() + return _loads(data) # write -def subpathWriteFile(data, ufoPath, *subpath): +def subpathWriteFile(text, ufoPath, *subpath): """ Write data to a file. @@ -1360,19 +1362,12 @@ def subpathWriteFile(data, ufoPath, *subpath): path = subpathJoin(ufoPath, *subpath) if subpathExists(ufoPath, *subpath): existing = subpathReadFile(ufoPath, *subpath) - - if type(data) != type(existing): - if not isinstance(data, unicode): - data = unicode(data, "utf-8") - if not isinstance(existing, unicode): - existing = unicode(existing, "utf-8") else: existing = None - if data != existing: - f = open(path, "wb") - f.write(tobytes(data)) - f.close() + if text != existing: + with open(path, "w", encoding="utf-8") as f: + f.write(text) def subpathWritePlist(data, ufoPath, *subpath): """ @@ -1383,8 +1378,16 @@ def subpathWritePlist(data, ufoPath, *subpath): file contains data that is different from the new data. """ - data = _writePlistToBytes(data) - subpathWriteFile(data, ufoPath, *subpath) + data = _dumps(data) + path = subpathJoin(ufoPath, *subpath) + if subpathExists(ufoPath, *subpath): + existing = subpathReadPlist(ufoPath, *subpath) + else: + existing = None + + if data != existing: + with open(path, "wb") as f: + f.write(data) # rename
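Regarding the question in the review thread about reading/writing plists as unicode: a small standard-library illustration (nothing ufoNormalizer-specific is assumed) of the behaviour the `_loads`/`_dumps` wrappers rely on. On Python 3 the plist codec works on bytes, while the other UFO text files can be opened with an explicit UTF-8 encoding.

```python
import plistlib

# plistlib.dumps() produces bytes and plistlib.loads() expects bytes, so the
# plist round-trip is binary even though the payload strings are unicode.
data = {'copyright': 'Foo \u00a9 2016'}
blob = plistlib.dumps(data)
print(type(blob))            # <class 'bytes'>
print(plistlib.loads(blob))  # {'copyright': 'Foo © 2016'}
```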
String encoding issue When using the latest version, I am getting this error. It seems to relate to the © symbol in the font info area. ``` File "/Users/…/bin/FDK/Tools/osx/Python/Current/lib/python2.7/site-packages/ufonormalizer-0.1-py2.7.egg/ufonormalizer.py", line 109, in tobytes return s.encode(encoding, errors) UnicodeEncodeError: 'ascii' codec can't encode character u'\xa9' in position 310: ordinal not in range(128) ```
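The traceback boils down to a plain encoding problem. A minimal reproduction, independent of ufoNormalizer (the sample string below is made up for illustration):

```python
# Encoding text that contains a non-ASCII character (the copyright sign,
# u'\xa9') with the ASCII codec raises exactly the reported error, while
# UTF-8 handles it fine.
text = u'Copyright \xa9 2016 Some Foundry'

try:
    text.encode('ascii')
except UnicodeEncodeError as exc:
    print('ascii failed:', exc)

print(text.encode('utf-8'))  # b'Copyright \xc2\xa9 2016 Some Foundry'
```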
unified-font-object/ufoNormalizer
diff --git a/normalization/test_ufonormalizer.py b/normalization/test_ufonormalizer.py index 42957dc..6631c7d 100644 --- a/normalization/test_ufonormalizer.py +++ b/normalization/test_ufonormalizer.py @@ -11,10 +11,13 @@ from io import open from xml.etree import cElementTree as ET from ufonormalizer import ( normalizeGLIF, normalizeGlyphsDirectoryNames, normalizeGlyphNames, - subpathJoin, subpathReadPlist, subpathWriteFile, subpathWritePlist, - UFONormalizerError, XMLWriter, tobytes, userNameToFileName, handleClash1, - handleClash2, xmlEscapeText, xmlEscapeAttribute, xmlConvertValue, - xmlConvertFloat, xmlConvertInt, + subpathJoin, subpathSplit, subpathExists, subpathReadFile, + subpathReadPlist, subpathWriteFile, subpathWritePlist, subpathRenameFile, + subpathRenameDirectory, subpathRenameDirectory, subpathRemoveFile, + subpathGetModTime, subpathNeedsRefresh, modTimeLibKey, storeModTimes, + readModTimes, UFONormalizerError, XMLWriter, + tobytes, userNameToFileName, handleClash1, handleClash2, xmlEscapeText, + xmlEscapeAttribute, xmlConvertValue, xmlConvertFloat, xmlConvertInt, _normalizeGlifAnchor, _normalizeGlifGuideline, _normalizeGlifLib, _normalizeGlifNote, _normalizeFontInfoGuidelines, _normalizeGlifUnicode, _normalizeGlifAdvance, _normalizeGlifImage, _normalizeDictGuideline, @@ -25,16 +28,19 @@ from ufonormalizer import ( _normalizeGlifPointAttributesFormat2, _normalizeGlifComponentAttributesFormat2, _normalizeGlifTransformation, _normalizeColorString, _convertPlistElementToObject) +from ufonormalizer import __version__ as ufonormalizerVersion # Python 3.4 deprecated readPlistFromBytes and writePlistToBytes # Python 2 has readPlistFromString and writePlistToString try: - from plistlib import loads + from plistlib import loads, dumps except ImportError: try: from plistlib import readPlistFromBytes as loads + from plistlib import writePlistToBytes as dumps except ImportError: from plistlib import readPlistFromString as loads + from plistlib import writePlistToString as dumps GLIFFORMAT1 = '''\ <?xml version="1.0" encoding="UTF-8"?> @@ -1565,6 +1571,142 @@ class XMLWriterTest(unittest.TestCase): self.assertEqual(xmlConvertInt(0o0000030), '24') self.assertEqual(xmlConvertInt(65536), '65536') + def test_duplicateUFO(self): + pass + + +class SubpathTest(unittest.TestCase): + def __init__(self, methodName): + unittest.TestCase.__init__(self, methodName) + self.filename = 'tmp' + self.plistname = 'tmp.plist' + + def setUp(self): + self.directory = tempfile.mkdtemp() + self.filepath = os.path.join(self.directory, self.filename) + self.plistpath = os.path.join(self.directory, self.plistname) + + def tearDown(self): + shutil.rmtree(self.directory) + + def createTestFile(self, text, num=None): + if num is None: + with open(self.filepath, 'w', encoding='utf-8') as f: + f.write(text) + else: + for i in range(num): + filepath = self.filepath + str(i) + with open(filepath, 'w', encoding='utf-8') as f: + f.write(text) + + def test_subpathJoin(self): + self.assertEqual(subpathJoin('a', 'b', 'c'), + os.path.join('a', 'b', 'c')) + self.assertEqual(subpathJoin('a', os.path.join('b', 'c')), + os.path.join('a', 'b', 'c')) + + def test_subpathSplit(self): + self.assertEqual(subpathSplit(os.path.join('a', 'b')), + os.path.split(os.path.join('a', 'b'))) + self.assertEqual(subpathSplit(os.path.join('a', 'b', 'c')), + os.path.split(os.path.join('a', 'b', 'c'))) + + def test_subpathExists(self): + self.createTestFile('') + self.assertTrue(subpathExists(self.directory, self.filepath)) + 
self.assertFalse(subpathExists(self.directory, 'nofile.txt')) + + def test_subpathReadFile(self): + text = 'foo bar™⁜' + self.createTestFile(text) + self.assertEqual(text, subpathReadFile(self.directory, self.filename)) + + def test_subpathReadPlist(self): + data = dict([('a', 'foo'), ('b', 'bar'), ('c', '™')]) + with open(self.plistpath, 'wb') as f: + f.write(dumps(data)) + self.assertEqual(subpathReadPlist(self.directory, self.plistname), + data) + + def test_subpathWriteFile(self): + expected_text = 'foo bar™⁜' + subpathWriteFile(expected_text, self.directory, self.filename) + with open(self.filepath, 'r', encoding='utf-8') as f: + text = f.read() + self.assertEqual(text, expected_text) + + def test_subpathWritePlist(self): + expected_data = dict([('a', 'foo'), ('b', 'bar'), ('c', '™')]) + subpathWritePlist(expected_data, self.directory, self.plistname) + with open(self.plistpath, 'rb') as f: + data = loads(f.read()) + self.assertEqual(data, expected_data) + + def test_subpathRenameFile(self): + self.createTestFile('') + subpathRenameFile(self.directory, self.filename, self.filename + "_") + self.assertTrue(os.path.exists(self.filepath + "_")) + + def test_subpathRenameDirectory(self): + dirname = 'tmpdir' + dirpath = os.path.join(self.directory, dirname) + os.mkdir(dirpath) + subpathRenameFile(self.directory, dirname, dirname + "_") + self.assertTrue(os.path.exists(dirpath + "_")) + + def test_subpathRemoveFile(self): + self.createTestFile('') + subpathRemoveFile(self.directory, self.filename) + self.assertFalse(os.path.exists(self.filepath)) + + def test_subpathGetModTime(self): + self.createTestFile('') + mtime = subpathGetModTime(self.directory, self.filename) + self.assertEqual(os.path.getmtime(self.filepath), mtime) + + def test_subpathNeedsRefresh(self): + import time + self.createTestFile('') + modTime = os.path.getmtime(self.filepath) + modTimes = {} + modTimes[self.filename] = float(modTime) + self.assertFalse(subpathNeedsRefresh(modTimes, self.directory, + self.filename)) + time.sleep(1) # to get a different modtime + with open(self.filepath, 'w', encoding='utf-8') as f: + f.write('foo') + self.assertTrue(subpathNeedsRefresh(modTimes, self.directory, + self.filename)) + + def test_storeModTimes(self): + num = 5 + lib = {} + modTimes = {} + self.createTestFile('', num) + filenames = [self.filename + str(i) for i in range(num)] + for filename in filenames: + filepath = os.path.join(self.directory, filename) + modTime = os.path.getmtime(filepath) + modTimes[filename] = float('%.1f' % (modTime)) + lines = ['version: %s' % (ufonormalizerVersion)] + lines += ['%.1f %s' % (modTimes[filename], filename) + for filename in filenames] + storeModTimes(lib, modTimes) + self.assertEqual('\n'.join(lines), lib[modTimeLibKey]) + + def test_readModTimes(self): + num = 5 + lib = {} + modTimes = {} + lines = ['version: %s' % (ufonormalizerVersion)] + filenames = [self.filename + str(i) for i in range(num)] + modTime = float(os.path.getmtime(self.directory)) + for i, filename in enumerate(filenames): + modTimes[filename] = float('%.1f' % (modTime + i)) + lines.append('%.1f %s' % (modTime + i, filename)) + lib[modTimeLibKey] = '\n'.join(lines) + self.assertEqual(readModTimes(lib), modTimes) + class NameTranslationTest(unittest.TestCase):
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 1, "test_score": 3 }, "num_modified_files": 1 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest", "coverage" ], "pre_install": null, "python": "3.4", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 coverage==6.2 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work py @ file:///opt/conda/conda-bld/py_1644396412707/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 toml @ file:///tmp/build/80754af9/toml_1616166611790/work typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work -e git+https://github.com/unified-font-object/ufoNormalizer.git@f14d55967d4621114ff598a4407ee839f5387ff4#egg=ufonormalizer zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: ufoNormalizer channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - coverage==6.2 prefix: /opt/conda/envs/ufoNormalizer
[ "normalization/test_ufonormalizer.py::SubpathTest::test_subpathReadFile", "normalization/test_ufonormalizer.py::SubpathTest::test_subpathWriteFile" ]
[]
[ "normalization/test_ufonormalizer.py::UFONormalizerErrorTest::test_str", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_convert_plist_Element_to_object", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeFontInfoPlist_guidelines", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeFontInfoPlist_guidelines_everything", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeFontInfoPlist_guidelines_invalid_angle", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeFontInfoPlist_guidelines_invalid_x", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeFontInfoPlist_guidelines_invalid_y", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeFontInfoPlist_guidelines_no_angle", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeFontInfoPlist_guidelines_no_color", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeFontInfoPlist_guidelines_no_identifier", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeFontInfoPlist_guidelines_no_name", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeFontInfoPlist_guidelines_no_x", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeFontInfoPlist_guidelines_no_y", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeFontInfoPlist_no_guidelines", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_advance_defaults", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_advance_height", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_advance_invalid_values", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_advance_undefined", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_advance_width", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_anchor_everything", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_anchor_no_color", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_anchor_no_identifier", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_anchor_no_name", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_anchor_no_x", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_anchor_no_y", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_formats_1_and_2", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_guideline_everything", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_guideline_invalid", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_image_empty", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_image_everything", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_image_no_color", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_image_no_file_name", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_image_no_transformation", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_lib_defined", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_lib_undefined", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_no_formats", 
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_note_defined", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_note_undefined", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_outline_format1_element_order", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_outline_format1_empty", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_unicode_with_hex", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGLIF_unicode_without_hex", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_component_attributes_format1_defaults", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_component_attributes_format1_everything", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_component_attributes_format1_no_base", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_component_attributes_format1_no_transformation", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_component_attributes_format2_everything", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_component_format1_everything", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_component_format1_no_base", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_component_format1_subelement", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_contour_format1_empty", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_contour_format1_implied_anchor", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_contour_format1_implied_anchor_with_empty_name", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_contour_format1_implied_anchor_without_name", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_contour_format1_normal", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_contour_format1_point_without_attributes", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_contour_format1_unkown_child_element", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_contour_format1_unkown_point_type", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_contour_format2_empty", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_contour_format2_normal", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_contour_format2_point_without_attributes", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_contour_format2_unknown_child_element", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_outline_format2_element_order", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_outline_format2_empty", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_point_attributes_format1_empty_name", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_point_attributes_format1_everything", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_point_attributes_format1_invalid_x", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_point_attributes_format1_invalid_y", 
"normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_point_attributes_format1_no_name", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_point_attributes_format1_no_x", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_point_attributes_format1_no_y", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_point_attributes_format1_subelement", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_point_attributes_format1_type_and_smooth", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_point_attributes_format2_everything", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_transformation_default", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_transformation_empty", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_transformation_invalid_value", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_transformation_non_default", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlif_transformation_unknown_attribute", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlyphNames_non_standard", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlyphNames_old_same_as_new", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlyphsDirectoryNames_non_standard", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeGlyphsDirectoryNames_old_same_as_new", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalizeLayerInfoPlist_color", "normalization/test_ufonormalizer.py::UFONormalizerTest::test_normalize_color_string", "normalization/test_ufonormalizer.py::XMLWriterTest::test_attributesToString", "normalization/test_ufonormalizer.py::XMLWriterTest::test_duplicateUFO", "normalization/test_ufonormalizer.py::XMLWriterTest::test_propertyListObject_array", "normalization/test_ufonormalizer.py::XMLWriterTest::test_propertyListObject_boolean", "normalization/test_ufonormalizer.py::XMLWriterTest::test_propertyListObject_data", "normalization/test_ufonormalizer.py::XMLWriterTest::test_propertyListObject_date", "normalization/test_ufonormalizer.py::XMLWriterTest::test_propertyListObject_dict", "normalization/test_ufonormalizer.py::XMLWriterTest::test_propertyListObject_float", "normalization/test_ufonormalizer.py::XMLWriterTest::test_propertyListObject_integer", "normalization/test_ufonormalizer.py::XMLWriterTest::test_propertyListObject_none", "normalization/test_ufonormalizer.py::XMLWriterTest::test_propertyListObject_string", "normalization/test_ufonormalizer.py::XMLWriterTest::test_propertyListObject_unknown_data_type", "normalization/test_ufonormalizer.py::XMLWriterTest::test_xmlConvertFloat", "normalization/test_ufonormalizer.py::XMLWriterTest::test_xmlConvertInt", "normalization/test_ufonormalizer.py::XMLWriterTest::test_xmlConvertValue", "normalization/test_ufonormalizer.py::XMLWriterTest::test_xmlEscapeAttribute", "normalization/test_ufonormalizer.py::XMLWriterTest::test_xmlEscapeText", "normalization/test_ufonormalizer.py::SubpathTest::test_readModTimes", "normalization/test_ufonormalizer.py::SubpathTest::test_storeModTimes", "normalization/test_ufonormalizer.py::SubpathTest::test_subpathExists", "normalization/test_ufonormalizer.py::SubpathTest::test_subpathGetModTime", "normalization/test_ufonormalizer.py::SubpathTest::test_subpathJoin", 
"normalization/test_ufonormalizer.py::SubpathTest::test_subpathNeedsRefresh", "normalization/test_ufonormalizer.py::SubpathTest::test_subpathReadPlist", "normalization/test_ufonormalizer.py::SubpathTest::test_subpathRemoveFile", "normalization/test_ufonormalizer.py::SubpathTest::test_subpathRenameDirectory", "normalization/test_ufonormalizer.py::SubpathTest::test_subpathRenameFile", "normalization/test_ufonormalizer.py::SubpathTest::test_subpathSplit", "normalization/test_ufonormalizer.py::SubpathTest::test_subpathWritePlist", "normalization/test_ufonormalizer.py::NameTranslationTest::test_handleClash1", "normalization/test_ufonormalizer.py::NameTranslationTest::test_handleClash1_max_file_length", "normalization/test_ufonormalizer.py::NameTranslationTest::test_handleClash2", "normalization/test_ufonormalizer.py::NameTranslationTest::test_userNameToFileName" ]
[]
BSD-3-Clause
413
sympy__sympy-10543
313b89b696bd90bcc522262589d3c84fc909614b
2016-02-06 21:25:50
0241b35cae5f2adc04dc37b25f7dc9c5f00bd746
mvnnn: @gxyd please check the changes. gxyd: In your branch ``` >>> a = Symbol('a', real=True) >>> zeta(a).is_finite True # this should return None ``` mvnnn: @gxyd thanks. i will fix that mvnnn: @gxyd please check the changes. jksuom: The test function could be reorganized. It is not necessary to define a separate negative variable ``c`` as ``-b`` could be used instead (``zeta(-b).is_finite``). There could also be tests for e.g. ``a + I``, ``b + 1`` and ``s + 1`` for a non-zero ``s``. mvnnn: @jksuom please review. jksuom: It seems that the branch is in conflict with the previous version. (I get this when attempting to pull: `` ! [rejected] refs/pull/10543/head -> pr-10543 (non-fast-forward)``) It is possible to add new commits without removing the old ones. That will make it easier to review. Was there some problem with the tests I suggested? mvnnn: @jksuom done.please Review. jksuom: I don't see new tests or the difference to the previous commit. Was there a problem? mvnnn: done mvnnn: @jksuom done. should we need to add more test cases ? jksuom: I was expecting to see the cases I did give earlier. mvnnn: @jksuom done. jksuom: Thanks! I think these tests should suffice. But there are still a couple of ways to simplify the code to avoid new imports. It is not necessary to import ``Eq`` for ``Eq(self.args[0], 1)``. It is enough to form ``self.args[0] - 1``. ``solve`` need not be called either. We can use ``(self.args[0] - 1).is_zero`` instead. That can be the first thing to do, and then the code could become something like this: ``` arg_is_one = (self.args[0] - 1).is_zero if arg_is_one is not None: return not arg_is_one ``` mvnnn: ``` >>> b = Symbol('b', positive=True) >>>zeta(b).is_finite None ``` because it's domain contain ```b = 1``` also. now we take this ``` >>> b = Symbol('b', positive=True) >>>zeta(b + 1).is_finite True >>>zeta(b - 1).is_finite None >>>zeta(b**2 - 2*b + 1).is_finite None ``` because ```(b+1)>1``` so it's finite but -1<(b-1)<00 is include ```1``` so ans is ```None```. in case of ```(b**2 - 2*b + 1)``` .if we take ```b=2``` then this eqn value is ```1``` so it's return ```None``` overall, we can put any ```args``` and check it's possible solution is ```1``` then it's output is ```None``` otherwise output is ```True``` jksuom: I think the tests are ok. But I don't think that it should be necessary to call ``solve`` to find out that ``f = b**2 - 2*b + 1`` would take the value 1 for some positive ``b``. ``(f - 1).is_zero`` should also show that. (There is no need for the actual solution ``b = 2``.) mvnnn: i will fix that.
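A short sketch of the point made at the end of the thread: the assumptions system can answer the question "can the argument equal 1?" directly, with no call to `solve` (the symbols follow the discussion above):

```python
from sympy import Symbol

b = Symbol('b', positive=True)
f = b**2 - 2*b + 1

# (f - 1) is b*(b - 2); for positive b this may or may not be zero
# (b == 2 makes it zero), so is_zero is None and zeta(f).is_finite stays None.
print((f - 1).is_zero)         # None

s = Symbol('s', zero=False)
# For s + 1 the argument can never equal 1, so the same test is conclusive.
print(((s + 1) - 1).is_zero)   # False
```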
diff --git a/sympy/functions/special/zeta_functions.py b/sympy/functions/special/zeta_functions.py index a78d9e7171..561c3e0cab 100644 --- a/sympy/functions/special/zeta_functions.py +++ b/sympy/functions/special/zeta_functions.py @@ -463,6 +463,11 @@ def _eval_rewrite_as_dirichlet_eta(self, s, a=1): def _eval_rewrite_as_lerchphi(self, s, a=1): return lerchphi(1, s, a) + def _eval_is_finite(self): + arg_is_one = (self.args[0] - 1).is_zero + if arg_is_one is not None: + return not arg_is_one + def fdiff(self, argindex=1): if len(self.args) == 2: s, a = self.args
zeta(2 + I).is_finite returns None At least for `zeta(x)` with `Re(x) > 1` and finite `Im(x)`, it should return `True`.
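For reference, a quick sketch of the behaviour the patch and tests above settle on (the expected outputs mirror the added test cases):

```python
from sympy import I, Symbol, zeta

print(zeta(2 + I).is_finite)        # True  -- argument provably differs from 1
print(zeta(1).is_finite)            # False -- the pole of the zeta function
print(zeta(Symbol('x')).is_finite)  # None  -- x could be 1, so undecided
```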
sympy/sympy
diff --git a/sympy/functions/special/tests/test_zeta_functions.py b/sympy/functions/special/tests/test_zeta_functions.py index d3a5b4c3c1..de2007fe16 100644 --- a/sympy/functions/special/tests/test_zeta_functions.py +++ b/sympy/functions/special/tests/test_zeta_functions.py @@ -178,3 +178,21 @@ def test_stieltjes_evalf(): assert abs(stieltjes(0).evalf() - 0.577215664) < 1E-9 assert abs(stieltjes(0, 0.5).evalf() - 1.963510026) < 1E-9 assert abs(stieltjes(1, 2).evalf() + 0.072815845 ) < 1E-9 + + +def test_issue_10475(): + a = Symbol('a', real=True) + b = Symbol('b', positive=True) + s = Symbol('s', zero=False) + + assert zeta(2 + I).is_finite + assert zeta(1).is_finite is False + assert zeta(x).is_finite is None + assert zeta(x + I).is_finite is None + assert zeta(a).is_finite is None + assert zeta(b).is_finite is None + assert zeta(-b).is_finite is True + assert zeta(b**2 - 2*b + 1).is_finite is None + assert zeta(a + I).is_finite is True + assert zeta(b + 1).is_finite is True + assert zeta(s + 1).is_finite is True
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 1 }
0.7
{ "env_vars": null, "env_yml_path": [], "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "mpmath>=0.19", "pip_packages": [ "pytest" ], "pre_install": [], "python": "3.6", "reqs_path": [], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 importlib-metadata==4.8.3 iniconfig==1.1.1 mpmath==1.2.1 packaging==21.3 pluggy==1.0.0 py==1.11.0 pyparsing==3.1.4 pytest==7.0.1 -e git+https://github.com/sympy/sympy.git@313b89b696bd90bcc522262589d3c84fc909614b#egg=sympy tomli==1.2.3 typing_extensions==4.1.1 zipp==3.6.0
name: sympy channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - mpmath=1.2.1=py36h06a4308_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - tomli==1.2.3 - typing-extensions==4.1.1 - zipp==3.6.0 prefix: /opt/conda/envs/sympy
[ "sympy/functions/special/tests/test_zeta_functions.py::test_issue_10475" ]
[]
[ "sympy/functions/special/tests/test_zeta_functions.py::test_zeta_eval", "sympy/functions/special/tests/test_zeta_functions.py::test_zeta_series", "sympy/functions/special/tests/test_zeta_functions.py::test_dirichlet_eta_eval", "sympy/functions/special/tests/test_zeta_functions.py::test_rewriting", "sympy/functions/special/tests/test_zeta_functions.py::test_derivatives", "sympy/functions/special/tests/test_zeta_functions.py::test_polylog_expansion", "sympy/functions/special/tests/test_zeta_functions.py::test_lerchphi_expansion", "sympy/functions/special/tests/test_zeta_functions.py::test_stieltjes", "sympy/functions/special/tests/test_zeta_functions.py::test_stieltjes_evalf" ]
[]
BSD
414
scrapy__scrapy-1771
e328a9b9dfa4fbc79c59ed4f45f757e998301c31
2016-02-07 06:15:48
a975a50558cd78a1573bee2e957afcb419fd1bd6
diff --git a/scrapy/http/cookies.py b/scrapy/http/cookies.py index e92c3fe73..a1e95102e 100644 --- a/scrapy/http/cookies.py +++ b/scrapy/http/cookies.py @@ -137,13 +137,29 @@ class WrappedRequest(object): """ return self.request.meta.get('is_unverifiable', False) - # python3 uses request.unverifiable + def get_origin_req_host(self): + return urlparse_cached(self.request).hostname + + # python3 uses attributes instead of methods + @property + def full_url(self): + return self.get_full_url() + + @property + def host(self): + return self.get_host() + + @property + def type(self): + return self.get_type() + @property def unverifiable(self): return self.is_unverifiable() - def get_origin_req_host(self): - return urlparse_cached(self.request).hostname + @property + def origin_req_host(self): + return self.get_origin_req_host() def has_header(self, name): return name in self.request.headers
PY3: Fail to download the second or later requests to hosts using secure cookies ## Environment * Mac OS X 10.10.5 * Python 3.4.2 * Scrapy 1.1.0rc1 * Twisted 15.5.0 ## Steps to Reproduce 1. Save the following spider as `secure_cookie_spider.py`. ```py import scrapy class SecureCookieSpider(scrapy.Spider): name = 'secure_cookie_spider' start_urls = [ 'https://github.com/', ] def parse(self, response): # Request the same url again yield scrapy.Request(url=response.url, callback=self.parse_second_request) def parse_second_request(self, response): pass ``` 2. Run the following command. ``` $ scrapy runspider secure_cookie_spider.py ``` ## Expected Results No error is reported. ## Actual Results Fail to download the second request with `AttributeError: 'WrappedRequest' object has no attribute 'type'`. ``` $ scrapy runspider secure_cookie_spider.py 2016-02-07 11:57:11 [scrapy] INFO: Scrapy 1.1.0rc1 started (bot: scrapybot) 2016-02-07 11:57:11 [scrapy] INFO: Overridden settings: {} 2016-02-07 11:57:11 [scrapy] INFO: Enabled extensions: ['scrapy.extensions.corestats.CoreStats', 'scrapy.extensions.logstats.LogStats'] 2016-02-07 11:57:11 [scrapy] INFO: Enabled downloader middlewares: ['scrapy.downloadermiddlewares.httpauth.HttpAuthMiddleware', 'scrapy.downloadermiddlewares.downloadtimeout.DownloadTimeoutMiddleware', 'scrapy.downloadermiddlewares.useragent.UserAgentMiddleware', 'scrapy.downloadermiddlewares.retry.RetryMiddleware', 'scrapy.downloadermiddlewares.defaultheaders.DefaultHeadersMiddleware', 'scrapy.downloadermiddlewares.redirect.MetaRefreshMiddleware', 'scrapy.downloadermiddlewares.httpcompression.HttpCompressionMiddleware', 'scrapy.downloadermiddlewares.redirect.RedirectMiddleware', 'scrapy.downloadermiddlewares.cookies.CookiesMiddleware', 'scrapy.downloadermiddlewares.chunked.ChunkedTransferMiddleware', 'scrapy.downloadermiddlewares.stats.DownloaderStats'] 2016-02-07 11:57:11 [scrapy] INFO: Enabled spider middlewares: ['scrapy.spidermiddlewares.httperror.HttpErrorMiddleware', 'scrapy.spidermiddlewares.offsite.OffsiteMiddleware', 'scrapy.spidermiddlewares.referer.RefererMiddleware', 'scrapy.spidermiddlewares.urllength.UrlLengthMiddleware', 'scrapy.spidermiddlewares.depth.DepthMiddleware'] 2016-02-07 11:57:11 [scrapy] INFO: Enabled item pipelines: [] 2016-02-07 11:57:11 [scrapy] INFO: Spider opened 2016-02-07 11:57:11 [scrapy] INFO: Crawled 0 pages (at 0 pages/min), scraped 0 items (at 0 items/min) 2016-02-07 11:57:12 [scrapy] DEBUG: Crawled (200) <GET https://github.com/> (referer: None) 2016-02-07 11:57:12 [scrapy] ERROR: Error downloading <GET https://github.com/> Traceback (most recent call last): File "/private/tmp/scrapy1.1/venv/lib/python3.4/site-packages/twisted/internet/defer.py", line 1128, in _inlineCallbacks result = g.send(result) File "/private/tmp/scrapy1.1/venv/lib/python3.4/site-packages/scrapy/core/downloader/middleware.py", line 37, in process_request response = yield method(request=request, spider=spider) File "/private/tmp/scrapy1.1/venv/lib/python3.4/site-packages/scrapy/downloadermiddlewares/cookies.py", line 39, in process_request jar.add_cookie_header(request) File "/private/tmp/scrapy1.1/venv/lib/python3.4/site-packages/scrapy/http/cookies.py", line 42, in add_cookie_header cookies += self.jar._cookies_for_domain(host, wreq) File "/usr/local/Cellar/python3/3.4.2_1/Frameworks/Python.framework/Versions/3.4/lib/python3.4/http/cookiejar.py", line 1242, in _cookies_for_domain if not self._policy.return_ok(cookie, request): File 
"/usr/local/Cellar/python3/3.4.2_1/Frameworks/Python.framework/Versions/3.4/lib/python3.4/http/cookiejar.py", line 1077, in return_ok if not fn(cookie, request): File "/usr/local/Cellar/python3/3.4.2_1/Frameworks/Python.framework/Versions/3.4/lib/python3.4/http/cookiejar.py", line 1103, in return_ok_secure if cookie.secure and request.type != "https": AttributeError: 'WrappedRequest' object has no attribute 'type' 2016-02-07 11:57:12 [scrapy] INFO: Closing spider (finished) 2016-02-07 11:57:12 [scrapy] INFO: Dumping Scrapy stats: {'downloader/exception_count': 1, 'downloader/exception_type_count/builtins.AttributeError': 1, 'downloader/request_bytes': 211, 'downloader/request_count': 1, 'downloader/request_method_count/GET': 1, 'downloader/response_bytes': 9735, 'downloader/response_count': 1, 'downloader/response_status_count/200': 1, 'finish_reason': 'finished', 'finish_time': datetime.datetime(2016, 2, 7, 2, 57, 12, 757829), 'log_count/DEBUG': 1, 'log_count/ERROR': 1, 'log_count/INFO': 7, 'request_depth_max': 1, 'response_received_count': 1, 'scheduler/dequeued': 2, 'scheduler/dequeued/memory': 2, 'scheduler/enqueued': 2, 'scheduler/enqueued/memory': 2, 'start_time': datetime.datetime(2016, 2, 7, 2, 57, 11, 384330)} 2016-02-07 11:57:12 [scrapy] INFO: Spider closed (finished) ``` Note that no error is reported in Python 2.
scrapy/scrapy
diff --git a/tests/test_http_cookies.py b/tests/test_http_cookies.py index d529f609b..549f779d8 100644 --- a/tests/test_http_cookies.py +++ b/tests/test_http_cookies.py @@ -14,12 +14,15 @@ class WrappedRequestTest(TestCase): def test_get_full_url(self): self.assertEqual(self.wrapped.get_full_url(), self.request.url) + self.assertEqual(self.wrapped.full_url, self.request.url) def test_get_host(self): self.assertEqual(self.wrapped.get_host(), urlparse(self.request.url).netloc) + self.assertEqual(self.wrapped.host, urlparse(self.request.url).netloc) def test_get_type(self): self.assertEqual(self.wrapped.get_type(), urlparse(self.request.url).scheme) + self.assertEqual(self.wrapped.type, urlparse(self.request.url).scheme) def test_is_unverifiable(self): self.assertFalse(self.wrapped.is_unverifiable()) @@ -32,6 +35,7 @@ class WrappedRequestTest(TestCase): def test_get_origin_req_host(self): self.assertEqual(self.wrapped.get_origin_req_host(), 'www.example.com') + self.assertEqual(self.wrapped.origin_req_host, 'www.example.com') def test_has_header(self): self.assertTrue(self.wrapped.has_header('content-type'))
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 1 }
1.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc libxml2-dev libxslt1-dev zlib1g-dev libffi-dev libssl-dev" ], "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==25.3.0 Automat==24.8.1 cffi==1.17.1 constantly==23.10.4 cryptography==44.0.2 cssselect==1.3.0 exceptiongroup==1.2.2 hyperlink==21.0.0 idna==3.10 incremental==24.7.2 iniconfig==2.1.0 jmespath==1.0.1 lxml==5.3.1 packaging==24.2 parsel==1.10.0 pluggy==1.5.0 pyasn1==0.6.1 pyasn1_modules==0.4.2 pycparser==2.22 PyDispatcher==2.0.7 pyOpenSSL==25.0.0 pytest==8.3.5 queuelib==1.7.0 -e git+https://github.com/scrapy/scrapy.git@e328a9b9dfa4fbc79c59ed4f45f757e998301c31#egg=Scrapy service-identity==24.2.0 six==1.17.0 tomli==2.2.1 Twisted==24.11.0 typing_extensions==4.13.0 w3lib==2.3.1 zope.interface==7.2
name: scrapy channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==25.3.0 - automat==24.8.1 - cffi==1.17.1 - constantly==23.10.4 - cryptography==44.0.2 - cssselect==1.3.0 - exceptiongroup==1.2.2 - hyperlink==21.0.0 - idna==3.10 - incremental==24.7.2 - iniconfig==2.1.0 - jmespath==1.0.1 - lxml==5.3.1 - packaging==24.2 - parsel==1.10.0 - pluggy==1.5.0 - pyasn1==0.6.1 - pyasn1-modules==0.4.2 - pycparser==2.22 - pydispatcher==2.0.7 - pyopenssl==25.0.0 - pytest==8.3.5 - queuelib==1.7.0 - service-identity==24.2.0 - six==1.17.0 - tomli==2.2.1 - twisted==24.11.0 - typing-extensions==4.13.0 - w3lib==2.3.1 - zope-interface==7.2 prefix: /opt/conda/envs/scrapy
[ "tests/test_http_cookies.py::WrappedRequestTest::test_get_full_url", "tests/test_http_cookies.py::WrappedRequestTest::test_get_host", "tests/test_http_cookies.py::WrappedRequestTest::test_get_origin_req_host", "tests/test_http_cookies.py::WrappedRequestTest::test_get_type" ]
[]
[ "tests/test_http_cookies.py::WrappedRequestTest::test_add_unredirected_header", "tests/test_http_cookies.py::WrappedRequestTest::test_get_header", "tests/test_http_cookies.py::WrappedRequestTest::test_has_header", "tests/test_http_cookies.py::WrappedRequestTest::test_header_items", "tests/test_http_cookies.py::WrappedRequestTest::test_is_unverifiable", "tests/test_http_cookies.py::WrappedRequestTest::test_is_unverifiable2", "tests/test_http_cookies.py::WrappedResponseTest::test_get_all", "tests/test_http_cookies.py::WrappedResponseTest::test_getheaders", "tests/test_http_cookies.py::WrappedResponseTest::test_info" ]
[]
BSD 3-Clause "New" or "Revised" License
415
pika__pika-701
c467ad22fb8f2fd2bc925fa59d3b082fd035302b
2016-02-07 08:43:01
f73f9bbaddd90b03583a6693f6158e56fbede948
vitaly-krugl: @gmr, please take a look when you have a moment
diff --git a/docs/examples/asynchronous_publisher_example.rst b/docs/examples/asynchronous_publisher_example.rst index e1267ad..6cba07c 100644 --- a/docs/examples/asynchronous_publisher_example.rst +++ b/docs/examples/asynchronous_publisher_example.rst @@ -162,7 +162,7 @@ publisher.py:: different parameters. In this case, we'll close the connection to shutdown the object. - :param pika.channel.Channel channel: The closed channel + :param pika.channel.Channel: The closed channel :param int reply_code: The numeric reason the channel was closed :param str reply_text: The text reason the channel was closed diff --git a/docs/examples/blocking_consume.rst b/docs/examples/blocking_consume.rst index 8603c15..85852e4 100644 --- a/docs/examples/blocking_consume.rst +++ b/docs/examples/blocking_consume.rst @@ -9,21 +9,21 @@ When pika calls your method, it will pass in the channel, a :py:class:`pika.spec Example of consuming messages and acknowledging them:: - import pika + import pika - def on_message(channel, method_frame, header_frame, body): - print(method_frame.delivery_tag) - print(body) - print() - channel.basic_ack(delivery_tag=method_frame.delivery_tag) + def on_message(channel, method_frame, header_frame, body): + print(method_frame.delivery_tag) + print(body) + print() + channel.basic_ack(delivery_tag=method_frame.delivery_tag) - connection = pika.BlockingConnection() - channel = connection.channel() - channel.basic_consume(on_message, 'test') - try: - channel.start_consuming() - except KeyboardInterrupt: - channel.stop_consuming() - connection.close() + connection = pika.BlockingConnection() + channel = connection.channel() + channel.basic_consume(on_message, 'test') + try: + channel.start_consuming() + except KeyboardInterrupt: + channel.stop_consuming() + connection.close() \ No newline at end of file diff --git a/docs/examples/heartbeat_and_blocked_timeouts.rst b/docs/examples/heartbeat_and_blocked_timeouts.rst new file mode 100644 index 0000000..ba2c4b5 --- /dev/null +++ b/docs/examples/heartbeat_and_blocked_timeouts.rst @@ -0,0 +1,37 @@ +Ensuring well-behaved connection with heartbeat and blocked-connection timeouts +=============================================================================== + + +This example demonstrates explicit setting of heartbeat and blocked connection timeouts. + +Starting with RabbitMQ 3.5.5, the broker's default hearbeat timeout decreased from 580 seconds to 60 seconds. As a result, applications that perform lengthy processing in the same thread that also runs their Pika connection may experience unexpected dropped connections due to heartbeat timeout. Here, we specify an explicit lower bound for heartbeat timeout. + +When RabbitMQ broker is running out of certain resources, such as memory and disk space, it may block connections that are performing resource-consuming operations, such as publishing messages. Once a connection is blocked, RabbiMQ stops reading from that connection's socket, so no commands from the client will get through to te broker on that connection until the broker unblocks it. A blocked connection may last for an indefinite period of time, stalling the connection and possibly resulting in a hang (e.g., in BlockingConnection) until the connection is unblocked. Blocked Connectin Timeout is intended to interrupt (i.e., drop) a connection that has been blocked longer than the given timeout value. 
+ +Example of configuring hertbeat and blocked-connection timeouts:: + + import pika + + + def main(): + + # NOTE: These paramerers work with all Pika connection types + params = pika.ConnectionParameters(heartbeat_interval=600, + blocked_connection_timeout=300) + + conn = pika.BlockingConnection(params) + + chan = conn.channel() + + chan.basic_publish('', 'my-alphabet-queue', "abc") + + # If publish causes the connection to become blocked, then this conn.close() + # would hang until the connection is unblocked, if ever. However, the + # blocked_connection_timeout connection parameter would interrupt the wait, + # resulting in ConnectionClosed exception from BlockingConnection (or the + # on_connection_closed callback call in an asynchronous adapter) + conn.close() + + + if __name__ == '__main__': + main() diff --git a/docs/version_history.rst b/docs/version_history.rst index c3eb70d..9d7ffc3 100644 --- a/docs/version_history.rst +++ b/docs/version_history.rst @@ -18,6 +18,10 @@ Next Release never be serviced in the asynchronous scenario. - `Channel.basic_reject` fixed to allow `delivery_tag` to be of type `long` as well as `int`. (by quantum5) + - Implemented support for blocked connection timeouts in + `pika.connection.Connection`. This feature is available to all pika adapters. + See `pika.connection.ConnectionParameters` docstring to learn more about + `blocked_connection_timeout` configuration. 0.10.0 2015-09-02 ----------------- diff --git a/examples/heatbeat_and_blocked_timeouts.py b/examples/heatbeat_and_blocked_timeouts.py new file mode 100644 index 0000000..3fe9a99 --- /dev/null +++ b/examples/heatbeat_and_blocked_timeouts.py @@ -0,0 +1,48 @@ +""" +This example demonstrates explicit setting of heartbeat and blocked connection +timeouts. + +Starting with RabbitMQ 3.5.5, the broker's default hearbeat timeout decreased +from 580 seconds to 60 seconds. As a result, applications that perform lengthy +processing in the same thread that also runs their Pika connection may +experience unexpected dropped connections due to heartbeat timeout. Here, we +specify an explicit lower bound for heartbeat timeout. + +When RabbitMQ broker is running out of certain resources, such as memory and +disk space, it may block connections that are performing resource-consuming +operations, such as publishing messages. Once a connection is blocked, RabbiMQ +stops reading from that connection's socket, so no commands from the client will +get through to te broker on that connection until the broker unblocks it. A +blocked connection may last for an indefinite period of time, stalling the +connection and possibly resulting in a hang (e.g., in BlockingConnection) until +the connection is unblocked. Blocked Connectin Timeout is intended to interrupt +(i.e., drop) a connection that has been blocked longer than the given timeout +value. +""" + + +import pika + + +def main(): + + # NOTE: These paramerers work with all Pika connection types + params = pika.ConnectionParameters(heartbeat_interval=600, + blocked_connection_timeout=300) + + conn = pika.BlockingConnection(params) + + chan = conn.channel() + + chan.basic_publish('', 'my-alphabet-queue', "abc") + + # If publish causes the connection to become blocked, then this conn.close() + # would hang until the connection is unblocked, if ever. 
However, the + # blocked_connection_timeout connection parameter would interrupt the wait, + # resulting in ConnectionClosed exception from BlockingConnection (or the + # on_connection_closed callback call in an asynchronous adapter) + conn.close() + + +if __name__ == '__main__': + main() diff --git a/pika/adapters/base_connection.py b/pika/adapters/base_connection.py index d355c60..3bfc71a 100644 --- a/pika/adapters/base_connection.py +++ b/pika/adapters/base_connection.py @@ -78,6 +78,29 @@ class BaseConnection(connection.Connection): on_open_error_callback, on_close_callback) + def __repr__(self): + def get_socket_repr(sock): + if sock is None: + return None + + sockname = sock.getsockname() + + peername = None + try: + peername = sock.getpeername() + except socket.error: + # not connected? + pass + + return '%s->%s' % (sockname, peername) + + return ( + '<%s state=%s socket=%s params=%s>' % + (self.__class__.__name__, + self.connection_state, + get_socket_repr(self.socket), + self.params)) + def add_timeout(self, deadline, callback_method): """Add the callback_method to the IOLoop timer to fire after deadline seconds. Returns a handle to the timeout @@ -315,7 +338,8 @@ class BaseConnection(connection.Connection): LOGGER.error("Socket Error: %s", error_code) # Disconnect from our IOLoop and let Connection know what's up - self._on_terminate(-1, repr(error_value)) + self._on_terminate(connection.InternalCloseReasons.SOCKET_ERROR, + repr(error_value)) def _handle_timeout(self): """Handle a socket timeout in read or write. @@ -349,7 +373,8 @@ class BaseConnection(connection.Connection): error_msg = ('BAD libc: Write-Only but Read+Error. ' 'Assume socket disconnected.') LOGGER.error(error_msg) - self._on_terminate(-1, error_msg) + self._on_terminate(connection.InternalCloseReasons.SOCKET_ERROR, + error_msg) if self.socket and (events & self.ERROR): LOGGER.error('Error event %r, %r', events, error) @@ -391,7 +416,9 @@ class BaseConnection(connection.Connection): # Empty data, should disconnect if not data or data == 0: LOGGER.error('Read empty data, calling disconnect') - return self._on_terminate(-1, "EOF") + return self._on_terminate( + connection.InternalCloseReasons.SOCKET_ERROR, + "EOF") # Pass the data into our top level frame dispatching method self._on_data_available(data) @@ -400,13 +427,13 @@ class BaseConnection(connection.Connection): def _handle_write(self): """Try and write as much as we can, if we get blocked requeue what's left""" - bytes_written = 0 + total_bytes_sent = 0 try: while self.outbound_buffer: frame = self.outbound_buffer.popleft() while True: try: - bw = self.socket.send(frame) + num_bytes_sent = self.socket.send(frame) break except _SOCKET_ERROR as error: if error.errno == errno.EINTR: @@ -414,10 +441,10 @@ class BaseConnection(connection.Connection): else: raise - bytes_written += bw - if bw < len(frame): + total_bytes_sent += num_bytes_sent + if num_bytes_sent < len(frame): LOGGER.debug("Partial write, requeing remaining data") - self.outbound_buffer.appendleft(frame[bw:]) + self.outbound_buffer.appendleft(frame[num_bytes_sent:]) break except socket.timeout: @@ -433,7 +460,7 @@ class BaseConnection(connection.Connection): else: return self._handle_error(error) - return bytes_written + return total_bytes_sent def _init_connection_state(self): diff --git a/pika/adapters/blocking_connection.py b/pika/adapters/blocking_connection.py index d93413b..9c74de5 100644 --- a/pika/adapters/blocking_connection.py +++ b/pika/adapters/blocking_connection.py @@ -212,7 +212,7 
@@ class _TimerEvt(object): # pylint: disable=R0903 self.timer_id = None def __repr__(self): - return '%s(timer_id=%s, callback=%s)' % (self.__class__.__name__, + return '<%s timer_id=%s callback=%s>' % (self.__class__.__name__, self.timer_id, self._callback) def dispatch(self): @@ -236,9 +236,9 @@ class _ConnectionBlockedUnblockedEvtBase(object): # pylint: disable=R0903 self._method_frame = method_frame def __repr__(self): - return '%s(callback=%s, frame=%s)' % (self.__class__.__name__, - self._callback, - self._method_frame) + return '<%s callback=%s, frame=%s>' % (self.__class__.__name__, + self._callback, + self._method_frame) def dispatch(self): """Dispatch the user's callback method""" @@ -266,7 +266,7 @@ class BlockingConnection(object): # pylint: disable=R0902 receive messages from RabbitMQ using :meth:`basic_consume <BlockingChannel.basic_consume>` or if you want to be notified of a delivery failure when using - :meth:`basic_publish <BlockingChannel.basic_publish>` . + :meth:`basic_publish <BlockingChannel.basic_publish>`. For more information about communicating with the blocking_connection adapter, be sure to check out the @@ -274,6 +274,40 @@ class BlockingConnection(object): # pylint: disable=R0902 :class:`Channel <pika.channel.Channel>` based communication for the blocking_connection adapter. + To prevent recursion/reentrancy, the blocking connection and channel + implementations queue asynchronously-delivered events received + in nested context (e.g., while waiting for `BlockingConnection.channel` or + `BlockingChannel.queue_declare` to complete), dispatching them synchronously + once nesting returns to the desired context. This concerns all callbacks, + such as those registered via `BlockingConnection.add_timeout`, + `BlockingConnection.add_on_connection_blocked_callback`, + `BlockingConnection.add_on_connection_unblocked_callback`, + `BlockingChannel.basic_consume`, etc. + + Blocked Connection deadlock avoidance: when RabbitMQ becomes low on + resources, it emits Connection.Blocked (AMQP extension) to the client + connection when client makes a resource-consuming request on that connection + or its channel (e.g., `Basic.Publish`); subsequently, RabbitMQ suspsends + processing requests from that connection until the affected resources are + restored. See http://www.rabbitmq.com/connection-blocked.html. This + may impact `BlockingConnection` and `BlockingChannel` operations in a + way that users might not be expecting. For example, if the user dispatches + `BlockingChannel.basic_publish` in non-publisher-confirmation mode while + RabbitMQ is in this low-resource state followed by a synchronous request + (e.g., `BlockingConnection.channel`, `BlockingChannel.consume`, + `BlockingChannel.basic_consume`, etc.), the synchronous request will block + indefinitely (until Connection.Unblocked) waiting for RabbitMQ to reply. If + the blocked state persists for a long time, the blocking operation will + appear to hang. In this state, `BlockingConnection` instance and its + channels will not dispatch user callbacks. SOLUTION: To break this potential + deadlock, applications may configure the `blocked_connection_timeout` + connection parameter when instantiating `BlockingConnection`. Upon blocked + connection timeout, this adapter will raise ConnectionClosed exception with + first exception arg of + `pika.connection.InternalCloseReasons.BLOCKED_CONNECTION_TIMEOUT`. See + `pika.connection.ConnectionParameters` documentation to learn more about + `blocked_connection_timeout` configuration. 
+ """ # Connection-opened callback args _OnOpenedArgs = namedtuple('BlockingConnection__OnOpenedArgs', @@ -341,6 +375,9 @@ class BlockingConnection(object): # pylint: disable=R0902 self._process_io_for_connection_setup() + def __repr__(self): + return '<%s impl=%r>' % (self.__class__.__name__, self._impl) + def _cleanup(self): """Clean up members that might inhibit garbage collection""" self._impl.ioloop.deactivate_poller() @@ -541,8 +578,10 @@ class BlockingConnection(object): # pylint: disable=R0902 instead of relying on back pressure throttling. The callback will be passed the `Connection.Blocked` method frame. + See also `ConnectionParameters.blocked_connection_timeout`. + :param method callback_method: Callback to call on `Connection.Blocked`, - having the signature callback_method(pika.frame.Method), where the + having the signature `callback_method(pika.frame.Method)`, where the method frame's `method` member is of type `pika.spec.Connection.Blocked` @@ -559,7 +598,7 @@ class BlockingConnection(object): # pylint: disable=R0902 :param method callback_method: Callback to call on `Connection.Unblocked`, having the signature - callback_method(pika.frame.Method), where the method frame's + `callback_method(pika.frame.Method)`, where the method frame's `method` member is of type `pika.spec.Connection.Unblocked` """ @@ -640,7 +679,12 @@ class BlockingConnection(object): # pylint: disable=R0902 for impl_channel in pika.compat.dictvalues(self._impl._channels): channel = impl_channel._get_cookie() if channel.is_open: - channel.close(reply_code, reply_text) + try: + channel.close(reply_code, reply_text) + except exceptions.ChannelClosed as exc: + # Log and suppress broker-closed channel + LOGGER.warning('Got ChannelClosed while closing channel ' + 'from connection.close: %r', exc) # Close the connection self._impl.close(reply_code, reply_text) @@ -844,8 +888,8 @@ class _ConsumerCancellationEvt(_ChannelPendingEvt): # pylint: disable=R0903 self.method_frame = method_frame def __repr__(self): - return '%s(method_frame=%r)' % (self.__class__.__name__, - self.method_frame) + return '<%s method_frame=%r>' % (self.__class__.__name__, + self.method_frame) @property def method(self): @@ -879,10 +923,10 @@ class _ReturnedMessageEvt(_ChannelPendingEvt): # pylint: disable=R0903 self.body = body def __repr__(self): - return ('%s(callback=%r, channel=%r, method=%r, properties=%r, ' - 'body=%.300r') % (self.__class__.__name__, self.callback, - self.channel, self.method, self.properties, - self.body) + return ('<%s callback=%r channel=%r method=%r properties=%r ' + 'body=%.300r>') % (self.__class__.__name__, self.callback, + self.channel, self.method, self.properties, + self.body) def dispatch(self): """Dispatch user's callback""" @@ -989,7 +1033,7 @@ class _QueueConsumerGeneratorInfo(object): # pylint: disable=R0903 self.pending_events = deque() def __repr__(self): - return '%s(params=%r, consumer_tag=%r)' % ( + return '<%s params=%r consumer_tag=%r>' % ( self.__class__.__name__, self.params, self.consumer_tag) @@ -1118,20 +1162,26 @@ class BlockingChannel(object): # pylint: disable=R0904,R0902 LOGGER.info("Created channel=%s", self.channel_number) - def _cleanup(self): - """Clean up members that might inhibit garbage collection""" - self._message_confirmation_result.reset() - self._pending_events = deque() - self._consumer_infos = dict() - def __int__(self): """Return the channel object as its channel number + NOTE: inherited from legacy BlockingConnection; might be error-prone; + use `channel_number` 
property instead. + :rtype: int """ return self.channel_number + def __repr__(self): + return '<%s impl=%r>' % (self.__class__.__name__, self._impl) + + def _cleanup(self): + """Clean up members that might inhibit garbage collection""" + self._message_confirmation_result.reset() + self._pending_events = deque() + self._consumer_infos = dict() + @property def channel_number(self): """Channel number""" diff --git a/pika/adapters/select_connection.py b/pika/adapters/select_connection.py index 875c48a..ca7b53d 100644 --- a/pika/adapters/select_connection.py +++ b/pika/adapters/select_connection.py @@ -167,7 +167,7 @@ class IOLoop(object): return self._poller.add_timeout(deadline, callback_method) def remove_timeout(self, timeout_id): - """[API] Remove a timeout if it's still in the timeout stack + """[API] Remove a timeout :param str timeout_id: The timeout id to remove diff --git a/pika/adapters/twisted_connection.py b/pika/adapters/twisted_connection.py index 62e595c..e2c7625 100644 --- a/pika/adapters/twisted_connection.py +++ b/pika/adapters/twisted_connection.py @@ -16,6 +16,7 @@ import functools from twisted.internet import defer, error, reactor from twisted.python import log +from pika import connection from pika import exceptions from pika.adapters import base_connection @@ -338,7 +339,8 @@ class TwistedConnection(base_connection.BaseConnection): if not reason.check(error.ConnectionDone): log.err(reason) - self._on_terminate(-1, str(reason)) + self._on_terminate(connection.InternalCloseReasons.SOCKET_ERROR, + str(reason)) def doRead(self): self._handle_read() diff --git a/pika/channel.py b/pika/channel.py index b7afe54..2165479 100644 --- a/pika/channel.py +++ b/pika/channel.py @@ -75,6 +75,11 @@ class Channel(object): """ return self.channel_number + def __repr__(self): + return '<%s number=%s conn=%r>' % (self.__class__.__name__, + self.channel_number, + self.connection) + def add_callback(self, callback, replies, one_shot=True): """Pass in a callback handler and a list replies from the RabbitMQ broker which you'd like the callback notified of. Callbacks @@ -943,9 +948,10 @@ class Channel(object): """ LOGGER.info('%s', method_frame) - LOGGER.warning('Received remote Channel.Close (%s): %s', + LOGGER.warning('Received remote Channel.Close (%s): %r on channel %s', method_frame.method.reply_code, - method_frame.method.reply_text) + method_frame.method.reply_text, + self) if self.connection.is_open: self._send_method(spec.Channel.CloseOk()) self._set_state(self.CLOSED) diff --git a/pika/connection.py b/pika/connection.py index 51b0b44..1bbf8e7 100644 --- a/pika/connection.py +++ b/pika/connection.py @@ -35,6 +35,16 @@ PRODUCT = "Pika Python Client Library" LOGGER = logging.getLogger(__name__) +class InternalCloseReasons(object): + """Internal reason codes passed to the user's on_close_callback. + + AMQP 0.9.1 specification sites IETF RFC 821 for reply codes. To avoid + conflict, the `InternalCloseReasons` namespace uses negative integers. 
+ """ + SOCKET_ERROR = -1 + BLOCKED_CONNECTION_TIMEOUT = -2 + + class Parameters(object): """Base connection parameters class definition @@ -54,6 +64,7 @@ class Parameters(object): :param dict DEFAULT_SSL_OPTIONS: {} :param int DEFAULT_SSL_PORT: 5671 :param bool DEFAULT_BACKPRESSURE_DETECTION: False + :param number DEFAULT_BLOCKED_CONNECTION_TIMEOUT: None """ DEFAULT_BACKPRESSURE_DETECTION = False @@ -72,6 +83,7 @@ class Parameters(object): DEFAULT_SSL_PORT = 5671 DEFAULT_USERNAME = 'guest' DEFAULT_VIRTUAL_HOST = '/' + DEFAULT_BLOCKED_CONNECTION_TIMEOUT = None def __init__(self): self.virtual_host = self.DEFAULT_VIRTUAL_HOST @@ -90,6 +102,13 @@ class Parameters(object): self.ssl_options = self.DEFAULT_SSL_OPTIONS self.socket_timeout = self.DEFAULT_SOCKET_TIMEOUT + # If not None, blocked_connection_timeout is the timeout, in seconds, + # for the connection to remain blocked; if the timeout expires, the + # connection will be torn down, triggering the connection's + # on_close_callback + self.blocked_connection_timeout = ( + self.DEFAULT_BLOCKED_CONNECTION_TIMEOUT) + def __repr__(self): """Represent the info about the instance. @@ -276,6 +295,23 @@ class Parameters(object): raise ValueError('socket_timeout must be > 0') return True + @staticmethod + def _validate_blocked_connection_timeout(blocked_connection_timeout): + """Validate that the blocked_connection_timeout value is None or a + number + + :param real blocked_connection_timeout: The value to validate + :rtype: bool + :raises: TypeError + + """ + if blocked_connection_timeout is not None: + if not isinstance(blocked_connection_timeout, (int, float)): + raise TypeError('blocked_connection_timeout must be a Real number') + if blocked_connection_timeout < 0: + raise ValueError('blocked_connection_timeout must be >= 0') + return True + @staticmethod def _validate_ssl(ssl): """Validate the SSL toggle is a bool @@ -320,21 +356,6 @@ class ConnectionParameters(Parameters): """Connection parameters object that is passed into the connection adapter upon construction. - :param str host: Hostname or IP Address to connect to - :param int port: TCP port to connect to - :param str virtual_host: RabbitMQ virtual host to use - :param pika.credentials.Credentials credentials: auth credentials - :param int channel_max: Maximum number of channels to allow - :param int frame_max: The maximum byte size for an AMQP frame - :param int heartbeat_interval: How often to send heartbeats - :param bool ssl: Enable SSL - :param dict ssl_options: Arguments passed to ssl.wrap_socket as - :param int connection_attempts: Maximum number of retry attempts - :param int|float retry_delay: Time to wait in seconds, before the next - :param int|float socket_timeout: Use for high latency networks - :param str locale: Set the locale value - :param bool backpressure_detection: Toggle backpressure detection - """ def __init__(self, @@ -351,7 +372,8 @@ class ConnectionParameters(Parameters): retry_delay=None, socket_timeout=None, locale=None, - backpressure_detection=None): + backpressure_detection=None, + blocked_connection_timeout=None): """Create a new ConnectionParameters instance. 
:param str host: Hostname or IP Address to connect to @@ -371,6 +393,15 @@ class ConnectionParameters(Parameters): :param int|float socket_timeout: Use for high latency networks :param str locale: Set the locale value :param bool backpressure_detection: Toggle backpressure detection + :param blocked_connection_timeout: If not None, + the value is a non-negative timeout, in seconds, for the + connection to remain blocked (triggered by Connection.Blocked from + broker); if the timeout expires before connection becomes unblocked, + the connection will be torn down, triggering the adapter-specific + mechanism for informing client app about the closed connection ( + e.g., on_close_callback or ConnectionClosed exception) with + `reason_code` of `InternalCloseReasons.BLOCKED_CONNECTION_TIMEOUT`. + :type blocked_connection_timeout: None, int, float """ super(ConnectionParameters, self).__init__() @@ -413,6 +444,9 @@ class ConnectionParameters(Parameters): if (backpressure_detection is not None and self._validate_backpressure(backpressure_detection)): self.backpressure_detection = backpressure_detection + if self._validate_blocked_connection_timeout( + blocked_connection_timeout): + self.blocked_connection_timeout = blocked_connection_timeout class URLParameters(Parameters): @@ -447,6 +481,11 @@ class URLParameters(Parameters): connection failure. - socket_timeout: Override low level socket timeout value + - blocked_connection_timeout: + Set the timeout, in seconds, that the connection may remain blocked + (triggered by Connection.Blocked from broker); if the timeout + expires before connection becomes unblocked, the connection will be + torn down, triggering the connection's on_close_callback :param str url: The AMQP URL to connect to @@ -540,8 +579,8 @@ class URLParameters(Parameters): self.heartbeat = values['heartbeat'] elif ('heartbeat_interval' in values and - self._validate_heartbeat_interval( - values['heartbeat_interval'])): + self._validate_heartbeat_interval( + values['heartbeat_interval'])): warnings.warn('heartbeat_interval is deprecated, use heartbeat', DeprecationWarning, stacklevel=2) self.heartbeat = values['heartbeat_interval'] @@ -557,6 +596,12 @@ class URLParameters(Parameters): self._validate_socket_timeout(values['socket_timeout'])): self.socket_timeout = values['socket_timeout'] + if ('blocked_connection_timeout' in values and + self._validate_blocked_connection_timeout( + values['blocked_connection_timeout'])): + self.blocked_connection_timeout = values[ + 'blocked_connection_timeout'] + if 'ssl_options' in values: options = ast.literal_eval(values['ssl_options']) if self._validate_ssl_options(options): @@ -606,21 +651,35 @@ class Connection(object): :param method on_open_error_callback: Called if the connection can't be established: on_open_error_callback(connection, str|exception) :param method on_close_callback: Called when the connection is closed: - on_close_callback(connection, reason_code, reason_text) + `on_close_callback(connection, reason_code, reason_text)`, where + `reason_code` is either an IETF RFC 821 reply code for AMQP-level + closures or a value from `pika.connection.InternalCloseReasons` for + internal causes, such as socket errors. 
""" self._write_lock = threading.Lock() + self.connection_state = self.CONNECTION_CLOSED + + # Used to hold timer if configured for Connection.Blocked timeout + self._blocked_conn_timer = None + + self.heartbeat = None + + # Set our configuration options + self.params = parameters or ConnectionParameters() + # Define our callback dictionary self.callbacks = callback.CallbackManager() + # Initialize the connection state and connect + self._init_connection_state() + # Add the on connection error callback self.callbacks.add(0, self.ON_CONNECTION_ERROR, on_open_error_callback or self._on_connection_error, False) - self.heartbeat = None - # On connection callback if on_open_callback: self.add_on_open_callback(on_open_callback) @@ -628,12 +687,6 @@ class Connection(object): # On connection callback if on_close_callback: self.add_on_close_callback(on_close_callback) - - # Set our configuration options - self.params = parameters or ConnectionParameters() - - # Initialize the connection state and connect - self._init_connection_state() self.connect() def add_backpressure_callback(self, callback_method): @@ -663,7 +716,12 @@ class Connection(object): instead of relying on back pressure throttling. The callback will be passed the ``Connection.Blocked`` method frame. - :param method callback_method: Callback to call on `Connection.Blocked` + See also `ConnectionParameters.blocked_connection_timeout`. + + :param method callback_method: Callback to call on `Connection.Blocked`, + having the signature `callback_method(pika.frame.Method)`, where the + method frame's `method` member is of type + `pika.spec.Connection.Blocked` """ self.callbacks.add(0, spec.Connection.Blocked, callback_method, False) @@ -675,7 +733,9 @@ class Connection(object): ``Connection.Unblocked`` method frame. :param method callback_method: Callback to call on - `Connection.Unblocked` + `Connection.Unblocked`, having the signature + `callback_method(pika.frame.Method)`, where the method frame's + `method` member is of type `pika.spec.Connection.Unblocked` """ self.callbacks.add(0, spec.Connection.Unblocked, callback_method, False) @@ -782,12 +842,10 @@ class Connection(object): self.remaining_connection_attempts = self.params.connection_attempts self._set_connection_state(self.CONNECTION_CLOSED) - def remove_timeout(self, callback_method): - """Adapters should override to call the callback after the - specified number of seconds have elapsed, using a timer, or a - thread, or similar. + def remove_timeout(self, timeout_id): + """Adapters should override: Remove a timeout - :param method callback_method: The callback to remove a timeout for + :param str timeout_id: The timeout id to remove """ raise NotImplementedError @@ -941,7 +999,7 @@ class Connection(object): """ if (value.method.version_major, - value.method.version_minor) != spec.PROTOCOL_VERSION[0:2]: + value.method.version_minor) != spec.PROTOCOL_VERSION[0:2]: raise exceptions.ProtocolVersionMismatch(frame.ProtocolHeader(), value) @@ -1171,6 +1229,18 @@ class Connection(object): # simply closed the TCP/IP stream. 
self.callbacks.add(0, spec.Connection.Close, self._on_connection_close) + if self.params.blocked_connection_timeout is not None: + if self._blocked_conn_timer is not None: + # Blocked connection timer was active when teardown was + # initiated + self.remove_timeout(self._blocked_conn_timer) + self._blocked_conn_timer = None + + self.add_on_connection_blocked_callback( + self._on_connection_blocked) + self.add_on_connection_unblocked_callback( + self._on_connection_unblocked) + def _is_basic_deliver_frame(self, frame_value): """Returns true if the frame is a Basic.Deliver @@ -1250,6 +1320,51 @@ class Connection(object): # Start the communication with the RabbitMQ Broker self._send_frame(frame.ProtocolHeader()) + def _on_blocked_connection_timeout(self): + """ Called when the "connection blocked timeout" expires. When this + happens, we tear down the connection + + """ + self._blocked_conn_timer = None + self._on_terminate(InternalCloseReasons.BLOCKED_CONNECTION_TIMEOUT, + 'Blocked connection timeout expired') + + def _on_connection_blocked(self, method_frame): + """Handle Connection.Blocked notification from RabbitMQ broker + + :param pika.frame.Method method_frame: method frame having `method` + member of type `pika.spec.Connection.Blocked` + """ + LOGGER.warning('Received %s from broker', method_frame) + + if self._blocked_conn_timer is not None: + # RabbitMQ is not supposed to repeat Connection.Blocked, but it + # doesn't hurt to be careful + LOGGER.warning('_blocked_conn_timer %s already set when ' + '_on_connection_blocked is called', + self._blocked_conn_timer) + else: + self._blocked_conn_timer = self.add_timeout( + self.params.blocked_connection_timeout, + self._on_blocked_connection_timeout) + + def _on_connection_unblocked(self, method_frame): + """Handle Connection.Unblocked notification from RabbitMQ broker + + :param pika.frame.Method method_frame: method frame having `method` + member of type `pika.spec.Connection.Blocked` + """ + LOGGER.info('Received %s from broker', method_frame) + + if self._blocked_conn_timer is None: + # RabbitMQ is supposed to pair Connection.Blocked/Unblocked, but it + # doesn't hurt to be careful + LOGGER.warning('_blocked_conn_timer was not active when ' + '_on_connection_unblocked called') + else: + self.remove_timeout(self._blocked_conn_timer) + self._blocked_conn_timer = None + def _on_connection_close(self, method_frame): """Called when the connection is closed remotely via Connection.Close frame from broker. 
@@ -1412,8 +1527,9 @@ class Connection(object): """Terminate the connection and notify registered ON_CONNECTION_ERROR and/or ON_CONNECTION_CLOSED callbacks - :param integer reason_code: HTTP error code for AMQP-reported closures - or -1 for other errors (such as socket errors) + :param integer reason_code: either IETF RFC 821 reply code for + AMQP-level closures or a value from `InternalCloseReasons` for + internal causes, such as socket errors :param str reason_text: human-readable text message describing the error """ LOGGER.warning( @@ -1436,6 +1552,10 @@ class Connection(object): self._remove_callbacks(0, [spec.Connection.Close, spec.Connection.Start, spec.Connection.Open]) + if self.params.blocked_connection_timeout is not None: + self._remove_callbacks(0, [spec.Connection.Blocked, + spec.Connection.Unblocked]) + # Close the socket self._adapter_disconnect() @@ -1501,7 +1621,7 @@ class Connection(object): """ if (self._is_method_frame(frame_value) and - self._has_pending_callbacks(frame_value)): + self._has_pending_callbacks(frame_value)): self.callbacks.process(frame_value.channel_number, # Prefix frame_value.method, # Key self, # Caller
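The patch above surfaces a blocked-connection timeout through the adapter's normal close path, reporting it via the `on_close_callback(connection, reason_code, reason_text)` signature with `reason_code` set to `InternalCloseReasons.BLOCKED_CONNECTION_TIMEOUT`. The sketch below is illustrative only, not part of the patch: it assumes the `SelectConnection` adapter, a `localhost` broker, and a 300-second timeout as placeholder values, and shows how a close handler might distinguish the new internal reason code from ordinary AMQP reply codes. (The acceptance tests further below also exercise the equivalent `blocked_connection_timeout` URL query option.)

    import pika
    import pika.connection


    def on_open(connection):
        # Connection is usable here; channels can be opened, consumers started, etc.
        print('connection opened')


    def on_close(connection, reason_code, reason_text):
        # reason_code is an AMQP reply code, or a negative value from
        # InternalCloseReasons for internal causes such as socket errors or the
        # blocked-connection timeout introduced by this patch.
        if reason_code == pika.connection.InternalCloseReasons.BLOCKED_CONNECTION_TIMEOUT:
            print('closed because the connection stayed blocked too long')
        else:
            print('closed: %s %s' % (reason_code, reason_text))
        connection.ioloop.stop()


    params = pika.ConnectionParameters(
        host='localhost',                # placeholder broker address
        blocked_connection_timeout=300,  # tear down if blocked for 5 minutes
    )
    conn = pika.SelectConnection(parameters=params,
                                 on_open_callback=on_open,
                                 on_close_callback=on_close)
    conn.ioloop.start()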
Connection blocked events do not get processed when calling `consume()`

If RabbitMQ is below a resource limit when I connect, the BlockingConnection can end up hanging if I first publish a message and then go directly into `consume()`, even when using the `inactivity_timeout` parameter.

As far as I can see, the problem is that when `consume()` sends the `basic_consume` request, it waits for the `OK` from RabbitMQ and does not call the registered connection-blocked callback before entering the actual consumption loop.

Obviously I could wait for a second after publishing my message and then call `process_data_events()` before entering `consume()`, but that is not a real solution.

How to reproduce:

- Make sure you have a RabbitMQ broker that is already below its resource limits, for example by setting `disk_free_limit` to some very high value.
- The callback in the script below gets called as long as the `time.sleep(1)` is present and the sleep is longer than it takes for the `Connection.Blocked` event to arrive. If you remove the sleep, the script just hangs and the callback is never called, because `consume()` is waiting for RabbitMQ's `OK` before entering the consumption loop:

        import time
        import pika

        connection_parameters = pika.ConnectionParameters(
            host='192.168.1.101',
            port=5672,
            credentials=pika.PlainCredentials('guest', 'guest'),
        )
        conn = pika.BlockingConnection(connection_parameters)

        def callback(method):
            # Invoked when the broker emits Connection.Blocked
            print('connection blocked!')

        conn.add_on_connection_blocked_callback(callback)

        chan = conn.channel()
        chan.basic_publish(
            exchange='test_exchange',
            routing_key='key',
            body='test'
        )

        # Without this sleep and the explicit process_data_events() call, the
        # Connection.Blocked frame is never dispatched and consume() hangs.
        time.sleep(1)
        conn.process_data_events()

        for msg in chan.consume(queue='somequeue', inactivity_timeout=1):
            print(msg)

A possible solution might be to also make `_flush_output` handle the `connection_blocked` event: https://github.com/pika/pika/blob/master/pika/adapters/blocking_connection.py#L1522
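As an illustration of how the parameter added by the patch above relates to this report, here is a minimal sketch, under the reporter's placeholder host, credentials, exchange, and queue names, of the same publish-then-consume pattern with `blocked_connection_timeout` configured. Instead of blocking indefinitely inside `consume()`, the connection is torn down once the timeout expires and the client sees `ConnectionClosed` with the `BLOCKED_CONNECTION_TIMEOUT` reason code; the 30-second value is an arbitrary choice for the example.

    import pika
    import pika.connection
    import pika.exceptions

    params = pika.ConnectionParameters(
        host='192.168.1.101',
        port=5672,
        credentials=pika.PlainCredentials('guest', 'guest'),
        # Tear the connection down if the broker keeps it blocked this long.
        blocked_connection_timeout=30,
    )

    conn = pika.BlockingConnection(params)
    chan = conn.channel()

    try:
        chan.basic_publish(exchange='test_exchange', routing_key='key', body='test')
        for msg in chan.consume(queue='somequeue', inactivity_timeout=1):
            print(msg)
    except pika.exceptions.ConnectionClosed as exc:
        blocked_timeout = pika.connection.InternalCloseReasons.BLOCKED_CONNECTION_TIMEOUT
        if exc.args and exc.args[0] == blocked_timeout:
            # The broker kept the connection blocked past the timeout; give up
            # instead of hanging forever in consume().
            print('blocked connection timeout expired:', exc.args)
        else:
            raise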
pika/pika
diff --git a/tests/acceptance/async_adapter_tests.py b/tests/acceptance/async_adapter_tests.py index 9a44208..39446b8 100644 --- a/tests/acceptance/async_adapter_tests.py +++ b/tests/acceptance/async_adapter_tests.py @@ -15,6 +15,9 @@ import uuid from pika import spec from pika.compat import as_bytes +import pika.connection +import pika.frame +import pika.spec from async_test_base import (AsyncTestCase, BoundQueueTestCase, AsyncAdapters) @@ -390,3 +393,68 @@ class TestZ_AccessDenied(AsyncTestCase, AsyncAdapters): # pylint: disable=C0103 def on_open(self, connection): super(TestZ_AccessDenied, self).on_open(connection) self.stop() + + +class TestBlockedConnectionTimesOut(AsyncTestCase, AsyncAdapters): # pylint: disable=C0103 + DESCRIPTION = "Verify that blocked connection terminates on timeout" + + def start(self, *args, **kwargs): + self.parameters.blocked_connection_timeout = 0.001 + self.on_closed_pair = None + super(TestBlockedConnectionTimesOut, self).start(*args, **kwargs) + self.assertEqual( + self.on_closed_pair, + (pika.connection.InternalCloseReasons.BLOCKED_CONNECTION_TIMEOUT, + 'Blocked connection timeout expired')) + + def begin(self, channel): + + # Simulate Connection.Blocked + channel.connection._on_connection_blocked(pika.frame.Method( + 0, + pika.spec.Connection.Blocked('Testing blocked connection timeout'))) + + def on_closed(self, connection, reply_code, reply_text): + """called when the connection has finished closing""" + self.on_closed_pair = (reply_code, reply_text) + super(TestBlockedConnectionTimesOut, self).on_closed(connection, + reply_code, + reply_text) + + +class TestBlockedConnectionUnblocks(AsyncTestCase, AsyncAdapters): # pylint: disable=C0103 + DESCRIPTION = "Verify that blocked-unblocked connection closes normally" + + def start(self, *args, **kwargs): + self.parameters.blocked_connection_timeout = 0.001 + self.on_closed_pair = None + super(TestBlockedConnectionUnblocks, self).start(*args, **kwargs) + self.assertEqual( + self.on_closed_pair, + (200, 'Normal shutdown')) + + def begin(self, channel): + + # Simulate Connection.Blocked + channel.connection._on_connection_blocked(pika.frame.Method( + 0, + pika.spec.Connection.Blocked( + 'Testing blocked connection unblocks'))) + + # Simulate Connection.Unblocked + channel.connection._on_connection_unblocked(pika.frame.Method( + 0, + pika.spec.Connection.Unblocked())) + + # Schedule shutdown after blocked connection timeout would expire + channel.connection.add_timeout(0.005, self.on_cleanup_timer) + + def on_cleanup_timer(self): + self.stop() + + def on_closed(self, connection, reply_code, reply_text): + """called when the connection has finished closing""" + self.on_closed_pair = (reply_code, reply_text) + super(TestBlockedConnectionUnblocks, self).on_closed(connection, + reply_code, + reply_text) diff --git a/tests/acceptance/blocking_adapter_test.py b/tests/acceptance/blocking_adapter_test.py index a6e781a..4fac0f9 100644 --- a/tests/acceptance/blocking_adapter_test.py +++ b/tests/acceptance/blocking_adapter_test.py @@ -376,7 +376,7 @@ class TestProcessDataEvents(BlockingTestCaseBase): self.assertLess(elapsed, 0.25) -class TestConnectionBlockAndUnblock(BlockingTestCaseBase): +class TestConnectionRegisterForBlockAndUnblock(BlockingTestCaseBase): def test(self): """BlockingConnection register for Connection.Blocked/Unblocked""" @@ -407,6 +407,35 @@ class TestConnectionBlockAndUnblock(BlockingTestCaseBase): self.assertEqual(unblocked_buffer, ["unblocked"]) +class 
TestBlockedConnectionTimeout(BlockingTestCaseBase): + + def test(self): + """BlockingConnection Connection.Blocked timeout """ + url = DEFAULT_URL + '&blocked_connection_timeout=0.001' + conn = self._connect(url=url) + + # NOTE: I haven't figured out yet how to coerce RabbitMQ to emit + # Connection.Block and Connection.Unblock from the test, so we'll + # simulate it for now + + # Simulate Connection.Blocked + conn._impl._on_connection_blocked(pika.frame.Method( + 0, + pika.spec.Connection.Blocked('TestBlockedConnectionTimeout'))) + + # Wait for connection teardown + with self.assertRaises(pika.exceptions.ConnectionClosed) as excCtx: + while True: + conn.process_data_events(time_limit=1) + + self.assertEqual( + excCtx.exception.args, + (pika.connection.InternalCloseReasons.BLOCKED_CONNECTION_TIMEOUT, + 'Blocked connection timeout expired')) + + + + class TestAddTimeoutRemoveTimeout(BlockingTestCaseBase): def test(self): diff --git a/tests/unit/blocking_connection_tests.py b/tests/unit/blocking_connection_tests.py index 8814bc9..775e490 100644 --- a/tests/unit/blocking_connection_tests.py +++ b/tests/unit/blocking_connection_tests.py @@ -18,7 +18,6 @@ Tests for pika.adapters.blocking_connection.BlockingConnection import socket -from pika.exceptions import AMQPConnectionError try: from unittest import mock # pylint: disable=E0611 @@ -33,6 +32,8 @@ except ImportError: import pika from pika.adapters import blocking_connection +import pika.channel +from pika.exceptions import AMQPConnectionError, ChannelClosed class BlockingConnectionMockTemplate(blocking_connection.BlockingConnection): @@ -198,7 +199,8 @@ class BlockingConnectionTests(unittest.TestCase): '_process_io_for_connection_setup'): connection = blocking_connection.BlockingConnection('params') - connection._impl._channels = {1: mock.Mock()} + impl_channel_mock = mock.Mock() + connection._impl._channels = {1: impl_channel_mock} with mock.patch.object( blocking_connection.BlockingConnection, @@ -207,6 +209,53 @@ class BlockingConnectionTests(unittest.TestCase): connection._closed_result.signal_once() connection.close(200, 'text') + impl_channel_mock._get_cookie.return_value.close.assert_called_once_with( + 200, 'text') + select_connection_class_mock.return_value.close.assert_called_once_with( + 200, 'text') + + @patch.object(blocking_connection, 'SelectConnection', + spec_set=SelectConnectionTemplate) + def test_close_with_channel_closed_exception(self, + select_connection_class_mock): + select_connection_class_mock.return_value.is_closed = False + + with mock.patch.object(blocking_connection.BlockingConnection, + '_process_io_for_connection_setup'): + connection = blocking_connection.BlockingConnection('params') + + channel1_mock = mock.Mock( + is_open=True, + close=mock.Mock(side_effect=ChannelClosed, + spec_set=pika.channel.Channel.close), + spec_set=blocking_connection.BlockingChannel) + + channel2_mock = mock.Mock( + is_open=True, + spec_set=blocking_connection.BlockingChannel) + + connection._impl._channels = { + 1: mock.Mock( + _get_cookie=mock.Mock( + return_value=channel1_mock, + spec_set=pika.channel.Channel._get_cookie), + spec_set=pika.channel.Channel), + 2: mock.Mock( + _get_cookie=mock.Mock( + return_value=channel2_mock, + spec_set=pika.channel.Channel._get_cookie), + spec_set=pika.channel.Channel) + } + + with mock.patch.object( + blocking_connection.BlockingConnection, + '_flush_output', + spec_set=blocking_connection.BlockingConnection._flush_output): + connection._closed_result.signal_once() + connection.close(200, 
'text') + + channel1_mock.close.assert_called_once_with(200, 'text') + channel2_mock.close.assert_called_once_with(200, 'text') select_connection_class_mock.return_value.close.assert_called_once_with( 200, 'text') diff --git a/tests/unit/channel_tests.py b/tests/unit/channel_tests.py index 93294a3..4a31e89 100644 --- a/tests/unit/channel_tests.py +++ b/tests/unit/channel_tests.py @@ -1287,6 +1287,8 @@ class ChannelTests(unittest.TestCase): method_frame = frame.Method(self.obj.channel_number, spec.Channel.Close(999, 'Test_Value')) self.obj._on_close(method_frame) - warning.assert_called_with('Received remote Channel.Close (%s): %s', - method_frame.method.reply_code, - method_frame.method.reply_text) + warning.assert_called_with( + 'Received remote Channel.Close (%s): %r on channel %s', + method_frame.method.reply_code, + method_frame.method.reply_text, + self.obj) diff --git a/tests/unit/connection_tests.py b/tests/unit/connection_tests.py index f7e2a53..821dfa3 100644 --- a/tests/unit/connection_tests.py +++ b/tests/unit/connection_tests.py @@ -42,14 +42,15 @@ def callback_method(): class ConnectionTests(unittest.TestCase): - @mock.patch('pika.connection.Connection.connect') - def setUp(self, connect): - self.connection = connection.Connection() + def setUp(self): + with mock.patch('pika.connection.Connection.connect'): + self.connection = connection.Connection() + self.connection._set_connection_state( + connection.Connection.CONNECTION_OPEN) + self.channel = mock.Mock(spec=channel.Channel) self.channel.is_open = True self.connection._channels[1] = self.channel - self.connection._set_connection_state( - connection.Connection.CONNECTION_OPEN) def tearDown(self): del self.connection @@ -335,7 +336,8 @@ class ConnectionTests(unittest.TestCase): 'ssl': True, 'connection_attempts': 2, 'locale': 'en', - 'ssl_options': {'ssl': 'options'} + 'ssl_options': {'ssl': 'options'}, + 'blocked_connection_timeout': 10.5 } conn = connection.ConnectionParameters(**kwargs) #check values @@ -356,9 +358,10 @@ class ConnectionTests(unittest.TestCase): 'frame_max': 40000, 'heartbeat_interval': 7, 'backpressure_detection': False, - 'ssl': True + 'ssl': True, + 'blocked_connection_timeout': 10.5 } - #Test Type Errors + # Test Type Errors for bad_field, bad_value in ( ('host', 15672), ('port', '5672'), ('virtual_host', True), ('channel_max', '4'), ('frame_max', '5'), @@ -366,9 +369,13 @@ class ConnectionTests(unittest.TestCase): ('heartbeat_interval', '6'), ('socket_timeout', '42'), ('retry_delay', 'two'), ('backpressure_detection', 'true'), ('ssl', {'ssl': 'dict'}), ('ssl_options', True), - ('connection_attempts', 'hello')): + ('connection_attempts', 'hello'), + ('blocked_connection_timeout', set())): + bkwargs = copy.deepcopy(kwargs) + bkwargs[bad_field] = bad_value + self.assertRaises(TypeError, connection.ConnectionParameters, **bkwargs) @@ -590,3 +597,205 @@ class ConnectionTests(unittest.TestCase): self.assertEqual(1, self.connection.frames_received) if frame_type == frame.Heartbeat: self.assertTrue(self.connection.heartbeat.received.called) + + @mock.patch.object(connection.Connection, 'connect', + spec_set=connection.Connection.connect) + @mock.patch.object(connection.Connection, + 'add_on_connection_blocked_callback') + @mock.patch.object(connection.Connection, + 'add_on_connection_unblocked_callback') + def test_create_with_blocked_connection_timeout_config( + self, + add_on_unblocked_callback_mock, + add_on_blocked_callback_mock, + connect_mock): + + conn = connection.Connection( + 
parameters=connection.ConnectionParameters( + blocked_connection_timeout=60)) + + # Check + conn.add_on_connection_blocked_callback.assert_called_once_with( + conn._on_connection_blocked) + + conn.add_on_connection_unblocked_callback.assert_called_once_with( + conn._on_connection_unblocked) + + @mock.patch.object(connection.Connection, 'add_timeout') + @mock.patch.object(connection.Connection, 'connect', + spec_set=connection.Connection.connect) + def test_connection_blocked_sets_timer( + self, + connect_mock, + add_timeout_mock): + + conn = connection.Connection( + parameters=connection.ConnectionParameters( + blocked_connection_timeout=60)) + + conn._on_connection_blocked( + mock.Mock(name='frame.Method(Connection.Blocked)')) + + # Check + conn.add_timeout.assert_called_once_with( + 60, + conn._on_blocked_connection_timeout) + + self.assertIsNotNone(conn._blocked_conn_timer) + + @mock.patch.object(connection.Connection, 'add_timeout') + @mock.patch.object(connection.Connection, 'connect', + spec_set=connection.Connection.connect) + def test_multi_connection_blocked_in_a_row_sets_timer_once( + self, + connect_mock, + add_timeout_mock): + + conn = connection.Connection( + parameters=connection.ConnectionParameters( + blocked_connection_timeout=60)) + + # Simulate Connection.Blocked trigger + conn._on_connection_blocked( + mock.Mock(name='frame.Method(Connection.Blocked)')) + + # Check + conn.add_timeout.assert_called_once_with( + 60, + conn._on_blocked_connection_timeout) + + self.assertIsNotNone(conn._blocked_conn_timer) + + timer = conn._blocked_conn_timer + + # Simulate Connection.Blocked trigger again + conn._on_connection_blocked( + mock.Mock(name='frame.Method(Connection.Blocked)')) + + self.assertEqual(conn.add_timeout.call_count, 1) + self.assertIs(conn._blocked_conn_timer, timer) + + @mock.patch.object(connection.Connection, '_on_terminate') + @mock.patch.object(connection.Connection, 'add_timeout', + spec_set=connection.Connection.add_timeout) + @mock.patch.object(connection.Connection, 'connect', + spec_set=connection.Connection.connect) + def test_blocked_connection_timeout_teminates_connection( + self, + connect_mock, + add_timeout_mock, + on_terminate_mock): + + conn = connection.Connection( + parameters=connection.ConnectionParameters( + blocked_connection_timeout=60)) + + conn._on_connection_blocked( + mock.Mock(name='frame.Method(Connection.Blocked)')) + + conn._on_blocked_connection_timeout() + + # Check + conn._on_terminate.assert_called_once_with( + connection.InternalCloseReasons.BLOCKED_CONNECTION_TIMEOUT, + 'Blocked connection timeout expired') + + self.assertIsNone(conn._blocked_conn_timer) + + @mock.patch.object(connection.Connection, 'remove_timeout') + @mock.patch.object(connection.Connection, 'add_timeout', + spec_set=connection.Connection.add_timeout) + @mock.patch.object(connection.Connection, 'connect', + spec_set=connection.Connection.connect) + def test_connection_unblocked_removes_timer( + self, + connect_mock, + add_timeout_mock, + remove_timeout_mock): + + conn = connection.Connection( + parameters=connection.ConnectionParameters( + blocked_connection_timeout=60)) + + conn._on_connection_blocked( + mock.Mock(name='frame.Method(Connection.Blocked)')) + + self.assertIsNotNone(conn._blocked_conn_timer) + + timer = conn._blocked_conn_timer + + conn._on_connection_unblocked( + mock.Mock(name='frame.Method(Connection.Unblocked)')) + + # Check + conn.remove_timeout.assert_called_once_with(timer) + self.assertIsNone(conn._blocked_conn_timer) + + 
@mock.patch.object(connection.Connection, 'remove_timeout') + @mock.patch.object(connection.Connection, 'add_timeout', + spec_set=connection.Connection.add_timeout) + @mock.patch.object(connection.Connection, 'connect', + spec_set=connection.Connection.connect) + def test_multi_connection_unblocked_in_a_row_removes_timer_once( + self, + connect_mock, + add_timeout_mock, + remove_timeout_mock): + + conn = connection.Connection( + parameters=connection.ConnectionParameters( + blocked_connection_timeout=60)) + + # Simulate Connection.Blocked + conn._on_connection_blocked( + mock.Mock(name='frame.Method(Connection.Blocked)')) + + self.assertIsNotNone(conn._blocked_conn_timer) + + timer = conn._blocked_conn_timer + + # Simulate Connection.Unblocked + conn._on_connection_unblocked( + mock.Mock(name='frame.Method(Connection.Unblocked)')) + + # Check + conn.remove_timeout.assert_called_once_with(timer) + self.assertIsNone(conn._blocked_conn_timer) + + # Simulate Connection.Unblocked again + conn._on_connection_unblocked( + mock.Mock(name='frame.Method(Connection.Unblocked)')) + + self.assertEqual(conn.remove_timeout.call_count, 1) + self.assertIsNone(conn._blocked_conn_timer) + + @mock.patch.object(connection.Connection, 'remove_timeout') + @mock.patch.object(connection.Connection, 'add_timeout', + spec_set=connection.Connection.add_timeout) + @mock.patch.object(connection.Connection, 'connect', + spec_set=connection.Connection.connect) + @mock.patch.object(connection.Connection, '_adapter_disconnect', + spec_set=connection.Connection._adapter_disconnect) + def test_on_terminate_removes_timer( + self, + adapter_disconnect_mock, + connect_mock, + add_timeout_mock, + remove_timeout_mock): + + conn = connection.Connection( + parameters=connection.ConnectionParameters( + blocked_connection_timeout=60)) + + conn._on_connection_blocked( + mock.Mock(name='frame.Method(Connection.Blocked)')) + + self.assertIsNotNone(conn._blocked_conn_timer) + + timer = conn._blocked_conn_timer + + conn._on_terminate(0, 'test_on_terminate_removes_timer') + + # Check + conn.remove_timeout.assert_called_once_with(timer) + self.assertIsNone(conn._blocked_conn_timer)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_added_files", "has_many_modified_files", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 9 }
0.10
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "yapf", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc libev-dev" ], "python": "3.5", "reqs_path": [ "test-requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 charset-normalizer==2.0.12 codecov==2.1.13 coverage==6.2 idna==3.10 importlib-metadata==4.8.3 iniconfig==1.1.1 mock==5.2.0 nose==1.3.7 packaging==21.3 -e git+https://github.com/pika/pika.git@c467ad22fb8f2fd2bc925fa59d3b082fd035302b#egg=pika pluggy==1.0.0 py==1.11.0 pyparsing==3.1.4 pytest==7.0.1 requests==2.27.1 tomli==1.2.3 tornado==6.1 Twisted==15.3.0 typing_extensions==4.1.1 urllib3==1.26.20 yapf==0.32.0 zipp==3.6.0 zope.interface==5.5.2
name: pika channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - charset-normalizer==2.0.12 - codecov==2.1.13 - coverage==6.2 - idna==3.10 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - mock==5.2.0 - nose==1.3.7 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - requests==2.27.1 - tomli==1.2.3 - tornado==6.1 - twisted==15.3.0 - typing-extensions==4.1.1 - urllib3==1.26.20 - yapf==0.32.0 - zipp==3.6.0 - zope-interface==5.5.2 prefix: /opt/conda/envs/pika
[ "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_close_with_channel_closed_exception", "tests/unit/channel_tests.py::ChannelTests::test_on_close_warning", "tests/unit/connection_tests.py::ConnectionTests::test_blocked_connection_timeout_teminates_connection", "tests/unit/connection_tests.py::ConnectionTests::test_connection_blocked_sets_timer", "tests/unit/connection_tests.py::ConnectionTests::test_connection_unblocked_removes_timer", "tests/unit/connection_tests.py::ConnectionTests::test_create_with_blocked_connection_timeout_config", "tests/unit/connection_tests.py::ConnectionTests::test_good_connection_parameters", "tests/unit/connection_tests.py::ConnectionTests::test_multi_connection_blocked_in_a_row_sets_timer_once", "tests/unit/connection_tests.py::ConnectionTests::test_multi_connection_unblocked_in_a_row_removes_timer_once", "tests/unit/connection_tests.py::ConnectionTests::test_on_terminate_removes_timer" ]
[ "tests/acceptance/blocking_adapter_test.py::TestCreateAndCloseConnection::test", "tests/acceptance/blocking_adapter_test.py::TestMultiCloseConnection::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionContextManagerClosesConnection::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionContextManagerClosesConnectionAndPassesOriginalException::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionContextManagerClosesConnectionAndPassesSystemException::test", "tests/acceptance/blocking_adapter_test.py::TestInvalidExchangeTypeRaisesConnectionClosed::test", "tests/acceptance/blocking_adapter_test.py::TestCreateAndCloseConnectionWithChannelAndConsumer::test", "tests/acceptance/blocking_adapter_test.py::TestSuddenBrokerDisconnectBeforeChannel::test", "tests/acceptance/blocking_adapter_test.py::TestNoAccessToFileDescriptorAfterConnectionClosed::test", "tests/acceptance/blocking_adapter_test.py::TestDisconnectDuringConnectionStart::test", "tests/acceptance/blocking_adapter_test.py::TestDisconnectDuringConnectionTune::test", "tests/acceptance/blocking_adapter_test.py::TestProcessDataEvents::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionRegisterForBlockAndUnblock::test", "tests/acceptance/blocking_adapter_test.py::TestBlockedConnectionTimeout::test", "tests/acceptance/blocking_adapter_test.py::TestAddTimeoutRemoveTimeout::test", "tests/acceptance/blocking_adapter_test.py::TestRemoveTimeoutFromTimeoutCallback::test", "tests/acceptance/blocking_adapter_test.py::TestSleep::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionProperties::test", "tests/acceptance/blocking_adapter_test.py::TestCreateAndCloseChannel::test", "tests/acceptance/blocking_adapter_test.py::TestExchangeDeclareAndDelete::test", "tests/acceptance/blocking_adapter_test.py::TestExchangeBindAndUnbind::test", "tests/acceptance/blocking_adapter_test.py::TestQueueDeclareAndDelete::test", "tests/acceptance/blocking_adapter_test.py::TestPassiveQueueDeclareOfUnknownQueueRaisesChannelClosed::test", "tests/acceptance/blocking_adapter_test.py::TestQueueBindAndUnbindAndPurge::test", "tests/acceptance/blocking_adapter_test.py::TestBasicGet::test", "tests/acceptance/blocking_adapter_test.py::TestBasicReject::test", "tests/acceptance/blocking_adapter_test.py::TestBasicRejectNoRequeue::test", "tests/acceptance/blocking_adapter_test.py::TestBasicNack::test", "tests/acceptance/blocking_adapter_test.py::TestBasicNackNoRequeue::test", "tests/acceptance/blocking_adapter_test.py::TestBasicNackMultiple::test", "tests/acceptance/blocking_adapter_test.py::TestBasicRecoverWithRequeue::test", "tests/acceptance/blocking_adapter_test.py::TestTxCommit::test", "tests/acceptance/blocking_adapter_test.py::TestTxRollback::test", "tests/acceptance/blocking_adapter_test.py::TestBasicConsumeFromUnknownQueueRaisesChannelClosed::test", "tests/acceptance/blocking_adapter_test.py::TestPublishAndBasicPublishWithPubacksUnroutable::test", "tests/acceptance/blocking_adapter_test.py::TestConfirmDeliveryAfterUnroutableMessage::test", "tests/acceptance/blocking_adapter_test.py::TestUnroutableMessagesReturnedInNonPubackMode::test", "tests/acceptance/blocking_adapter_test.py::TestUnroutableMessageReturnedInPubackMode::test", "tests/acceptance/blocking_adapter_test.py::TestBasicPublishDeliveredWhenPendingUnroutable::test", "tests/acceptance/blocking_adapter_test.py::TestPublishAndConsumeWithPubacksAndQosOfOne::test", "tests/acceptance/blocking_adapter_test.py::TestTwoBasicConsumersOnSameChannel::test", 
"tests/acceptance/blocking_adapter_test.py::TestBasicCancelPurgesPendingConsumerCancellationEvt::test", "tests/acceptance/blocking_adapter_test.py::TestBasicPublishWithoutPubacks::test", "tests/acceptance/blocking_adapter_test.py::TestPublishFromBasicConsumeCallback::test", "tests/acceptance/blocking_adapter_test.py::TestStopConsumingFromBasicConsumeCallback::test", "tests/acceptance/blocking_adapter_test.py::TestCloseChannelFromBasicConsumeCallback::test", "tests/acceptance/blocking_adapter_test.py::TestCloseConnectionFromBasicConsumeCallback::test", "tests/acceptance/blocking_adapter_test.py::TestNonPubAckPublishAndConsumeHugeMessage::test", "tests/acceptance/blocking_adapter_test.py::TestNonPubackPublishAndConsumeManyMessages::test", "tests/acceptance/blocking_adapter_test.py::TestBasicCancelWithNonAckableConsumer::test", "tests/acceptance/blocking_adapter_test.py::TestBasicCancelWithAckableConsumer::test", "tests/acceptance/blocking_adapter_test.py::TestUnackedMessageAutoRestoredToQueueOnChannelClose::test", "tests/acceptance/blocking_adapter_test.py::TestNoAckMessageNotRestoredToQueueOnChannelClose::test", "tests/acceptance/blocking_adapter_test.py::TestChannelFlow::test" ]
[ "tests/acceptance/blocking_adapter_test.py::TestConnectWithDownedBroker::test", "tests/acceptance/blocking_adapter_test.py::TestDisconnectDuringConnectionProtocol::test", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_channel", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_close", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_connection_attempts_with_timeout", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_constructor", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_flush_output", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_flush_output_server_initiated_error_close", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_flush_output_server_initiated_no_error_close", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_flush_output_user_initiated_close", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_process_io_for_connection_setup", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_process_io_for_connection_setup_fails_with_open_error", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_sleep", "tests/unit/channel_tests.py::ChannelTests::test_add_callback", "tests/unit/channel_tests.py::ChannelTests::test_add_callback_multiple_replies", "tests/unit/channel_tests.py::ChannelTests::test_add_callbacks_basic_cancel_empty_added", "tests/unit/channel_tests.py::ChannelTests::test_add_callbacks_basic_get_empty_added", "tests/unit/channel_tests.py::ChannelTests::test_add_callbacks_channel_close_added", "tests/unit/channel_tests.py::ChannelTests::test_add_callbacks_channel_flow_added", "tests/unit/channel_tests.py::ChannelTests::test_add_on_cancel_callback", "tests/unit/channel_tests.py::ChannelTests::test_add_on_close_callback", "tests/unit/channel_tests.py::ChannelTests::test_add_on_flow_callback", "tests/unit/channel_tests.py::ChannelTests::test_add_on_return_callback", "tests/unit/channel_tests.py::ChannelTests::test_basic_ack_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_callback_appended", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_calls_validate", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_channel_cancelled_appended", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_no_consumer_tag", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_on_cancel_appended", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_raises_value_error", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_then_close", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_calls_validate", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumer_tag", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumer_tag_cancelled_full", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumer_tag_in_consumers", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumers_callback_value", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumers_pending_list_is_empty", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumers_rpc_called", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_duplicate_consumer_tag_raises", 
"tests/unit/channel_tests.py::ChannelTests::test_basic_consume_has_pending_list", "tests/unit/channel_tests.py::ChannelTests::test_basic_get_callback", "tests/unit/channel_tests.py::ChannelTests::test_basic_get_calls_validate", "tests/unit/channel_tests.py::ChannelTests::test_basic_get_send_method_called", "tests/unit/channel_tests.py::ChannelTests::test_basic_nack_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_nack_send_method_request", "tests/unit/channel_tests.py::ChannelTests::test_basic_publish_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_publish_send_method_request", "tests/unit/channel_tests.py::ChannelTests::test_basic_qos_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_qos_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_basic_recover_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_recover_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_basic_reject_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_reject_send_method_request_with_int_tag", "tests/unit/channel_tests.py::ChannelTests::test_basic_reject_send_method_request_with_long_tag", "tests/unit/channel_tests.py::ChannelTests::test_basic_reject_spec_with_int_tag", "tests/unit/channel_tests.py::ChannelTests::test_basic_reject_spec_with_long_tag", "tests/unit/channel_tests.py::ChannelTests::test_basic_send_method_calls_rpc", "tests/unit/channel_tests.py::ChannelTests::test_channel_open_add_callbacks_called", "tests/unit/channel_tests.py::ChannelTests::test_cleanup", "tests/unit/channel_tests.py::ChannelTests::test_close_basic_cancel_called", "tests/unit/channel_tests.py::ChannelTests::test_close_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_close_state", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_callback_basic_ack", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_callback_basic_nack", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_callback_with_nowait_raises_value_error", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_callback_without_nowait_selectok", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_no_callback_callback_call_count", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_no_callback_no_basic_ack_callback", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_no_callback_no_basic_nack_callback", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_raises_method_not_implemented_for_confirms", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_raises_method_not_implemented_for_nack", "tests/unit/channel_tests.py::ChannelTests::test_consumer_tags", "tests/unit/channel_tests.py::ChannelTests::test_exchange_bind_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_exchange_bind_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_exchange_bind_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_exchange_bind_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_exchange_declare_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_exchange_declare_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_exchange_declare_rpc_request", 
"tests/unit/channel_tests.py::ChannelTests::test_exchange_declare_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_exchange_declare_with_type_arg_assigns_to_exchange_type", "tests/unit/channel_tests.py::ChannelTests::test_exchange_declare_with_type_arg_raises_deprecation_warning", "tests/unit/channel_tests.py::ChannelTests::test_exchange_delete_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_exchange_delete_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_exchange_delete_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_exchange_delete_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_exchange_unbind_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_exchange_unbind_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_exchange_unbind_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_exchange_unbind_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_flow_off_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_flow_on_flowok_callback", "tests/unit/channel_tests.py::ChannelTests::test_flow_on_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_flow_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_flow_raises_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_get_pending_message", "tests/unit/channel_tests.py::ChannelTests::test_get_pending_message_item_popped", "tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_basic_deliver_called", "tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_basic_get_called", "tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_basic_return_called", "tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_method_returns_none", "tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_sets_header_frame", "tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_sets_method_frame", "tests/unit/channel_tests.py::ChannelTests::test_has_content_false", "tests/unit/channel_tests.py::ChannelTests::test_has_content_true", "tests/unit/channel_tests.py::ChannelTests::test_immediate_called_logger_warning", "tests/unit/channel_tests.py::ChannelTests::test_init_blocked", "tests/unit/channel_tests.py::ChannelTests::test_init_blocking", "tests/unit/channel_tests.py::ChannelTests::test_init_callbacks", "tests/unit/channel_tests.py::ChannelTests::test_init_cancelled", "tests/unit/channel_tests.py::ChannelTests::test_init_channel_number", "tests/unit/channel_tests.py::ChannelTests::test_init_connection", "tests/unit/channel_tests.py::ChannelTests::test_init_consumers", "tests/unit/channel_tests.py::ChannelTests::test_init_frame_dispatcher", "tests/unit/channel_tests.py::ChannelTests::test_init_has_on_flow_callback", "tests/unit/channel_tests.py::ChannelTests::test_init_invalid_channel_number", "tests/unit/channel_tests.py::ChannelTests::test_init_on_flowok_callback", "tests/unit/channel_tests.py::ChannelTests::test_init_on_getok_callback", "tests/unit/channel_tests.py::ChannelTests::test_init_on_openok_callback", "tests/unit/channel_tests.py::ChannelTests::test_init_pending", "tests/unit/channel_tests.py::ChannelTests::test_init_state", "tests/unit/channel_tests.py::ChannelTests::test_is_closed_false", "tests/unit/channel_tests.py::ChannelTests::test_is_closed_true", 
"tests/unit/channel_tests.py::ChannelTests::test_is_closing_false", "tests/unit/channel_tests.py::ChannelTests::test_is_closing_true", "tests/unit/channel_tests.py::ChannelTests::test_on_cancel_not_appended_cancelled", "tests/unit/channel_tests.py::ChannelTests::test_on_cancel_removed_consumer", "tests/unit/channel_tests.py::ChannelTests::test_on_cancelok_removed_consumer", "tests/unit/channel_tests.py::ChannelTests::test_on_cancelok_removed_pending", "tests/unit/channel_tests.py::ChannelTests::test_on_confirm_selectok", "tests/unit/channel_tests.py::ChannelTests::test_on_deliver_callback_called", "tests/unit/channel_tests.py::ChannelTests::test_on_deliver_pending_callbacks_called", "tests/unit/channel_tests.py::ChannelTests::test_on_deliver_pending_called", "tests/unit/channel_tests.py::ChannelTests::test_on_eventok", "tests/unit/channel_tests.py::ChannelTests::test_on_flow", "tests/unit/channel_tests.py::ChannelTests::test_on_flow_with_callback", "tests/unit/channel_tests.py::ChannelTests::test_on_flowok", "tests/unit/channel_tests.py::ChannelTests::test_on_flowok_callback_reset", "tests/unit/channel_tests.py::ChannelTests::test_on_flowok_calls_callback", "tests/unit/channel_tests.py::ChannelTests::test_on_getempty", "tests/unit/channel_tests.py::ChannelTests::test_on_getok_callback_called", "tests/unit/channel_tests.py::ChannelTests::test_on_getok_callback_reset", "tests/unit/channel_tests.py::ChannelTests::test_on_getok_no_callback", "tests/unit/channel_tests.py::ChannelTests::test_on_openok_callback_called", "tests/unit/channel_tests.py::ChannelTests::test_on_openok_no_callback", "tests/unit/channel_tests.py::ChannelTests::test_on_synchronous_complete", "tests/unit/channel_tests.py::ChannelTests::test_onreturn", "tests/unit/channel_tests.py::ChannelTests::test_onreturn_warning", "tests/unit/channel_tests.py::ChannelTests::test_queue_bind_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_queue_bind_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_queue_bind_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_queue_bind_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_queue_declare_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_queue_declare_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_queue_declare_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_queue_declare_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_queue_delete_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_queue_delete_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_queue_delete_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_queue_delete_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_queue_purge_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_queue_purge_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_queue_purge_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_queue_purge_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_queue_unbind_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_queue_unbind_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_queue_unbind_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_rpc_adds_callback", 
"tests/unit/channel_tests.py::ChannelTests::test_rpc_enters_blocking_and_adds_on_synchronous_complete", "tests/unit/channel_tests.py::ChannelTests::test_rpc_not_blocking_and_no_on_synchronous_complete_when_no_replies", "tests/unit/channel_tests.py::ChannelTests::test_rpc_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_rpc_throws_type_error_with_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_rpc_throws_value_error_with_unacceptable_replies", "tests/unit/channel_tests.py::ChannelTests::test_rpc_while_blocking_appends_blocked_collection", "tests/unit/channel_tests.py::ChannelTests::test_send_method", "tests/unit/channel_tests.py::ChannelTests::test_set_state", "tests/unit/channel_tests.py::ChannelTests::test_tx_commit_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_tx_commit_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_tx_rollback_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_tx_select_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_validate_channel_and_callback_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_validate_channel_and_callback_raises_value_error_not_callable", "tests/unit/connection_tests.py::ConnectionTests::test_add_callbacks", "tests/unit/connection_tests.py::ConnectionTests::test_add_on_close_callback", "tests/unit/connection_tests.py::ConnectionTests::test_add_on_open_error_callback", "tests/unit/connection_tests.py::ConnectionTests::test_bad_type_connection_parameters", "tests/unit/connection_tests.py::ConnectionTests::test_channel", "tests/unit/connection_tests.py::ConnectionTests::test_client_properties", "tests/unit/connection_tests.py::ConnectionTests::test_close_channels", "tests/unit/connection_tests.py::ConnectionTests::test_close_closes_open_channels", "tests/unit/connection_tests.py::ConnectionTests::test_close_ignores_closed_channels", "tests/unit/connection_tests.py::ConnectionTests::test_connect", "tests/unit/connection_tests.py::ConnectionTests::test_connect_reconnect", "tests/unit/connection_tests.py::ConnectionTests::test_new_conn_should_use_first_channel", "tests/unit/connection_tests.py::ConnectionTests::test_next_channel_number_returns_lowest_unused", "tests/unit/connection_tests.py::ConnectionTests::test_on_channel_cleanup_no_open_channels", "tests/unit/connection_tests.py::ConnectionTests::test_on_channel_cleanup_non_closing_state", "tests/unit/connection_tests.py::ConnectionTests::test_on_channel_cleanup_open_channels", "tests/unit/connection_tests.py::ConnectionTests::test_on_close_ready_no_open_channels", "tests/unit/connection_tests.py::ConnectionTests::test_on_close_ready_open_channels", "tests/unit/connection_tests.py::ConnectionTests::test_on_connection_close_ok", "tests/unit/connection_tests.py::ConnectionTests::test_on_connection_closed", "tests/unit/connection_tests.py::ConnectionTests::test_on_connection_start", "tests/unit/connection_tests.py::ConnectionTests::test_on_connection_tune", "tests/unit/connection_tests.py::ConnectionTests::test_on_data_available", "tests/unit/connection_tests.py::ConnectionTests::test_on_terminate_cleans_up", "tests/unit/connection_tests.py::ConnectionTests::test_on_terminate_invokes_access_denied_on_connection_error_and_closed", "tests/unit/connection_tests.py::ConnectionTests::test_on_terminate_invokes_auth_on_connection_error_and_closed", "tests/unit/connection_tests.py::ConnectionTests::test_on_terminate_invokes_connection_closed_callback", 
"tests/unit/connection_tests.py::ConnectionTests::test_on_terminate_invokes_protocol_on_connection_error_and_closed", "tests/unit/connection_tests.py::ConnectionTests::test_process_url", "tests/unit/connection_tests.py::ConnectionTests::test_set_backpressure_multiplier" ]
[]
BSD 3-Clause "New" or "Revised" License
416
picovico__python-sdk-13
5bbb1e00f64eeeb87bc682afe2b535394b1dd682
2016-02-08 11:51:34
5bbb1e00f64eeeb87bc682afe2b535394b1dd682
diff --git a/.gitignore b/.gitignore deleted file mode 100644 index ca7fe0a..0000000 --- a/.gitignore +++ /dev/null @@ -1,6 +0,0 @@ -*.pyc -__pycache__ -.cache -.tox -.eggs -*.egg-info diff --git a/picovico/__init__.py b/picovico/__init__.py index 81dadc4..7e14d3d 100644 --- a/picovico/__init__.py +++ b/picovico/__init__.py @@ -1,5 +1,5 @@ from .session import PicovicoSessionMixin -from .base import PicovicoRequest +from .baserequest import PicovicoRequest from .components import PicovicoComponentMixin from .decorators import pv_auth_required from . import urls as pv_urls @@ -12,7 +12,7 @@ class PicovicoAPI(PicovicoSessionMixin, PicovicoComponentMixin): if self.is_authorized(): self._ready_component_property() - + def login(self, username, password): """ Picovico: login with username and password """ assert username, 'username is required for login'
Categorize URL config based on API endpoints. The URL endpoints should clearly define the workflow, and they should be categorized accordingly.
picovico/python-sdk
diff --git a/tests/pv_api_test.py b/tests/pv_api_test.py index 377a51a..e368fe8 100644 --- a/tests/pv_api_test.py +++ b/tests/pv_api_test.py @@ -13,7 +13,7 @@ class TestPicovicoAPI: post_call = method_calls.get('post').copy() with pytest.raises(pv_exceptions.PicovicoAPINotAllowed): api.me() - with mock.patch('picovico.base.requests.request') as mr: + with mock.patch('picovico.baserequest.requests.request') as mr: mr.return_value = auth_response api.login(*calls[2:]) post_call.update(data=dict(zip(calls, calls)), url=parse.urljoin(pv_urls.PICOVICO_BASE, pv_urls.PICOVICO_LOGIN)) @@ -42,13 +42,13 @@ class TestPicovicoAPI: assert not api.is_authorized() def test_login_authenticate(self, auth_response): - with mock.patch('picovico.base.requests.request') as mr: + with mock.patch('picovico.baserequest.requests.request') as mr: mr.return_value = auth_response api = PicovicoAPI('app_id', 'device_id') assert not api.is_authorized() api.login('username', 'password') assert api.is_authorized() - with mock.patch('picovico.base.requests.request') as mr: + with mock.patch('picovico.baserequest.requests.request') as mr: mr.return_value = auth_response api = PicovicoAPI('app_id', 'device_id') assert not api.is_authorized() diff --git a/tests/pv_base_test.py b/tests/pv_base_test.py index 34c7962..9939c52 100644 --- a/tests/pv_base_test.py +++ b/tests/pv_base_test.py @@ -3,7 +3,7 @@ import mock import six from six.moves.urllib import parse -from picovico import base as api +from picovico import baserequest as api from picovico import urls from picovico import exceptions @@ -28,7 +28,7 @@ class TestPicovicoRequest: assert args['url'] == parse.urljoin(urls.PICOVICO_BASE, urls.ME) def test_api_methods(self, success_response): - with mock.patch('picovico.base.requests.request') as mr: + with mock.patch('picovico.baserequest.requests.request') as mr: mr.return_value = success_response pv_api = api.PicovicoRequest() assert pv_api.get(urls.ME) == success_response.json() @@ -37,7 +37,7 @@ class TestPicovicoRequest: with pytest.raises(AssertionError) as excinfo: pv_api.post(urls.ME, data="hello") assert success_response.json() == pv_api.put(urls.ME) - with mock.patch('picovico.base.open', mock.mock_open(read_data='bibble')): + with mock.patch('picovico.baserequest.open', mock.mock_open(read_data='bibble')): pv_api.put(urls.ME, filename="fo", data_headers={'MUSIC_NAME': "Hello"}, ) assert 'MUSIC_NAME' in pv_api.headers assert pv_api.request_args['method'] == 'put' diff --git a/tests/pv_component_test.py b/tests/pv_component_test.py index def6fb6..2ea7dc2 100644 --- a/tests/pv_component_test.py +++ b/tests/pv_component_test.py @@ -53,7 +53,7 @@ class TestComponent: def test_library_and_free_component(self, success_response, method_calls, response_messages): req = pv_base.PicovicoRequest(response_messages.get('valid_auth_header')) style_component = PicovicoStyle(req) - with mock.patch('picovico.base.requests.request') as mr: + with mock.patch('picovico.baserequest.requests.request') as mr: mr.return_value = success_response get_call = method_calls.get('get').copy() get_call.update(url=parse.urljoin(pv_urls.PICOVICO_BASE, pv_urls.PICOVICO_STYLES)) diff --git a/tests/pv_specific_component_test.py b/tests/pv_specific_component_test.py index 964fd42..eb60208 100644 --- a/tests/pv_specific_component_test.py +++ b/tests/pv_specific_component_test.py @@ -12,7 +12,7 @@ class TestPhotoComponent: req = PicovicoRequest(auth_header) ph_comp = PicovicoPhoto(req) assert ph_comp.component == 'photo' - with 
mock.patch('picovico.base.requests.request') as mr: + with mock.patch('picovico.baserequest.requests.request') as mr: mr.return_value = success_response args = ("something", "something_thumb") ph_comp.upload_photo_url(*args) @@ -20,4 +20,4 @@ class TestPhotoComponent: post_request.update(data=dict(zip(('url', 'thumbnail_url'), args))) post_request.update(headers=auth_header) mr.assert_called_with(**post_request) - +
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_removed_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 3, "test_score": 3 }, "num_modified_files": 1 }
2.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "", "pip_packages": [ "pytest", "mock" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi==2025.1.31 charset-normalizer==3.4.1 exceptiongroup==1.2.2 idna==3.10 iniconfig==2.1.0 mock==5.2.0 packaging==24.2 -e git+https://github.com/picovico/python-sdk.git@5bbb1e00f64eeeb87bc682afe2b535394b1dd682#egg=picovico pluggy==1.5.0 pytest==8.3.5 requests==2.32.3 six==1.17.0 tomli==2.2.1 urllib3==2.3.0
name: python-sdk channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - certifi==2025.1.31 - charset-normalizer==3.4.1 - exceptiongroup==1.2.2 - idna==3.10 - iniconfig==2.1.0 - mock==5.2.0 - packaging==24.2 - pluggy==1.5.0 - pytest==8.3.5 - requests==2.32.3 - six==1.17.0 - tomli==2.2.1 - urllib3==2.3.0 prefix: /opt/conda/envs/python-sdk
[ "tests/pv_api_test.py::TestPicovicoAPI::test_auth_decoration", "tests/pv_api_test.py::TestPicovicoAPI::test_api_proxy", "tests/pv_api_test.py::TestPicovicoAPI::test_login_authenticate", "tests/pv_base_test.py::TestPicovicoRequest::test_properties", "tests/pv_base_test.py::TestPicovicoRequest::test_request_args", "tests/pv_base_test.py::TestPicovicoRequest::test_api_methods", "tests/pv_base_test.py::TestPicovicoRequest::test_authentication_header", "tests/pv_component_test.py::TestComponentMixin::test_component_property", "tests/pv_component_test.py::TestComponent::test_component_methods", "tests/pv_component_test.py::TestComponent::test_photo_component", "tests/pv_component_test.py::TestComponent::test_style_component", "tests/pv_component_test.py::TestComponent::test_library_and_free_component", "tests/pv_specific_component_test.py::TestPhotoComponent::test_upload_url" ]
[]
[]
[]
null
417
SiftScience__sift-python-34
a9993ecae67cc4804fd572cd3ef95ec9535758a2
2016-02-08 22:37:17
a9993ecae67cc4804fd572cd3ef95ec9535758a2
diff --git a/README.rst b/README.rst index 30404c0..4d3f726 100644 --- a/README.rst +++ b/README.rst @@ -68,9 +68,9 @@ Here's an example: "$payment_gateway" : "$braintree", "$card_bin" : "542486", "$card_last4" : "4444" - }, - "$currency_code" : "USD", - "$amount" : 15230000, + }, + "$currency_code" : "USD", + "$amount" : 15230000, } response = client.track(event, properties) diff --git a/setup.py b/setup.py index dd04072..e60cbaf 100644 --- a/setup.py +++ b/setup.py @@ -19,7 +19,7 @@ except: setup( name='Sift', description='Python bindings for Sift Science\'s API', - version='1.1.2.2', # NB: must be kept in sync with sift/version.py + version='1.1.2.3', # NB: must be kept in sync with sift/version.py url='https://siftscience.com', author='Sift Science', diff --git a/sift/client.py b/sift/client.py index 68e9311..72fbe38 100644 --- a/sift/client.py +++ b/sift/client.py @@ -105,8 +105,7 @@ class Client(object): return Response(response) except requests.exceptions.RequestException as e: warnings.warn('Failed to track event: %s' % properties) - warnings.warn(traceback.format_exception_only(type(e), e)) - + warnings.warn(traceback.format_exc()) return e def score(self, user_id, timeout = None): @@ -137,8 +136,7 @@ class Client(object): return Response(response) except requests.exceptions.RequestException as e: warnings.warn('Failed to get score for user %s' % user_id) - warnings.warn(traceback.format_exception_only(type(e), e)) - + warnings.warn(traceback.format_exc()) return e def label(self, user_id, properties, timeout = None): @@ -192,8 +190,7 @@ class Client(object): except requests.exceptions.RequestException as e: warnings.warn('Failed to unlabel user %s' % user_id) - warnings.warn(traceback.format_exception_only(type(e), e)) - + warnings.warn(traceback.format_exc()) return e diff --git a/sift/version.py b/sift/version.py index 3b2ddd3..5a6aa7f 100644 --- a/sift/version.py +++ b/sift/version.py @@ -1,3 +1,3 @@ # NB: Be sure to keep in sync w/ setup.py -VERSION = '1.1.2.2' +VERSION = '1.1.2.3' API_VERSION = '203'
Warnings cause TypeError I'm seeing some TypeError exceptions, it seems that the new warnings / exception handling code has some issues? ``` ... File "tasks.py", line 296, in track_sift_event response = sift_client.track(event_type, attrs) File "/home/ubuntu/closeio/venv/local/lib/python2.7/site-packages/sift/client.py", line 108, in track warnings.warn(traceback.format_exception_only(type(e), e)) TypeError: expected string or buffer ``` Not sure exactly how/when/why this happens but it seems that the Sift responses aren't always handled properly. /cc @JohnMcSpedon @fredsadaghiani
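A minimal standalone sketch (not the Sift SDK itself) of the failure mode reported above: `traceback.format_exception_only()` returns a *list* of strings, and handing that list to `warnings.warn()` is what produced the "expected string or buffer" TypeError in the Python 2.7 traceback, whereas `traceback.format_exc()` — which the patch switches to — returns a single string.

```python
import traceback
import warnings

try:
    raise ValueError("Failed")
except ValueError as exc:
    # What the old code handed to warnings.warn(): a *list* of strings.
    lines = traceback.format_exception_only(type(exc), exc)
    # What the patched code hands over: a single string.
    text = traceback.format_exc()
    print(type(lines), type(text))  # <class 'list'> <class 'str'>
    warnings.warn(text)             # fine: warn() expects a str or Warning instance
```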
SiftScience/sift-python
diff --git a/tests/client_test.py b/tests/client_test.py index e2d57c0..1c0b43e 100644 --- a/tests/client_test.py +++ b/tests/client_test.py @@ -1,4 +1,5 @@ import datetime +import warnings import json import mock import sift @@ -303,6 +304,41 @@ class TestSiftPythonClient(unittest.TestCase): assert(response.is_ok()) assert(response.api_error_message == "OK") assert(response.body['score'] == 0.55) + + def test_exception_during_track_call(self): + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + with mock.patch('requests.post') as mock_post: + mock_post.side_effect = mock.Mock(side_effect = requests.exceptions.RequestException("Failed")) + response = self.sift_client.track('$transaction', valid_transaction_properties()) + assert(len(w) == 2) + assert('Failed to track event:' in str(w[0].message)) + assert('RequestException: Failed' in str(w[1].message)) + assert('Traceback' in str(w[1].message)) + + def test_exception_during_score_call(self): + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + with mock.patch('requests.get') as mock_get: + mock_get.side_effect = mock.Mock(side_effect = requests.exceptions.RequestException("Failed")) + response = self.sift_client.score('Fred') + assert(len(w) == 2) + assert('Failed to get score for user Fred' in str(w[0].message)) + assert('RequestException: Failed' in str(w[1].message)) + assert('Traceback' in str(w[1].message)) + + def test_exception_during_unlabel_call(self): + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + with mock.patch('requests.delete') as mock_delete: + mock_delete.side_effect = mock.Mock(side_effect = requests.exceptions.RequestException("Failed")) + response = self.sift_client.unlabel('Fred') + + assert(len(w) == 2) + assert('Failed to unlabel user Fred' in str(w[0].message)) + assert('RequestException: Failed' in str(w[1].message)) + assert('Traceback' in str(w[1].message)) + def main(): unittest.main()
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 3, "test_score": 2 }, "num_modified_files": 4 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.9", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi==2025.1.31 charset-normalizer==3.4.1 exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work idna==3.10 iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work mock==5.2.0 packaging @ file:///croot/packaging_1734472117206/work pluggy @ file:///croot/pluggy_1733169602837/work pytest @ file:///croot/pytest_1738938843180/work requests==2.32.3 -e git+https://github.com/SiftScience/sift-python.git@a9993ecae67cc4804fd572cd3ef95ec9535758a2#egg=Sift tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work urllib3==2.3.0
name: sift-python channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - exceptiongroup=1.2.0=py39h06a4308_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - packaging=24.2=py39h06a4308_0 - pip=25.0=py39h06a4308_0 - pluggy=1.5.0=py39h06a4308_0 - pytest=8.3.4=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tomli=2.0.1=py39h06a4308_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - certifi==2025.1.31 - charset-normalizer==3.4.1 - idna==3.10 - mock==5.2.0 - requests==2.32.3 - urllib3==2.3.0 prefix: /opt/conda/envs/sift-python
[ "tests/client_test.py::TestSiftPythonClient::test_exception_during_score_call", "tests/client_test.py::TestSiftPythonClient::test_exception_during_track_call", "tests/client_test.py::TestSiftPythonClient::test_exception_during_unlabel_call" ]
[]
[ "tests/client_test.py::TestSiftPythonClient::test_constructor_api_key", "tests/client_test.py::TestSiftPythonClient::test_constructor_invalid_api_url", "tests/client_test.py::TestSiftPythonClient::test_constructor_requires_valid_api_key", "tests/client_test.py::TestSiftPythonClient::test_event_ok", "tests/client_test.py::TestSiftPythonClient::test_event_with_timeout_param_ok", "tests/client_test.py::TestSiftPythonClient::test_global_api_key", "tests/client_test.py::TestSiftPythonClient::test_label_user__with_special_chars_ok", "tests/client_test.py::TestSiftPythonClient::test_label_user_ok", "tests/client_test.py::TestSiftPythonClient::test_label_user_with_timeout_param_ok", "tests/client_test.py::TestSiftPythonClient::test_score__with_special_user_id_chars_ok", "tests/client_test.py::TestSiftPythonClient::test_score_ok", "tests/client_test.py::TestSiftPythonClient::test_score_requires_user_id", "tests/client_test.py::TestSiftPythonClient::test_score_with_timeout_param_ok", "tests/client_test.py::TestSiftPythonClient::test_sync_score_ok", "tests/client_test.py::TestSiftPythonClient::test_track_requires_properties", "tests/client_test.py::TestSiftPythonClient::test_track_requires_valid_event", "tests/client_test.py::TestSiftPythonClient::test_unicode_string_parameter_support", "tests/client_test.py::TestSiftPythonClient::test_unlabel_user_ok", "tests/client_test.py::TestSiftPythonClient::test_unlabel_user_with_special_chars_ok" ]
[]
MIT License
418
docker__docker-py-928
575305fdba6c57f06d605920e01b5e1d6b952d3e
2016-02-09 00:47:19
4c34be5d4ab8a5a017950712e9c96b56d78d1c58
diff --git a/docker/utils/utils.py b/docker/utils/utils.py index 4404c217..61e5a8dc 100644 --- a/docker/utils/utils.py +++ b/docker/utils/utils.py @@ -128,7 +128,13 @@ def exclude_paths(root, patterns, dockerfile=None): paths = get_paths(root, exclude_patterns, include_patterns, has_exceptions=len(exceptions) > 0) - return set(paths) + return set(paths).union( + # If the Dockerfile is in a subdirectory that is excluded, get_paths + # will not descend into it and the file will be skipped. This ensures + # it doesn't happen. + set([dockerfile]) + if os.path.exists(os.path.join(root, dockerfile)) else set() + ) def should_include(path, exclude_patterns, include_patterns):
[1.7] regression in .dockerignore handling If the `Dockerfile` is being ignored by a path in the `.dockerignore` file, it is incorrectly being removed from the context. There is a special case handling when the file is being excluded directly, but it should also apply when there is a path which includes the `Dockerfile`. Possibly caused by #863 ``` docker-py version: 1.7.0 $ tree -a . ├── Docker │   ├── dc.yml │   └── Dockerfile.debug └── .dockerignore 1 directory, 3 files $ cat Docker/Dockerfile.debug FROM alpine:3.3 RUN echo building CMD echo run $ cat Docker/dc.yml app: build: .. dockerfile: Docker/Dockerfile.debug $ cat .dockerignore Docker/ docker-compose -f Docker/dc.yml build ERROR: Cannot locate specified Dockerfile: Docker/Dockerfile.debug ```
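A small sketch of the behaviour the patch above restores; the helper name is illustrative, not the real docker-py API. Even when the ignore patterns exclude the Dockerfile's parent directory, the named Dockerfile must be added back into the build context.

```python
import os

def keep_dockerfile(context_paths, root, dockerfile):
    """Re-add the named Dockerfile to the context paths if it exists on disk."""
    context_paths = set(context_paths)
    if os.path.exists(os.path.join(root, dockerfile)):
        # get_paths() never descends into an excluded directory such as 'Docker/',
        # so the Dockerfile has to be put back explicitly (mirrors the patched union).
        context_paths.add(dockerfile)
    return context_paths

# With the report's layout on disk (root containing Docker/Dockerfile.debug), this
# keeps the Dockerfile in the build context even though 'Docker/' is ignored.
print(keep_dockerfile({'.dockerignore'}, '/project', 'Docker/Dockerfile.debug'))
```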
docker/docker-py
diff --git a/tests/unit/utils_test.py b/tests/unit/utils_test.py index df29b9d3..a0a96bbe 100644 --- a/tests/unit/utils_test.py +++ b/tests/unit/utils_test.py @@ -736,6 +736,7 @@ class ExcludePathsTest(base.BaseTestCase): 'foo/b.py', 'foo/bar/a.py', 'bar/a.py', + 'foo/Dockerfile3', ] all_paths = set(dirs + files) @@ -775,6 +776,14 @@ class ExcludePathsTest(base.BaseTestCase): assert self.exclude(['*'], dockerfile='Dockerfile.alt') == \ set(['Dockerfile.alt', '.dockerignore']) + assert self.exclude(['*'], dockerfile='foo/Dockerfile3') == \ + set(['foo/Dockerfile3', '.dockerignore']) + + def test_exclude_dockerfile_child(self): + includes = self.exclude(['foo/'], dockerfile='foo/Dockerfile3') + assert 'foo/Dockerfile3' in includes + assert 'foo/a.py' not in includes + def test_single_filename(self): assert self.exclude(['a.py']) == self.all_paths - set(['a.py']) @@ -825,28 +834,31 @@ class ExcludePathsTest(base.BaseTestCase): def test_directory(self): assert self.exclude(['foo']) == self.all_paths - set([ 'foo', 'foo/a.py', 'foo/b.py', - 'foo/bar', 'foo/bar/a.py', + 'foo/bar', 'foo/bar/a.py', 'foo/Dockerfile3' ]) def test_directory_with_trailing_slash(self): assert self.exclude(['foo']) == self.all_paths - set([ 'foo', 'foo/a.py', 'foo/b.py', - 'foo/bar', 'foo/bar/a.py', + 'foo/bar', 'foo/bar/a.py', 'foo/Dockerfile3' ]) def test_directory_with_single_exception(self): assert self.exclude(['foo', '!foo/bar/a.py']) == self.all_paths - set([ - 'foo/a.py', 'foo/b.py', 'foo', 'foo/bar' + 'foo/a.py', 'foo/b.py', 'foo', 'foo/bar', + 'foo/Dockerfile3' ]) def test_directory_with_subdir_exception(self): assert self.exclude(['foo', '!foo/bar']) == self.all_paths - set([ - 'foo/a.py', 'foo/b.py', 'foo' + 'foo/a.py', 'foo/b.py', 'foo', + 'foo/Dockerfile3' ]) def test_directory_with_wildcard_exception(self): assert self.exclude(['foo', '!foo/*.py']) == self.all_paths - set([ - 'foo/bar', 'foo/bar/a.py', 'foo' + 'foo/bar', 'foo/bar/a.py', 'foo', + 'foo/Dockerfile3' ]) def test_subdirectory(self):
{ "commit_name": "head_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 0, "issue_text_score": 0, "test_score": 2 }, "num_modified_files": 1 }
1.7
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.4", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 coverage==6.2 -e git+https://github.com/docker/docker-py.git@575305fdba6c57f06d605920e01b5e1d6b952d3e#egg=docker_py execnet==1.9.0 importlib-metadata==4.8.3 iniconfig==1.1.1 packaging==21.3 pluggy==1.0.0 py==1.11.0 pyparsing==3.1.4 pytest==7.0.1 pytest-asyncio==0.16.0 pytest-cov==4.0.0 pytest-mock==3.6.1 pytest-xdist==3.0.2 requests==2.5.3 six==1.17.0 tomli==1.2.3 typing_extensions==4.1.1 websocket-client==0.32.0 zipp==3.6.0
name: docker-py channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - coverage==6.2 - execnet==1.9.0 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-asyncio==0.16.0 - pytest-cov==4.0.0 - pytest-mock==3.6.1 - pytest-xdist==3.0.2 - requests==2.5.3 - six==1.17.0 - tomli==1.2.3 - typing-extensions==4.1.1 - websocket-client==0.32.0 - zipp==3.6.0 prefix: /opt/conda/envs/docker-py
[ "tests/unit/utils_test.py::ExcludePathsTest::test_exclude_custom_dockerfile", "tests/unit/utils_test.py::ExcludePathsTest::test_exclude_dockerfile_child" ]
[ "tests/unit/utils_test.py::SSLAdapterTest::test_only_uses_tls" ]
[ "tests/unit/utils_test.py::HostConfigTest::test_create_endpoint_config_with_aliases", "tests/unit/utils_test.py::HostConfigTest::test_create_host_config_invalid_cpu_cfs_types", "tests/unit/utils_test.py::HostConfigTest::test_create_host_config_no_options", "tests/unit/utils_test.py::HostConfigTest::test_create_host_config_no_options_newer_api_version", "tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_cpu_period", "tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_cpu_quota", "tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_oom_kill_disable", "tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_shm_size", "tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_shm_size_in_mb", "tests/unit/utils_test.py::UlimitTest::test_create_host_config_dict_ulimit", "tests/unit/utils_test.py::UlimitTest::test_create_host_config_dict_ulimit_capitals", "tests/unit/utils_test.py::UlimitTest::test_create_host_config_obj_ulimit", "tests/unit/utils_test.py::UlimitTest::test_ulimit_invalid_type", "tests/unit/utils_test.py::LogConfigTest::test_create_host_config_dict_logconfig", "tests/unit/utils_test.py::LogConfigTest::test_create_host_config_obj_logconfig", "tests/unit/utils_test.py::LogConfigTest::test_logconfig_invalid_config_type", "tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_empty", "tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_no_cert_path", "tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_tls", "tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_tls_verify_false", "tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_tls_verify_false_no_cert", "tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_compact", "tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_complete", "tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_empty", "tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_list", "tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_no_mode", "tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_unicode_bytes_input", "tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_unicode_unicode_input", "tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_commented_line", "tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_invalid_line", "tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_proper", "tests/unit/utils_test.py::ParseHostTest::test_parse_host", "tests/unit/utils_test.py::ParseHostTest::test_parse_host_empty_value", "tests/unit/utils_test.py::ParseHostTest::test_parse_host_tls", "tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_no_tag", "tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_sha", "tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_tag", "tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_user_image_no_tag", "tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_user_image_tag", "tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_no_tag", "tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_sha", "tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_tag", "tests/unit/utils_test.py::ParseDeviceTest::test_dict", 
"tests/unit/utils_test.py::ParseDeviceTest::test_full_string_definition", "tests/unit/utils_test.py::ParseDeviceTest::test_hybrid_list", "tests/unit/utils_test.py::ParseDeviceTest::test_partial_string_definition", "tests/unit/utils_test.py::ParseDeviceTest::test_permissionless_string_definition", "tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_float", "tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_invalid", "tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_maxint", "tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_valid", "tests/unit/utils_test.py::UtilsTest::test_convert_filters", "tests/unit/utils_test.py::UtilsTest::test_create_ipam_config", "tests/unit/utils_test.py::UtilsTest::test_decode_json_header", "tests/unit/utils_test.py::SplitCommandTest::test_split_command_with_unicode", "tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_matching_internal_port_ranges", "tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_matching_internal_ports", "tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_nonmatching_internal_port_ranges", "tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_nonmatching_internal_ports", "tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_one_port", "tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_port_range", "tests/unit/utils_test.py::PortsTest::test_host_only_with_colon", "tests/unit/utils_test.py::PortsTest::test_non_matching_length_port_ranges", "tests/unit/utils_test.py::PortsTest::test_port_and_range_invalid", "tests/unit/utils_test.py::PortsTest::test_port_only_with_colon", "tests/unit/utils_test.py::PortsTest::test_split_port_invalid", "tests/unit/utils_test.py::PortsTest::test_split_port_no_host_port", "tests/unit/utils_test.py::PortsTest::test_split_port_range_no_host_port", "tests/unit/utils_test.py::PortsTest::test_split_port_range_with_host_ip_no_port", "tests/unit/utils_test.py::PortsTest::test_split_port_range_with_host_port", "tests/unit/utils_test.py::PortsTest::test_split_port_range_with_protocol", "tests/unit/utils_test.py::PortsTest::test_split_port_with_host_ip", "tests/unit/utils_test.py::PortsTest::test_split_port_with_host_ip_no_port", "tests/unit/utils_test.py::PortsTest::test_split_port_with_host_port", "tests/unit/utils_test.py::PortsTest::test_split_port_with_protocol", "tests/unit/utils_test.py::ExcludePathsTest::test_directory", "tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_single_exception", "tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_subdir_exception", "tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_trailing_slash", "tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_wildcard_exception", "tests/unit/utils_test.py::ExcludePathsTest::test_exclude_dockerfile_dockerignore", "tests/unit/utils_test.py::ExcludePathsTest::test_no_dupes", "tests/unit/utils_test.py::ExcludePathsTest::test_no_excludes", "tests/unit/utils_test.py::ExcludePathsTest::test_question_mark", "tests/unit/utils_test.py::ExcludePathsTest::test_single_filename", "tests/unit/utils_test.py::ExcludePathsTest::test_single_filename_trailing_slash", "tests/unit/utils_test.py::ExcludePathsTest::test_single_subdir_single_filename", "tests/unit/utils_test.py::ExcludePathsTest::test_single_subdir_wildcard_filename", "tests/unit/utils_test.py::ExcludePathsTest::test_subdirectory", "tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_exclude", 
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_filename_end", "tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_filename_start", "tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_subdir_single_filename", "tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_subdir_wildcard_filename", "tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_with_exception", "tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_with_wildcard_exception", "tests/unit/utils_test.py::TarTest::test_tar_with_directory_symlinks", "tests/unit/utils_test.py::TarTest::test_tar_with_empty_directory", "tests/unit/utils_test.py::TarTest::test_tar_with_excludes", "tests/unit/utils_test.py::TarTest::test_tar_with_file_symlinks" ]
[]
Apache License 2.0
419
docker__docker-py-929
575305fdba6c57f06d605920e01b5e1d6b952d3e
2016-02-09 02:18:57
4c34be5d4ab8a5a017950712e9c96b56d78d1c58
dnephin: LGTM
diff --git a/docker/auth/auth.py b/docker/auth/auth.py index 399dae2b..eedb7944 100644 --- a/docker/auth/auth.py +++ b/docker/auth/auth.py @@ -46,7 +46,7 @@ def resolve_repository_name(repo_name): def resolve_index_name(index_name): index_name = convert_to_hostname(index_name) - if index_name == 'index.'+INDEX_NAME: + if index_name == 'index.' + INDEX_NAME: index_name = INDEX_NAME return index_name @@ -102,12 +102,14 @@ def encode_header(auth): return base64.urlsafe_b64encode(auth_json) -def parse_auth(entries): +def parse_auth(entries, raise_on_error=False): """ Parses authentication entries Args: - entries: Dict of authentication entries. + entries: Dict of authentication entries. + raise_on_error: If set to true, an invalid format will raise + InvalidConfigFile Returns: Authentication registry. @@ -115,6 +117,19 @@ def parse_auth(entries): conf = {} for registry, entry in six.iteritems(entries): + if not (isinstance(entry, dict) and 'auth' in entry): + log.debug( + 'Config entry for key {0} is not auth config'.format(registry) + ) + # We sometimes fall back to parsing the whole config as if it was + # the auth config by itself, for legacy purposes. In that case, we + # fail silently and return an empty conf if any of the keys is not + # formatted properly. + if raise_on_error: + raise errors.InvalidConfigFile( + 'Invalid configuration for registry {0}'.format(registry) + ) + return {} username, password = decode_auth(entry['auth']) log.debug( 'Found entry (registry={0}, username={1})' @@ -170,7 +185,7 @@ def load_config(config_path=None): res = {} if data.get('auths'): log.debug("Found 'auths' section") - res.update(parse_auth(data['auths'])) + res.update(parse_auth(data['auths'], raise_on_error=True)) if data.get('HttpHeaders'): log.debug("Found 'HttpHeaders' section") res.update({'HttpHeaders': data['HttpHeaders']})
Using a docker/config.json file causes "TypeError: string indices must be integers" Using a ~/.docker/config.json file causes docker-compose to output a Python error. @dnephin in https://github.com/docker/compose/issues/2697#issuecomment-172936366 suggests that this is an issue to be raised with the docker-py project instead of the compose project. So here's a simple config.json file, and a dummy docker-compose.yml file which demonstrate the issue: ``` $ cat ~/.docker/config.json { "detachKeys": "ctrl-q,ctrl-u,ctrl-i,ctrl-t" } $ cat docker-compose.yml version: '2' services: s1: image: ubuntu $ docker-compose ps Traceback (most recent call last): File "<string>", line 3, in <module> File "/compose/compose/cli/main.py", line 55, in main File "/compose/compose/cli/docopt_command.py", line 23, in sys_dispatch File "/compose/compose/cli/docopt_command.py", line 26, in dispatch File "/compose/compose/cli/main.py", line 172, in perform_command File "/compose/compose/cli/command.py", line 52, in project_from_options File "/compose/compose/cli/command.py", line 85, in get_project File "/compose/compose/cli/command.py", line 66, in get_client File "/compose/compose/cli/docker_client.py", line 37, in docker_client File "/compose/venv/lib/python2.7/site-packages/docker/client.py", line 56, in __init__ File "/compose/venv/lib/python2.7/site-packages/docker/auth/auth.py", line 182, in load_config File "/compose/venv/lib/python2.7/site-packages/docker/auth/auth.py", line 118, in parse_auth TypeError: string indices must be integers docker-compose returned -1 ``` And if we delete the config and try again: ``` $ rm ~/.docker/config.json $ docker-compose ps Name Command State Ports ------------------------------ ``` I'm using Docker Toolbox 1.10.0 on OSX 10.11.3. The docker container is being run on a VirtualBox VM managed by docker-machine. Here are some more version numbers: ``` $ docker version Client: Version: 1.10.0 API version: 1.22 Go version: go1.5.3 Git commit: 590d5108 Built: Thu Feb 4 18:18:11 2016 OS/Arch: darwin/amd64 Server: Version: 1.10.0 API version: 1.22 Go version: go1.5.3 Git commit: 590d5108 Built: Thu Feb 4 19:55:25 2016 OS/Arch: linux/amd64 $ docker-compose version docker-compose version 1.6.0, build d99cad6 docker-py version: 1.7.0 CPython version: 2.7.9 OpenSSL version: OpenSSL 1.0.1j 15 Oct 2014 ```
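A sketch of why the old `parse_auth()` crashed, using the config content from the report; the guard mirrors the patch above. The loop treated every top-level key as an auth entry, so a plain-string value such as `detachKeys` hit `entry['auth']` and raised `TypeError: string indices must be integers`.

```python
config = {"detachKeys": "ctrl-q,ctrl-u,ctrl-i,ctrl-t"}

parsed = {}
for registry, entry in config.items():
    # Patched behaviour: skip anything that is not a dict with an 'auth' key
    # (the real code raises InvalidConfigFile for malformed entries under 'auths').
    if not (isinstance(entry, dict) and 'auth' in entry):
        continue
    parsed[registry] = entry['auth']

print(parsed)  # {} -- the legacy fallback now fails silently instead of crashing
```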
docker/docker-py
diff --git a/tests/unit/auth_test.py b/tests/unit/auth_test.py index 3fba602c..921aae00 100644 --- a/tests/unit/auth_test.py +++ b/tests/unit/auth_test.py @@ -433,3 +433,32 @@ class LoadConfigTest(base.Cleanup, base.BaseTestCase): self.assertEqual(cfg['Name'], 'Spike') self.assertEqual(cfg['Surname'], 'Spiegel') + + def test_load_config_unknown_keys(self): + folder = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, folder) + dockercfg_path = os.path.join(folder, 'config.json') + config = { + 'detachKeys': 'ctrl-q, ctrl-u, ctrl-i' + } + with open(dockercfg_path, 'w') as f: + json.dump(config, f) + + cfg = auth.load_config(dockercfg_path) + assert cfg == {} + + def test_load_config_invalid_auth_dict(self): + folder = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, folder) + dockercfg_path = os.path.join(folder, 'config.json') + config = { + 'auths': { + 'scarlet.net': {'sakuya': 'izayoi'} + } + } + with open(dockercfg_path, 'w') as f: + json.dump(config, f) + + self.assertRaises( + errors.InvalidConfigFile, auth.load_config, dockercfg_path + )
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 1 }, "num_modified_files": 1 }
1.7
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio" ], "pre_install": null, "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
coverage==7.8.0 -e git+https://github.com/docker/docker-py.git@575305fdba6c57f06d605920e01b5e1d6b952d3e#egg=docker_py exceptiongroup==1.2.2 execnet==2.1.1 iniconfig==2.1.0 packaging==24.2 pluggy==1.5.0 pytest==8.3.5 pytest-asyncio==0.26.0 pytest-cov==6.0.0 pytest-mock==3.14.0 pytest-xdist==3.6.1 requests==2.5.3 six==1.17.0 tomli==2.2.1 typing_extensions==4.13.0 websocket_client==0.32.0
name: docker-py channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - coverage==7.8.0 - exceptiongroup==1.2.2 - execnet==2.1.1 - iniconfig==2.1.0 - packaging==24.2 - pluggy==1.5.0 - pytest==8.3.5 - pytest-asyncio==0.26.0 - pytest-cov==6.0.0 - pytest-mock==3.14.0 - pytest-xdist==3.6.1 - requests==2.5.3 - six==1.17.0 - tomli==2.2.1 - typing-extensions==4.13.0 - websocket-client==0.32.0 prefix: /opt/conda/envs/docker-py
[ "tests/unit/auth_test.py::LoadConfigTest::test_load_config_invalid_auth_dict", "tests/unit/auth_test.py::LoadConfigTest::test_load_config_unknown_keys" ]
[]
[ "tests/unit/auth_test.py::RegressionTest::test_803_urlsafe_encode", "tests/unit/auth_test.py::ResolveRepositoryNameTest::test_explicit_hub_index_library_image", "tests/unit/auth_test.py::ResolveRepositoryNameTest::test_explicit_legacy_hub_index_library_image", "tests/unit/auth_test.py::ResolveRepositoryNameTest::test_invalid_index_name", "tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_dotted_hub_library_image", "tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_hub_image", "tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_hub_library_image", "tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_localhost", "tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_localhost_with_username", "tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_no_dots_but_port", "tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_no_dots_but_port_and_username", "tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_private_registry", "tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_private_registry_with_port", "tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_private_registry_with_username", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_default_explicit_none", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_default_registry", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_fully_explicit", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_hostname_only", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_legacy_config", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_no_match", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_no_path", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_no_path_trailing_slash", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_no_path_wrong_insecure_proto", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_no_path_wrong_secure_proto", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_no_protocol", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_path_wrong_proto", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_registry_and_auth_explicit_hub", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_registry_and_auth_explicit_legacy_hub", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_registry_and_auth_hub_image", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_registry_and_auth_library_image", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_registry_and_auth_private_registry", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_registry_and_auth_unauthenticated_registry", "tests/unit/auth_test.py::LoadConfigTest::test_load_config", "tests/unit/auth_test.py::LoadConfigTest::test_load_config_custom_config_env", "tests/unit/auth_test.py::LoadConfigTest::test_load_config_custom_config_env_utf8", "tests/unit/auth_test.py::LoadConfigTest::test_load_config_custom_config_env_with_auths", "tests/unit/auth_test.py::LoadConfigTest::test_load_config_custom_config_env_with_headers", "tests/unit/auth_test.py::LoadConfigTest::test_load_config_no_file", "tests/unit/auth_test.py::LoadConfigTest::test_load_config_with_random_name" ]
[]
Apache License 2.0
420
adamchainz__pretty-cron-11
ec7be5a9f853342ded46342eb90fdc0c69c06f68
2016-02-09 18:37:29
ec7be5a9f853342ded46342eb90fdc0c69c06f68
diff --git a/HISTORY.rst b/HISTORY.rst index 985360a..7b57b97 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -7,6 +7,8 @@ Pending Release --------------- * New release notes here +* Now interprets day 7 as Sunday as well as 0, like Linux crontab parsers - + thanks @vetyy. 1.0.0 (2015-07-28) ------------------ diff --git a/pretty_cron/api.py b/pretty_cron/api.py index 14af101..f7ac1d0 100644 --- a/pretty_cron/api.py +++ b/pretty_cron/api.py @@ -76,6 +76,7 @@ _WEEKDAYS = { 4: "Thursday", 5: "Friday", 6: "Saturday", + 7: "Sunday", }
Missing key in _WEEKDAYS Hello, Linux crontab allows users to set Sunday as 0 or 7 (it's the same for the crontab parser). When I was parsing some of my users' cron settings I got a KeyError exception when they used this syntax (7 for Sunday); you might consider adding this. (I have monkey-patched it for the time being.) Thanks
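A sketch of the lookup-table change from the patch above: crontab accepts both 0 and 7 for Sunday, so the mapping needs both keys (normalising the day with `day % 7` before the lookup would work just as well).

```python
_WEEKDAYS = {
    0: "Sunday", 1: "Monday", 2: "Tuesday", 3: "Wednesday",
    4: "Thursday", 5: "Friday", 6: "Saturday",
    7: "Sunday",  # added: Linux crontab treats 7 as Sunday too
}

assert _WEEKDAYS[7] == _WEEKDAYS[0] == "Sunday"
```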
adamchainz/pretty-cron
diff --git a/tests/test_prettify.py b/tests/test_prettify.py index cec1af2..df46049 100644 --- a/tests/test_prettify.py +++ b/tests/test_prettify.py @@ -28,6 +28,9 @@ class PrettyCronTest(unittest.TestCase): def test_weekly(self): assert pc("0 0 * * 0") == "At 00:00 every Sunday" + def test_day_7_is_sunday(self): + assert pc("0 0 * * 7") == "At 00:00 every Sunday" + def test_monthly_and_weekly(self): assert ( pc("0 0 1 * 1") ==
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 2 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": null, "pre_install": null, "python": "3.5", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
bleach==4.1.0 certifi==2021.5.30 cffi==1.15.1 charset-normalizer==2.0.12 colorama==0.4.5 coverage==6.2 cryptography==40.0.2 docutils==0.18.1 idna==3.10 importlib-metadata==4.8.3 importlib-resources==5.4.0 jeepney==0.7.1 keyring==23.4.1 packaging==21.3 pkginfo==1.10.0 -e git+https://github.com/adamchainz/pretty-cron.git@ec7be5a9f853342ded46342eb90fdc0c69c06f68#egg=pretty_cron py==1.11.0 pycparser==2.21 Pygments==2.14.0 pyparsing==3.1.4 pytest==2.8.7 readme-renderer==34.0 requests==2.27.1 requests-toolbelt==1.0.0 rfc3986==1.5.0 SecretStorage==3.3.3 six==1.17.0 tqdm==4.64.1 twine==3.8.0 typing_extensions==4.1.1 urllib3==1.26.20 webencodings==0.5.1 zipp==3.6.0
name: pretty-cron channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - bleach==4.1.0 - cffi==1.15.1 - charset-normalizer==2.0.12 - colorama==0.4.5 - coverage==6.2 - cryptography==40.0.2 - docutils==0.18.1 - idna==3.10 - importlib-metadata==4.8.3 - importlib-resources==5.4.0 - jeepney==0.7.1 - keyring==23.4.1 - packaging==21.3 - pkginfo==1.10.0 - py==1.11.0 - pycparser==2.21 - pygments==2.14.0 - pyparsing==3.1.4 - pytest==2.8.7 - readme-renderer==34.0 - requests==2.27.1 - requests-toolbelt==1.0.0 - rfc3986==1.5.0 - secretstorage==3.3.3 - six==1.17.0 - tqdm==4.64.1 - twine==3.8.0 - typing-extensions==4.1.1 - urllib3==1.26.20 - webencodings==0.5.1 - zipp==3.6.0 prefix: /opt/conda/envs/pretty-cron
[ "tests/test_prettify.py::PrettyCronTest::test_day_7_is_sunday" ]
[]
[ "tests/test_prettify.py::PrettyCronTest::test_continuous", "tests/test_prettify.py::PrettyCronTest::test_daily", "tests/test_prettify.py::PrettyCronTest::test_every_day_in_month", "tests/test_prettify.py::PrettyCronTest::test_every_specific_day_in_month", "tests/test_prettify.py::PrettyCronTest::test_every_specific_day_in_month_and_weekly", "tests/test_prettify.py::PrettyCronTest::test_hourly", "tests/test_prettify.py::PrettyCronTest::test_invalid_unchanged", "tests/test_prettify.py::PrettyCronTest::test_minutely", "tests/test_prettify.py::PrettyCronTest::test_monthly_and_weekly", "tests/test_prettify.py::PrettyCronTest::test_nonsense_unchanged", "tests/test_prettify.py::PrettyCronTest::test_one_day_in_month", "tests/test_prettify.py::PrettyCronTest::test_one_day_in_month_11th", "tests/test_prettify.py::PrettyCronTest::test_one_day_in_month_21st", "tests/test_prettify.py::PrettyCronTest::test_one_day_in_month_2nd", "tests/test_prettify.py::PrettyCronTest::test_unsupported", "tests/test_prettify.py::PrettyCronTest::test_weekly", "tests/test_prettify.py::PrettyCronTest::test_yearly" ]
[]
MIT License
421
sympy__sympy-10559
e7914d7f1aae83d22c8c96c8a5af5617596a890c
2016-02-09 20:51:08
0241b35cae5f2adc04dc37b25f7dc9c5f00bd746
diff --git a/sympy/assumptions/sathandlers.py b/sympy/assumptions/sathandlers.py index 9297bb4138..56aeb6a544 100644 --- a/sympy/assumptions/sathandlers.py +++ b/sympy/assumptions/sathandlers.py @@ -304,6 +304,12 @@ def register_fact(klass, fact, registry=fact_registry): (Mul, Implies(AllArgs(Q.positive), Q.positive)), (Mul, Implies(AllArgs(Q.commutative), Q.commutative)), (Mul, Implies(AllArgs(Q.real), Q.commutative)), + + (Pow, CustomLambda(lambda power: Implies(Q.real(power.base) & + Q.even(power.exp) & Q.nonnegative(power.exp), Q.nonnegative(power)))), + (Pow, CustomLambda(lambda power: Implies(Q.nonnegative(power.base) & Q.odd(power.exp) & Q.nonnegative(power.exp), Q.nonnegative(power)))), + (Pow, CustomLambda(lambda power: Implies(Q.nonpositive(power.base) & Q.odd(power.exp) & Q.nonnegative(power.exp), Q.nonpositive(power)))), + # This one can still be made easier to read. I think we need basic pattern # matching, so that we can just write Equivalent(Q.zero(x**y), Q.zero(x) & Q.positive(y)) (Pow, CustomLambda(lambda power: Equivalent(Q.zero(power), Q.zero(power.base) & Q.positive(power.exp)))), @@ -316,7 +322,7 @@ def register_fact(klass, fact, registry=fact_registry): (Mul, Implies(AllArgs(Q.imaginary | Q.real), Implies(ExactlyOneArg(Q.imaginary), Q.imaginary))), (Mul, Implies(AllArgs(Q.real), Q.real)), (Add, Implies(AllArgs(Q.real), Q.real)), - #General Case: Odd number of imaginary args implies mul is imaginary(To be implemented) + # General Case: Odd number of imaginary args implies mul is imaginary(To be implemented) (Mul, Implies(AllArgs(Q.real), Implies(ExactlyOneArg(Q.irrational), Q.irrational))), (Add, Implies(AllArgs(Q.real), Implies(ExactlyOneArg(Q.irrational),
ask(Q.nonnegative(x**2), Q.positive(x)) gives None ``` In [1]: ask(Q.nonnegative(x**2), Q.positive(x)) In [2]: ask(Q.negative(x**2), Q.positive(x)) Out[2]: False In [3]: ask(Q.real(x**2), Q.positive(x)) Out[3]: True ``` The relevant fact(s) would probably have to be implemented in satask to make this work.
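A usage sketch of what the registered Pow facts enable; the expected outputs follow the new tests, and the exact results depend on the SymPy version in use.

```python
from sympy import Q, symbols
from sympy.assumptions.satask import satask

x = symbols('x')

print(satask(Q.nonnegative(x**2), Q.positive(x)))  # True  (was None before the fix)
print(satask(Q.negative(x**2), Q.positive(x)))     # False
print(satask(Q.nonpositive(x**3), Q.negative(x)))  # True
```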
sympy/sympy
diff --git a/sympy/assumptions/tests/test_satask.py b/sympy/assumptions/tests/test_satask.py index 8e42d09474..0fbe727117 100644 --- a/sympy/assumptions/tests/test_satask.py +++ b/sympy/assumptions/tests/test_satask.py @@ -262,3 +262,62 @@ def test_pos_neg(): assert satask(Q.negative(x + y), Q.negative(x) & Q.negative(y)) is True assert satask(Q.positive(x + y), Q.negative(x) & Q.negative(y)) is False assert satask(Q.negative(x + y), Q.positive(x) & Q.positive(y)) is False + +def test_pow_pos_neg(): + assert satask(Q.nonnegative(x**2), Q.positive(x)) is True + assert satask(Q.nonpositive(x**2), Q.positive(x)) is False + assert satask(Q.positive(x**2), Q.positive(x)) is True + assert satask(Q.negative(x**2), Q.positive(x)) is False + assert satask(Q.real(x**2), Q.positive(x)) is True + + assert satask(Q.nonnegative(x**2), Q.negative(x)) is True + assert satask(Q.nonpositive(x**2), Q.negative(x)) is False + assert satask(Q.positive(x**2), Q.negative(x)) is True + assert satask(Q.negative(x**2), Q.negative(x)) is False + assert satask(Q.real(x**2), Q.negative(x)) is True + + assert satask(Q.nonnegative(x**2), Q.nonnegative(x)) is True + assert satask(Q.nonpositive(x**2), Q.nonnegative(x)) is None + assert satask(Q.positive(x**2), Q.nonnegative(x)) is None + assert satask(Q.negative(x**2), Q.nonnegative(x)) is False + assert satask(Q.real(x**2), Q.nonnegative(x)) is True + + assert satask(Q.nonnegative(x**2), Q.nonpositive(x)) is True + assert satask(Q.nonpositive(x**2), Q.nonpositive(x)) is None + assert satask(Q.positive(x**2), Q.nonpositive(x)) is None + assert satask(Q.negative(x**2), Q.nonpositive(x)) is False + assert satask(Q.real(x**2), Q.nonpositive(x)) is True + + assert satask(Q.nonnegative(x**3), Q.positive(x)) is True + assert satask(Q.nonpositive(x**3), Q.positive(x)) is False + assert satask(Q.positive(x**3), Q.positive(x)) is True + assert satask(Q.negative(x**3), Q.positive(x)) is False + assert satask(Q.real(x**3), Q.positive(x)) is True + + assert satask(Q.nonnegative(x**3), Q.negative(x)) is False + assert satask(Q.nonpositive(x**3), Q.negative(x)) is True + assert satask(Q.positive(x**3), Q.negative(x)) is False + assert satask(Q.negative(x**3), Q.negative(x)) is True + assert satask(Q.real(x**3), Q.negative(x)) is True + + assert satask(Q.nonnegative(x**3), Q.nonnegative(x)) is True + assert satask(Q.nonpositive(x**3), Q.nonnegative(x)) is None + assert satask(Q.positive(x**3), Q.nonnegative(x)) is None + assert satask(Q.negative(x**3), Q.nonnegative(x)) is False + assert satask(Q.real(x**3), Q.nonnegative(x)) is True + + assert satask(Q.nonnegative(x**3), Q.nonpositive(x)) is None + assert satask(Q.nonpositive(x**3), Q.nonpositive(x)) is True + assert satask(Q.positive(x**3), Q.nonpositive(x)) is False + assert satask(Q.negative(x**3), Q.nonpositive(x)) is None + assert satask(Q.real(x**3), Q.nonpositive(x)) is True + + # If x is zero, x**negative is not real. + assert satask(Q.nonnegative(x**-2), Q.nonpositive(x)) is None + assert satask(Q.nonpositive(x**-2), Q.nonpositive(x)) is None + assert satask(Q.positive(x**-2), Q.nonpositive(x)) is None + assert satask(Q.negative(x**-2), Q.nonpositive(x)) is None + assert satask(Q.real(x**-2), Q.nonpositive(x)) is None + + # We could deduce things for negative powers if x is nonzero, but it + # isn't implemented yet.
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 2, "test_score": 3 }, "num_modified_files": 1 }
0.7
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "mpmath>=0.19", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.7", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi @ file:///croot/certifi_1671487769961/work/certifi exceptiongroup==1.2.2 importlib-metadata==6.7.0 iniconfig==2.0.0 mpmath==1.2.1 packaging==24.0 pluggy==1.2.0 pytest==7.4.4 -e git+https://github.com/sympy/sympy.git@e7914d7f1aae83d22c8c96c8a5af5617596a890c#egg=sympy tomli==2.0.1 typing_extensions==4.7.1 zipp==3.15.0
name: sympy channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2022.12.7=py37h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - mpmath=1.2.1=py37h06a4308_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=22.3.1=py37h06a4308_0 - python=3.7.16=h7a1cb2a_0 - readline=8.2=h5eee18b_0 - setuptools=65.6.3=py37h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.38.4=py37h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - exceptiongroup==1.2.2 - importlib-metadata==6.7.0 - iniconfig==2.0.0 - packaging==24.0 - pluggy==1.2.0 - pytest==7.4.4 - tomli==2.0.1 - typing-extensions==4.7.1 - zipp==3.15.0 prefix: /opt/conda/envs/sympy
[ "sympy/assumptions/tests/test_satask.py::test_pow_pos_neg" ]
[ "sympy/assumptions/tests/test_satask.py::test_invertible" ]
[ "sympy/assumptions/tests/test_satask.py::test_satask", "sympy/assumptions/tests/test_satask.py::test_zero", "sympy/assumptions/tests/test_satask.py::test_zero_positive", "sympy/assumptions/tests/test_satask.py::test_zero_pow", "sympy/assumptions/tests/test_satask.py::test_prime", "sympy/assumptions/tests/test_satask.py::test_old_assump", "sympy/assumptions/tests/test_satask.py::test_rational_irrational", "sympy/assumptions/tests/test_satask.py::test_even", "sympy/assumptions/tests/test_satask.py::test_odd", "sympy/assumptions/tests/test_satask.py::test_integer", "sympy/assumptions/tests/test_satask.py::test_abs", "sympy/assumptions/tests/test_satask.py::test_imaginary", "sympy/assumptions/tests/test_satask.py::test_real", "sympy/assumptions/tests/test_satask.py::test_pos_neg" ]
[]
BSD
422
box__box-python-sdk-114
8b6afa5cdecd1b622658542c0f4eb5e003a37258
2016-02-10 21:46:38
98f1b812cdcf53276a369213f5cb59bfb2d0e545
diff --git a/HISTORY.rst b/HISTORY.rst index 5c9f373..2194c18 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -6,6 +6,11 @@ Release History Upcoming ++++++++ +1.4.1 (2016-02-11) +++++++++++++++++++ + +- Files now support getting a direct download url. + 1.4.0 (2016-01-05) ++++++++++++++++++ diff --git a/boxsdk/object/file.py b/boxsdk/object/file.py index 96f6079..2b28053 100644 --- a/boxsdk/object/file.py +++ b/boxsdk/object/file.py @@ -237,3 +237,54 @@ def metadata(self, scope='global', template='properties'): :class:`Metadata` """ return Metadata(self._session, self, scope, template) + + def get_shared_link_download_url( + self, + access=None, + etag=None, + unshared_at=None, + allow_preview=None, + password=None, + ): + """ + Get a shared link download url for the file with the given access permissions. + This url is a direct download url for the file. + + :param access: + Determines who can access the shared link. May be open, company, or collaborators. If no access is + specified, the default access will be used. + :type access: + `unicode` or None + :param etag: + If specified, instruct the Box API to create the link only if the current version's etag matches. + :type etag: + `unicode` or None + :param unshared_at: + The date on which this link should be disabled. May only be set if the current user is not a free user + and has permission to set expiration dates. + :type unshared_at: + :class:`datetime.date` or None + :param allow_preview: + Whether or not the item being shared can be previewed when accessed via the shared link. + If this parameter is None, the default setting will be used. + :type allow_preview: + `bool` or None + :param password: + The password required to view this link. If no password is specified then no password will be set. + Please notice that this is a premium feature, which might not be available to your app. + :type password: + `unicode` or None + :returns: + The URL of the shared link that allows direct download. + :rtype: + `unicode` + :raises: :class:`BoxAPIException` if the specified etag doesn't match the latest version of the item. + """ + item = self.create_shared_link( + access=access, + etag=etag, + unshared_at=unshared_at, + allow_preview=allow_preview, + password=password, + ) + return item.shared_link['download_url'] diff --git a/boxsdk/object/item.py b/boxsdk/object/item.py index 3883546..8487761 100644 --- a/boxsdk/object/item.py +++ b/boxsdk/object/item.py @@ -159,8 +159,17 @@ def move(self, parent_folder): } return self.update_info(data) - def get_shared_link(self, access=None, etag=None, unshared_at=None, allow_download=None, allow_preview=None, password=None): - """Get a shared link for the item with the given access permissions. + def create_shared_link( + self, + access=None, + etag=None, + unshared_at=None, + allow_download=None, + allow_preview=None, + password=None, + ): + """ + Create a shared link for the item with the given access permissions. :param access: Determines who can access the shared link. May be open, company, or collaborators. If no access is @@ -191,10 +200,11 @@ def get_shared_link(self, access=None, etag=None, unshared_at=None, allow_downlo Please notice that this is a premium feature, which might not be available to your app. :type password: `unicode` or None - :returns: - The URL of the shared link. + :return: + The updated object with s shared link. + Returns a new object of the same type, without modifying the original object passed as self. 
:rtype: - `unicode` + :class:`Item` :raises: :class:`BoxAPIException` if the specified etag doesn't match the latest version of the item. """ data = { @@ -216,7 +226,64 @@ def get_shared_link(self, access=None, etag=None, unshared_at=None, allow_downlo if password is not None: data['shared_link']['password'] = password - item = self.update_info(data, etag=etag) + return self.update_info(data, etag=etag) + + def get_shared_link( + self, + access=None, + etag=None, + unshared_at=None, + allow_download=None, + allow_preview=None, + password=None, + ): + """ + Get a shared link for the item with the given access permissions. + This url leads to a Box.com shared link page, where the item can be previewed, downloaded, etc. + + :param access: + Determines who can access the shared link. May be open, company, or collaborators. If no access is + specified, the default access will be used. + :type access: + `unicode` or None + :param etag: + If specified, instruct the Box API to create the link only if the current version's etag matches. + :type etag: + `unicode` or None + :param unshared_at: + The date on which this link should be disabled. May only be set if the current user is not a free user + and has permission to set expiration dates. + :type unshared_at: + :class:`datetime.date` or None + :param allow_download: + Whether or not the item being shared can be downloaded when accessed via the shared link. + If this parameter is None, the default setting will be used. + :type allow_download: + `bool` or None + :param allow_preview: + Whether or not the item being shared can be previewed when accessed via the shared link. + If this parameter is None, the default setting will be used. + :type allow_preview: + `bool` or None + :param password: + The password required to view this link. If no password is specified then no password will be set. + Please notice that this is a premium feature, which might not be available to your app. + :type password: + `unicode` or None + :returns: + The URL of the shared link. + :rtype: + `unicode` + :raises: :class:`BoxAPIException` if the specified etag doesn't match the latest version of the item. + """ + item = self.create_shared_link( + access=access, + etag=etag, + unshared_at=unshared_at, + allow_download=allow_download, + allow_preview=allow_preview, + password=password, + ) return item.shared_link['url'] def remove_shared_link(self, etag=None): diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..c34b498 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,5 @@ +[bdist_wheel] +# This flag says that the code is written to work on both Python 2 and Python +# 3. If at all possible, it is good practice to do this. If you cannot, you +# will need to generate wheels for each Python version that you support. +universal=1 \ No newline at end of file diff --git a/setup.py b/setup.py index 23b01bc..b3dc950 100644 --- a/setup.py +++ b/setup.py @@ -61,7 +61,7 @@ def main(): install_requires.append('ordereddict>=1.1') setup( name='boxsdk', - version='1.4.0', + version='1.4.1', description='Official Box Python SDK', long_description=open(join(base_dir, 'README.rst')).read(), author='Box',
Add support for getting file shared link download URLs The [get_shared_link()](https://github.com/box/box-python-sdk/blob/master/boxsdk/object/item.py#L162) method returns: ```python return item.shared_link['url'] ``` However sometimes there may be a use case for getting a **direct link** to a file, which should also be present in the response from the [Box API](https://box-content.readme.io/reference#create-a-shared-link-for-a-file): ```json { "type": "file", "id": "10559150999", ... "shared_link": { "url": "https://foo.box.com/s/7mcmdlavtye5o5i0ue8xmtwh2sx5bv8p", "download_url":"https://foo.box.com/shared/static/7mcmdlavtye5o5i0ue8xmtwh2sx5bv8p.png", ... } } ``` Currently the Box Python SDK has a common `get_shared_link` method on the `Item` class, and since `download_url` is not a valid attribute for a shared link on a folder this probably should be added *only* in the context of `File`. I'm beginning to implement this on my fork with the intent to submit back upstream. What seems like the least terrible approach would be to clone the `get_shared_link` method from the `Item` class into the `File` class, rename it `get_download_link` (or `get_download_url`?), and change the return value, but this seems like a lot of code duplication. Is there a better way? Any recommendations on approaches or proper patterns to stick to would be most appreciated! ** [Description of one specific use case for this feature](https://community.box.com/t5/Admin-Forum/quot-We-re-sorry-but-we-can-t-preview-zip-files-quot-is/m-p/11336)
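For reference, a minimal usage sketch of the approach taken in the patch above: the OAuth2 credentials are placeholders, the file id is taken from the issue's example response, and `create_shared_link` / `get_shared_link_download_url` only exist once that patch (boxsdk 1.4.1) is applied.

```python
# Sketch only -- assumes an authenticated boxsdk 1.4.1 client; credentials are placeholders.
from boxsdk import OAuth2, Client

oauth = OAuth2(client_id='...', client_secret='...', access_token='...')
client = Client(oauth)

file_obj = client.file(file_id='10559150999')

# Box.com shared-link page (preview/download UI) -- existed before the patch.
page_url = file_obj.get_shared_link(access='open')

# Direct download URL -- the new file-only helper added by the patch.
download_url = file_obj.get_shared_link_download_url(access='open')

# Both helpers sit on top of create_shared_link(), which returns an updated
# item whose shared_link dict carries 'url' and, for files, 'download_url'.
updated = file_obj.create_shared_link(access='open')
print(updated.shared_link['download_url'])
```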
box/box-python-sdk
diff --git a/test/unit/object/conftest.py b/test/unit/object/conftest.py index 0d625af..65e3863 100644 --- a/test/unit/object/conftest.py +++ b/test/unit/object/conftest.py @@ -1,6 +1,7 @@ # coding: utf-8 from __future__ import unicode_literals +from datetime import date import os from mock import Mock import pytest @@ -162,3 +163,28 @@ def file_size(request): def mock_group(mock_box_session, mock_group_id): group = Group(mock_box_session, mock_group_id) return group + + +@pytest.fixture(params=(True, False, None)) +def shared_link_can_download(request): + return request.param + + +@pytest.fixture(params=(True, False, None)) +def shared_link_can_preview(request): + return request.param + + +@pytest.fixture(params=('open', None)) +def shared_link_access(request): + return request.param + + +@pytest.fixture(params=('hunter2', None)) +def shared_link_password(request): + return request.param + + +@pytest.fixture(params=(date(2015, 5, 5), None)) +def shared_link_unshared_at(request): + return request.param diff --git a/test/unit/object/test_file.py b/test/unit/object/test_file.py index ecbd4b2..44a8ec4 100644 --- a/test/unit/object/test_file.py +++ b/test/unit/object/test_file.py @@ -234,3 +234,43 @@ def test_preflight_check( expect_json_response=False, data=expected_data, ) + + +def test_get_shared_link_download_url( + test_file, + mock_box_session, + shared_link_access, + shared_link_unshared_at, + shared_link_password, + shared_link_can_preview, + test_url, + etag, + if_match_header, +): + # pylint:disable=redefined-outer-name, protected-access + expected_url = test_file.get_url() + mock_box_session.put.return_value.json.return_value = {'shared_link': {'url': None, 'download_url': test_url}} + expected_data = {'shared_link': {}} + if shared_link_access is not None: + expected_data['shared_link']['access'] = shared_link_access + if shared_link_unshared_at is not None: + expected_data['shared_link']['unshared_at'] = shared_link_unshared_at.isoformat() + if shared_link_can_preview is not None: + expected_data['shared_link']['permissions'] = permissions = {} + permissions['can_preview'] = shared_link_can_preview + if shared_link_password is not None: + expected_data['shared_link']['password'] = shared_link_password + url = test_file.get_shared_link_download_url( + etag=etag, + access=shared_link_access, + unshared_at=shared_link_unshared_at, + password=shared_link_password, + allow_preview=shared_link_can_preview, + ) + mock_box_session.put.assert_called_once_with( + expected_url, + data=json.dumps(expected_data), + headers=if_match_header, + params=None, + ) + assert url == test_url diff --git a/test/unit/object/test_item.py b/test/unit/object/test_item.py index abc080f..6226cef 100644 --- a/test/unit/object/test_item.py +++ b/test/unit/object/test_item.py @@ -1,7 +1,6 @@ # coding: utf-8 from __future__ import unicode_literals -from datetime import date import json import pytest @@ -56,31 +55,6 @@ def test_move_item(test_item_and_response, mock_box_session, test_folder, mock_o assert isinstance(move_response, test_item.__class__) -@pytest.fixture(params=(True, False, None)) -def shared_link_can_download(request): - return request.param - - -@pytest.fixture(params=(True, False, None)) -def shared_link_can_preview(request): - return request.param - - -@pytest.fixture(params=('open', None)) -def shared_link_access(request): - return request.param - - -@pytest.fixture(params=('hunter2', None)) -def shared_link_password(request): - return request.param - - -@pytest.fixture(params=(date(2015, 5, 5), None)) -def shared_link_unshared_at(request): - return request.param - - def test_get_shared_link( test_item_and_response, mock_box_session,
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks", "has_media", "has_added_files", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 1, "test_score": 3 }, "num_modified_files": 4 }
1.4
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-xdist", "mock", "sqlalchemy", "bottle", "jsonpatch" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.5", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
async-timeout==4.0.2 attrs==22.2.0 bottle==0.13.2 -e git+https://github.com/box/box-python-sdk.git@8b6afa5cdecd1b622658542c0f4eb5e003a37258#egg=boxsdk certifi==2021.5.30 cffi==1.15.1 charset-normalizer==2.0.12 cryptography==40.0.2 execnet==1.9.0 greenlet==2.0.2 idna==3.10 importlib-metadata==4.8.3 iniconfig==1.1.1 jsonpatch==1.32 jsonpointer==2.3 mock==5.2.0 packaging==21.3 pluggy==1.0.0 py==1.11.0 pycparser==2.21 PyJWT==2.4.0 pyparsing==3.1.4 pytest==7.0.1 pytest-xdist==3.0.2 redis==4.3.6 requests==2.27.1 requests-toolbelt==1.0.0 six==1.17.0 SQLAlchemy==1.4.54 tomli==1.2.3 typing_extensions==4.1.1 urllib3==1.26.20 zipp==3.6.0
name: box-python-sdk channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - async-timeout==4.0.2 - attrs==22.2.0 - bottle==0.13.2 - cffi==1.15.1 - charset-normalizer==2.0.12 - cryptography==40.0.2 - execnet==1.9.0 - greenlet==2.0.2 - idna==3.10 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - jsonpatch==1.32 - jsonpointer==2.3 - mock==5.2.0 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pycparser==2.21 - pyjwt==2.4.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-xdist==3.0.2 - redis==4.3.6 - requests==2.27.1 - requests-toolbelt==1.0.0 - six==1.17.0 - sqlalchemy==1.4.54 - tomli==1.2.3 - typing-extensions==4.1.1 - urllib3==1.26.20 - zipp==3.6.0 prefix: /opt/conda/envs/box-python-sdk
[ "test/unit/object/test_file.py::test_get_shared_link_download_url[open-shared_link_unshared_at0-hunter2-True-None]", "test/unit/object/test_file.py::test_get_shared_link_download_url[open-shared_link_unshared_at0-hunter2-True-etag]", "test/unit/object/test_file.py::test_get_shared_link_download_url[open-shared_link_unshared_at0-hunter2-False-None]", "test/unit/object/test_file.py::test_get_shared_link_download_url[open-shared_link_unshared_at0-hunter2-False-etag]", "test/unit/object/test_file.py::test_get_shared_link_download_url[open-shared_link_unshared_at0-hunter2-None-None]", "test/unit/object/test_file.py::test_get_shared_link_download_url[open-shared_link_unshared_at0-hunter2-None-etag]", "test/unit/object/test_file.py::test_get_shared_link_download_url[open-shared_link_unshared_at0-None-True-None]", "test/unit/object/test_file.py::test_get_shared_link_download_url[open-shared_link_unshared_at0-None-True-etag]", "test/unit/object/test_file.py::test_get_shared_link_download_url[open-shared_link_unshared_at0-None-False-None]", "test/unit/object/test_file.py::test_get_shared_link_download_url[open-shared_link_unshared_at0-None-False-etag]", "test/unit/object/test_file.py::test_get_shared_link_download_url[open-shared_link_unshared_at0-None-None-None]", "test/unit/object/test_file.py::test_get_shared_link_download_url[open-shared_link_unshared_at0-None-None-etag]", "test/unit/object/test_file.py::test_get_shared_link_download_url[open-None-hunter2-True-None]", "test/unit/object/test_file.py::test_get_shared_link_download_url[open-None-hunter2-True-etag]", "test/unit/object/test_file.py::test_get_shared_link_download_url[open-None-hunter2-False-None]", "test/unit/object/test_file.py::test_get_shared_link_download_url[open-None-hunter2-False-etag]", "test/unit/object/test_file.py::test_get_shared_link_download_url[open-None-hunter2-None-None]", "test/unit/object/test_file.py::test_get_shared_link_download_url[open-None-hunter2-None-etag]", "test/unit/object/test_file.py::test_get_shared_link_download_url[open-None-None-True-None]", "test/unit/object/test_file.py::test_get_shared_link_download_url[open-None-None-True-etag]", "test/unit/object/test_file.py::test_get_shared_link_download_url[open-None-None-False-None]", "test/unit/object/test_file.py::test_get_shared_link_download_url[open-None-None-False-etag]", "test/unit/object/test_file.py::test_get_shared_link_download_url[open-None-None-None-None]", "test/unit/object/test_file.py::test_get_shared_link_download_url[open-None-None-None-etag]", "test/unit/object/test_file.py::test_get_shared_link_download_url[None-shared_link_unshared_at0-hunter2-True-None]", "test/unit/object/test_file.py::test_get_shared_link_download_url[None-shared_link_unshared_at0-hunter2-True-etag]", "test/unit/object/test_file.py::test_get_shared_link_download_url[None-shared_link_unshared_at0-hunter2-False-None]", "test/unit/object/test_file.py::test_get_shared_link_download_url[None-shared_link_unshared_at0-hunter2-False-etag]", "test/unit/object/test_file.py::test_get_shared_link_download_url[None-shared_link_unshared_at0-hunter2-None-None]", "test/unit/object/test_file.py::test_get_shared_link_download_url[None-shared_link_unshared_at0-hunter2-None-etag]", "test/unit/object/test_file.py::test_get_shared_link_download_url[None-shared_link_unshared_at0-None-True-None]", "test/unit/object/test_file.py::test_get_shared_link_download_url[None-shared_link_unshared_at0-None-True-etag]", 
"test/unit/object/test_file.py::test_get_shared_link_download_url[None-shared_link_unshared_at0-None-False-None]", "test/unit/object/test_file.py::test_get_shared_link_download_url[None-shared_link_unshared_at0-None-False-etag]", "test/unit/object/test_file.py::test_get_shared_link_download_url[None-shared_link_unshared_at0-None-None-None]", "test/unit/object/test_file.py::test_get_shared_link_download_url[None-shared_link_unshared_at0-None-None-etag]", "test/unit/object/test_file.py::test_get_shared_link_download_url[None-None-hunter2-True-None]", "test/unit/object/test_file.py::test_get_shared_link_download_url[None-None-hunter2-True-etag]", "test/unit/object/test_file.py::test_get_shared_link_download_url[None-None-hunter2-False-None]", "test/unit/object/test_file.py::test_get_shared_link_download_url[None-None-hunter2-False-etag]", "test/unit/object/test_file.py::test_get_shared_link_download_url[None-None-hunter2-None-None]", "test/unit/object/test_file.py::test_get_shared_link_download_url[None-None-hunter2-None-etag]", "test/unit/object/test_file.py::test_get_shared_link_download_url[None-None-None-True-None]", "test/unit/object/test_file.py::test_get_shared_link_download_url[None-None-None-True-etag]", "test/unit/object/test_file.py::test_get_shared_link_download_url[None-None-None-False-None]", "test/unit/object/test_file.py::test_get_shared_link_download_url[None-None-None-False-etag]", "test/unit/object/test_file.py::test_get_shared_link_download_url[None-None-None-None-None]", "test/unit/object/test_file.py::test_get_shared_link_download_url[None-None-None-None-etag]" ]
[ "test/unit/object/test_file.py::test_update_contents_with_stream_does_preflight_check_if_specified[True-0-True]", "test/unit/object/test_file.py::test_update_contents_with_stream_does_preflight_check_if_specified[True-0-False]", "test/unit/object/test_file.py::test_update_contents_with_stream_does_preflight_check_if_specified[True-100-True]", "test/unit/object/test_file.py::test_update_contents_with_stream_does_preflight_check_if_specified[True-100-False]", "test/unit/object/test_file.py::test_update_contents_does_preflight_check_if_specified[True-0-True]", "test/unit/object/test_file.py::test_update_contents_does_preflight_check_if_specified[True-0-False]", "test/unit/object/test_file.py::test_update_contents_does_preflight_check_if_specified[True-100-True]", "test/unit/object/test_file.py::test_update_contents_does_preflight_check_if_specified[True-100-False]" ]
[ "test/unit/object/test_file.py::test_delete_file[None]", "test/unit/object/test_file.py::test_delete_file[etag]", "test/unit/object/test_file.py::test_download_to", "test/unit/object/test_file.py::test_get_content", "test/unit/object/test_file.py::test_update_contents[None-True-True-True]", "test/unit/object/test_file.py::test_update_contents[None-True-True-False]", "test/unit/object/test_file.py::test_update_contents[None-True-False-True]", "test/unit/object/test_file.py::test_update_contents[None-True-False-False]", "test/unit/object/test_file.py::test_update_contents[None-False-True-True]", "test/unit/object/test_file.py::test_update_contents[None-False-True-False]", "test/unit/object/test_file.py::test_update_contents[None-False-False-True]", "test/unit/object/test_file.py::test_update_contents[None-False-False-False]", "test/unit/object/test_file.py::test_update_contents[etag-True-True-True]", "test/unit/object/test_file.py::test_update_contents[etag-True-True-False]", "test/unit/object/test_file.py::test_update_contents[etag-True-False-True]", "test/unit/object/test_file.py::test_update_contents[etag-True-False-False]", "test/unit/object/test_file.py::test_update_contents[etag-False-True-True]", "test/unit/object/test_file.py::test_update_contents[etag-False-True-False]", "test/unit/object/test_file.py::test_update_contents[etag-False-False-True]", "test/unit/object/test_file.py::test_update_contents[etag-False-False-False]", "test/unit/object/test_file.py::test_update_contents_with_stream_does_preflight_check_if_specified[False-0-True]", "test/unit/object/test_file.py::test_update_contents_with_stream_does_preflight_check_if_specified[False-0-False]", "test/unit/object/test_file.py::test_update_contents_with_stream_does_preflight_check_if_specified[False-100-True]", "test/unit/object/test_file.py::test_update_contents_with_stream_does_preflight_check_if_specified[False-100-False]", "test/unit/object/test_file.py::test_update_contents_does_preflight_check_if_specified[False-0-True]", "test/unit/object/test_file.py::test_update_contents_does_preflight_check_if_specified[False-0-False]", "test/unit/object/test_file.py::test_update_contents_does_preflight_check_if_specified[False-100-True]", "test/unit/object/test_file.py::test_update_contents_does_preflight_check_if_specified[False-100-False]", "test/unit/object/test_file.py::test_lock[True]", "test/unit/object/test_file.py::test_lock[False]", "test/unit/object/test_file.py::test_unlock", "test/unit/object/test_file.py::test_preflight_check[100-foo.txt-{\"size\":", "test/unit/object/test_file.py::test_preflight_check[200-None-{\"size\":", "test/unit/object/test_item.py::test_update_info[file-None]", "test/unit/object/test_item.py::test_update_info[file-etag]", "test/unit/object/test_item.py::test_update_info[folder-None]", "test/unit/object/test_item.py::test_update_info[folder-etag]", "test/unit/object/test_item.py::test_rename_item[file]", "test/unit/object/test_item.py::test_rename_item[folder]", "test/unit/object/test_item.py::test_copy_item[file]", "test/unit/object/test_item.py::test_copy_item[folder]", "test/unit/object/test_item.py::test_move_item[file]", "test/unit/object/test_item.py::test_move_item[folder]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-True-True-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-True-True-etag]", 
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-True-False-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-True-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-True-None-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-True-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-False-True-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-False-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-False-False-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-False-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-False-None-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-False-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-None-True-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-None-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-None-False-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-None-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-None-None-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-hunter2-None-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-True-True-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-True-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-True-False-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-True-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-True-None-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-True-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-False-True-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-False-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-False-False-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-False-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-False-None-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-False-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-None-True-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-None-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-None-False-None]", 
"test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-None-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-None-None-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-shared_link_unshared_at0-None-None-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-True-True-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-True-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-True-False-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-True-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-True-None-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-True-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-False-True-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-False-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-False-False-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-False-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-False-None-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-False-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-None-True-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-None-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-None-False-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-None-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-None-None-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-hunter2-None-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-True-True-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-True-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-True-False-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-True-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-True-None-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-True-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-False-True-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-False-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-False-False-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-False-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-False-None-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-False-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-None-True-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-None-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-None-False-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-None-False-etag]", 
"test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-None-None-None]", "test/unit/object/test_item.py::test_get_shared_link[file-open-None-None-None-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-True-True-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-True-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-True-False-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-True-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-True-None-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-True-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-False-True-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-False-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-False-False-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-False-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-False-None-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-False-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-None-True-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-None-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-None-False-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-None-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-None-None-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-hunter2-None-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-True-True-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-True-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-True-False-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-True-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-True-None-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-True-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-False-True-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-False-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-False-False-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-False-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-False-None-None]", 
"test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-False-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-None-True-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-None-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-None-False-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-None-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-None-None-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-shared_link_unshared_at0-None-None-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-True-True-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-True-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-True-False-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-True-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-True-None-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-True-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-False-True-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-False-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-False-False-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-False-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-False-None-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-False-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-None-True-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-None-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-None-False-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-None-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-None-None-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-hunter2-None-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-True-True-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-True-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-True-False-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-True-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-True-None-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-True-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-False-True-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-False-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-False-False-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-False-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-False-None-None]", 
"test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-False-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-None-True-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-None-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-None-False-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-None-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-None-None-None]", "test/unit/object/test_item.py::test_get_shared_link[file-None-None-None-None-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-True-True-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-True-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-True-False-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-True-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-True-None-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-True-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-False-True-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-False-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-False-False-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-False-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-False-None-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-False-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-None-True-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-None-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-None-False-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-None-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-None-None-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-hunter2-None-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-True-True-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-True-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-True-False-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-True-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-True-None-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-True-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-False-True-None]", 
"test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-False-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-False-False-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-False-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-False-None-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-False-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-None-True-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-None-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-None-False-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-None-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-None-None-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-shared_link_unshared_at0-None-None-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-True-True-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-True-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-True-False-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-True-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-True-None-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-True-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-False-True-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-False-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-False-False-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-False-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-False-None-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-False-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-None-True-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-None-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-None-False-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-None-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-None-None-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-hunter2-None-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-True-True-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-True-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-True-False-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-True-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-True-None-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-True-None-etag]", 
"test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-False-True-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-False-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-False-False-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-False-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-False-None-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-False-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-None-True-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-None-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-None-False-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-None-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-None-None-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-open-None-None-None-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-True-True-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-True-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-True-False-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-True-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-True-None-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-True-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-False-True-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-False-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-False-False-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-False-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-False-None-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-False-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-None-True-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-None-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-None-False-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-None-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-None-None-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-hunter2-None-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-True-True-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-True-True-etag]", 
"test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-True-False-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-True-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-True-None-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-True-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-False-True-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-False-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-False-False-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-False-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-False-None-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-False-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-None-True-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-None-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-None-False-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-None-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-None-None-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-shared_link_unshared_at0-None-None-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-True-True-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-True-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-True-False-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-True-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-True-None-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-True-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-False-True-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-False-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-False-False-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-False-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-False-None-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-False-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-None-True-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-None-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-None-False-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-None-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-None-None-None]", 
"test/unit/object/test_item.py::test_get_shared_link[folder-None-None-hunter2-None-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-True-True-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-True-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-True-False-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-True-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-True-None-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-True-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-False-True-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-False-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-False-False-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-False-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-False-None-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-False-None-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-None-True-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-None-True-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-None-False-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-None-False-etag]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-None-None-None]", "test/unit/object/test_item.py::test_get_shared_link[folder-None-None-None-None-None-etag]", "test/unit/object/test_item.py::test_remove_shared_link[file-None]", "test/unit/object/test_item.py::test_remove_shared_link[file-etag]", "test/unit/object/test_item.py::test_remove_shared_link[folder-None]", "test/unit/object/test_item.py::test_remove_shared_link[folder-etag]", "test/unit/object/test_item.py::test_get[file-None-None]", "test/unit/object/test_item.py::test_get[file-None-fields1]", "test/unit/object/test_item.py::test_get[file-etag-None]", "test/unit/object/test_item.py::test_get[file-etag-fields1]", "test/unit/object/test_item.py::test_get[folder-None-None]", "test/unit/object/test_item.py::test_get[folder-None-fields1]", "test/unit/object/test_item.py::test_get[folder-etag-None]", "test/unit/object/test_item.py::test_get[folder-etag-fields1]" ]
[]
Apache License 2.0
423
marshmallow-code__apispec-50
684da986e6c77232cc3e8618a51e271e9cd474f8
2016-02-10 21:48:12
684da986e6c77232cc3e8618a51e271e9cd474f8
diff --git a/apispec/core.py b/apispec/core.py index 029d311..fbb3cc3 100644 --- a/apispec/core.py +++ b/apispec/core.py @@ -136,11 +136,21 @@ class APISpec(object): """Add a new path object to the spec. https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md#paths-object- + + :param str|Path|None path: URL Path component or Path instance + :param dict|None operations: describes the http methods and options for `path` + :param dict kwargs: parameters used by any path helpers see :meth:`register_path_helper` """ - if path and 'basePath' in self.options: + p = path + if isinstance(path, Path): + p = path.path + if p and 'basePath' in self.options: pattern = '^{0}'.format(re.escape(self.options['basePath'])) - path = re.sub(pattern, '', path) - path = Path(path=path, operations=operations) + p = re.sub(pattern, '', p) + if isinstance(path, Path): + path.path = p + else: + path = Path(path=p, operations=operations) # Execute plugins' helpers for func in self._path_helpers: try:
How should I add an existing Path instance to an APISpec instance I tried to do something like: ```python spec = APISpec(**kwargs) spec.add_path(Path(**kwargs)) ``` And I received the following error (as I should have) ``` File "/Users/Josh/Developer/Kaplan/AWS-Lambda-APIGateway-POC/env/lib/python2.7/site-packages/apispec/core.py", line 169, in add_path self._paths.setdefault(path.path, path).update(path) TypeError: unhashable type: 'Path' ``` Is there an easy way to add an existing `Path` object or do I need to duplicate the logic of `self._paths.setdefault(path.path, path).update(path)`? If this functionality seems worthwhile, I can submit a PR to update `APISpec.add_path` to accept `Path` objects.
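The patch above makes `add_path` tolerate a ready-made `Path`. As a rough, self-contained sketch of that normalisation logic (the dict-backed `Path` stand-in and the free-standing `add_path` function below are simplifications for illustration only; the real class and method live in `apispec.core` and carry more behaviour):

```python
# Rough sketch only: a toy, dict-backed stand-in for apispec's Path class.
import re


class Path(dict):
    """Operations stored as dict items, plus the URL rule on .path."""
    def __init__(self, path=None, operations=None):
        super().__init__(operations or {})
        self.path = path


def add_path(options, registry, path=None, operations=None):
    # Normalise to a plain string first, whether we were given a str or a Path.
    p = path.path if isinstance(path, Path) else path
    if p and 'basePath' in options:
        p = re.sub('^' + re.escape(options['basePath']), '', p)
    if isinstance(path, Path):
        path.path = p                       # keep the caller's instance
    else:
        path = Path(path=p, operations=operations)
    # The same storage step that raised TypeError before now keys on the
    # hashable path string instead of a Path object.
    registry.setdefault(path.path, path).update(path)


paths = {}
add_path({'basePath': '/v1'}, paths,
         path=Path(path='/v1/pets', operations={'get': {}}))
assert '/pets' in paths and 'get' in paths['/pets']
```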
marshmallow-code/apispec
diff --git a/tests/test_core.py b/tests/test_core.py index 7efe15b..07dbf47 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -179,6 +179,24 @@ class TestPath: assert '/pets' in spec._paths assert '/v1/pets' not in spec._paths + def test_add_path_accepts_path(self, spec): + route = '/pet/{petId}' + route_spec = self.paths[route] + path = Path(path=route, operations={'get': route_spec['get']}) + spec.add_path(path) + + p = spec._paths[path.path] + assert path.path == p.path + assert 'get' in p + + def test_add_path_strips_path_base_path(self, spec): + spec.options['basePath'] = '/v1' + path = Path(path='/v1/pets') + spec.add_path(path) + assert '/pets' in spec._paths + assert '/v1/pets' not in spec._paths + + def test_add_parameters(self, spec): route_spec = self.paths['/pet/{petId}']['get']
{ "commit_name": "head_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 1 }
0.6
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.9", "reqs_path": [ "dev-requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
-e git+https://github.com/marshmallow-code/apispec.git@684da986e6c77232cc3e8618a51e271e9cd474f8#egg=apispec backports.tarfile==1.2.0 blinker==1.9.0 cachetools==5.5.2 certifi==2025.1.31 cffi==1.17.1 chardet==5.2.0 charset-normalizer==3.4.1 click==8.1.8 colorama==0.4.6 cryptography==44.0.2 distlib==0.3.9 docutils==0.21.2 exceptiongroup==1.2.2 filelock==3.18.0 flake8==2.4.1 Flask==3.1.0 id==1.5.0 idna==3.10 importlib_metadata==8.6.1 iniconfig==2.1.0 invoke==2.2.0 itsdangerous==2.2.0 jaraco.classes==3.4.0 jaraco.context==6.0.1 jaraco.functools==4.1.0 jeepney==0.9.0 Jinja2==3.1.6 keyring==25.6.0 markdown-it-py==3.0.0 MarkupSafe==3.0.2 marshmallow==3.26.1 mccabe==0.3.1 mdurl==0.1.2 mock==5.2.0 more-itertools==10.6.0 nh3==0.2.21 packaging==24.2 pep8==1.7.1 platformdirs==4.3.7 pluggy==1.5.0 pycparser==2.22 pyflakes==0.8.1 Pygments==2.19.1 pyproject-api==1.9.0 pytest==8.3.5 PyYAML==6.0.2 readme_renderer==44.0 requests==2.32.3 requests-toolbelt==1.0.0 rfc3986==2.0.0 rich==14.0.0 SecretStorage==3.3.3 tomli==2.2.1 tox==4.25.0 twine==6.1.0 typing_extensions==4.13.0 urllib3==2.3.0 virtualenv==20.29.3 webargs==8.6.0 Werkzeug==3.1.3 zipp==3.21.0
name: apispec channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - backports-tarfile==1.2.0 - blinker==1.9.0 - cachetools==5.5.2 - certifi==2025.1.31 - cffi==1.17.1 - chardet==5.2.0 - charset-normalizer==3.4.1 - click==8.1.8 - colorama==0.4.6 - cryptography==44.0.2 - distlib==0.3.9 - docutils==0.21.2 - exceptiongroup==1.2.2 - filelock==3.18.0 - flake8==2.4.1 - flask==3.1.0 - id==1.5.0 - idna==3.10 - importlib-metadata==8.6.1 - iniconfig==2.1.0 - invoke==2.2.0 - itsdangerous==2.2.0 - jaraco-classes==3.4.0 - jaraco-context==6.0.1 - jaraco-functools==4.1.0 - jeepney==0.9.0 - jinja2==3.1.6 - keyring==25.6.0 - markdown-it-py==3.0.0 - markupsafe==3.0.2 - marshmallow==3.26.1 - mccabe==0.3.1 - mdurl==0.1.2 - mock==5.2.0 - more-itertools==10.6.0 - nh3==0.2.21 - packaging==24.2 - pep8==1.7.1 - platformdirs==4.3.7 - pluggy==1.5.0 - pycparser==2.22 - pyflakes==0.8.1 - pygments==2.19.1 - pyproject-api==1.9.0 - pytest==8.3.5 - pyyaml==6.0.2 - readme-renderer==44.0 - requests==2.32.3 - requests-toolbelt==1.0.0 - rfc3986==2.0.0 - rich==14.0.0 - secretstorage==3.3.3 - tomli==2.2.1 - tox==4.25.0 - twine==6.1.0 - typing-extensions==4.13.0 - urllib3==2.3.0 - virtualenv==20.29.3 - webargs==8.6.0 - werkzeug==3.1.3 - zipp==3.21.0 prefix: /opt/conda/envs/apispec
[ "tests/test_core.py::TestPath::test_add_path_accepts_path", "tests/test_core.py::TestPath::test_add_path_strips_path_base_path" ]
[]
[ "tests/test_core.py::TestMetadata::test_swagger_version", "tests/test_core.py::TestMetadata::test_swagger_metadata", "tests/test_core.py::TestDefinitions::test_definition", "tests/test_core.py::TestDefinitions::test_definition_stores_enum", "tests/test_core.py::TestPath::test_add_path", "tests/test_core.py::TestPath::test_add_path_merges_paths", "tests/test_core.py::TestPath::test_add_path_ensures_path_parameters_required", "tests/test_core.py::TestPath::test_add_path_with_no_path_raises_error", "tests/test_core.py::TestPath::test_add_path_strips_base_path", "tests/test_core.py::TestPath::test_add_parameters", "tests/test_core.py::TestExtensions::test_setup_plugin", "tests/test_core.py::TestExtensions::test_setup_plugin_doesnt_exist", "tests/test_core.py::TestExtensions::test_register_definition_helper", "tests/test_core.py::TestExtensions::test_register_path_helper", "tests/test_core.py::TestExtensions::test_multiple_path_helpers_w_different_signatures", "tests/test_core.py::TestExtensions::test_multiple_definition_helpers_w_different_signatures", "tests/test_core.py::TestDefinitionHelpers::test_definition_helpers_are_used", "tests/test_core.py::TestDefinitionHelpers::test_multiple_definition_helpers", "tests/test_core.py::TestPathHelpers::test_path_helper_is_used", "tests/test_core.py::TestResponseHelpers::test_response_helper_is_used" ]
[]
MIT License
424
sympy__sympy-10574
703e7bc01544575cc2bc77a242001667477ef3cb
2016-02-12 03:01:52
0241b35cae5f2adc04dc37b25f7dc9c5f00bd746
diff --git a/sympy/core/basic.py b/sympy/core/basic.py index 9699c354db..3a4a6af4f7 100644 --- a/sympy/core/basic.py +++ b/sympy/core/basic.py @@ -849,15 +849,20 @@ def subs(self, *args, **kwargs): sequence = list(sequence) for i in range(len(sequence)): - o, n = sequence[i] - so, sn = sympify(o), sympify(n) - if not isinstance(so, Basic): - if type(o) is str: - so = Symbol(o) - sequence[i] = (so, sn) - if _aresame(so, sn): - sequence[i] = None - continue + s = list(sequence[i]) + for j, si in enumerate(s): + try: + si = sympify(si, strict=True) + except SympifyError: + if type(si) is str: + si = Symbol(si) + else: + # if it can't be sympified, skip it + sequence[i] = None + break + s[j] = si + else: + sequence[i] = None if _aresame(*s) else tuple(s) sequence = list(filter(None, sequence)) if unordered: diff --git a/sympy/physics/vector/vector.py b/sympy/physics/vector/vector.py index e835c3999f..93dc3d24c3 100644 --- a/sympy/physics/vector/vector.py +++ b/sympy/physics/vector/vector.py @@ -128,7 +128,10 @@ def __eq__(self, other): if other == 0: other = Vector(0) - other = _check_vector(other) + try: + other = _check_vector(other) + except TypeError: + return False if (self.args == []) and (other.args == []): return True elif (self.args == []) or (other.args == []):
Symbol subs Vector exception ``` A = ReferenceFrame('A') v = A.x s, t = symbols('s t') ``` ```(s / t)``` output: s / t ```(v / t)``` output: 1/t n̂x ```(s / t).subs(s, v)``` output: ``` anaconda3/lib/python3.4/site-packages/sympy/parsing/sympy_parser.py in eval_expr(code, local_dict, global_dict) 689 """ 690 expr = eval( --> 691 code, global_dict, local_dict) # take local objects in preference 692 693 return expr <string> in <module>() AttributeError: 'function' object has no attribute 'x' ```
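The new regression test in the test_patch below pins down the intended behaviour. A short usage sketch of that behaviour, hedged on running a SymPy build that already contains the fix:

```python
# Only holds on a SymPy that includes the fix: replacement targets that cannot
# be sympified (e.g. a physics Vector) are skipped by subs() instead of raising.
from sympy.abc import x
from sympy.physics.mechanics import ReferenceFrame

v = ReferenceFrame('A').x

assert x.subs(x, v) == x        # substitution silently ignored, no AttributeError
assert v.subs(v, x) == v
assert v.__eq__(x) is False     # comparing a Vector with a Symbol no longer raises
```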
sympy/sympy
diff --git a/sympy/core/tests/test_subs.py b/sympy/core/tests/test_subs.py index 73238f0289..d25846d850 100644 --- a/sympy/core/tests/test_subs.py +++ b/sympy/core/tests/test_subs.py @@ -678,3 +678,15 @@ def test_RootOf_issue_10092(): eq = x**3 - 17*x**2 + 81*x - 118 r = RootOf(eq, 0) assert (x < r).subs(x, r) is S.false + + +def test_issue_8886(): + from sympy.physics.mechanics import ReferenceFrame as R + from sympy.abc import x + # if something can't be sympified we assume that it + # doesn't play well with SymPy and disallow the + # substitution + v = R('A').x + assert x.subs(x, v) == x + assert v.subs(v, x) == v + assert v.__eq__(x) is False
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 0, "test_score": 2 }, "num_modified_files": 2 }
0.7
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work mpmath==1.3.0 packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work py @ file:///opt/conda/conda-bld/py_1644396412707/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 -e git+https://github.com/sympy/sympy.git@703e7bc01544575cc2bc77a242001667477ef3cb#egg=sympy toml @ file:///tmp/build/80754af9/toml_1616166611790/work typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: sympy channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - mpmath==1.3.0 prefix: /opt/conda/envs/sympy
[ "sympy/core/tests/test_subs.py::test_issue_8886" ]
[]
[ "sympy/core/tests/test_subs.py::test_subs", "sympy/core/tests/test_subs.py::test_subs_AccumBounds", "sympy/core/tests/test_subs.py::test_trigonometric", "sympy/core/tests/test_subs.py::test_powers", "sympy/core/tests/test_subs.py::test_logexppow", "sympy/core/tests/test_subs.py::test_bug", "sympy/core/tests/test_subs.py::test_subbug1", "sympy/core/tests/test_subs.py::test_subbug2", "sympy/core/tests/test_subs.py::test_dict_set", "sympy/core/tests/test_subs.py::test_dict_ambigous", "sympy/core/tests/test_subs.py::test_deriv_sub_bug3", "sympy/core/tests/test_subs.py::test_equality_subs1", "sympy/core/tests/test_subs.py::test_equality_subs2", "sympy/core/tests/test_subs.py::test_issue_3742", "sympy/core/tests/test_subs.py::test_subs_dict1", "sympy/core/tests/test_subs.py::test_mul", "sympy/core/tests/test_subs.py::test_subs_simple", "sympy/core/tests/test_subs.py::test_subs_constants", "sympy/core/tests/test_subs.py::test_subs_commutative", "sympy/core/tests/test_subs.py::test_subs_noncommutative", "sympy/core/tests/test_subs.py::test_subs_basic_funcs", "sympy/core/tests/test_subs.py::test_subs_wild", "sympy/core/tests/test_subs.py::test_subs_mixed", "sympy/core/tests/test_subs.py::test_division", "sympy/core/tests/test_subs.py::test_add", "sympy/core/tests/test_subs.py::test_subs_issue_4009", "sympy/core/tests/test_subs.py::test_functions_subs", "sympy/core/tests/test_subs.py::test_derivative_subs", "sympy/core/tests/test_subs.py::test_derivative_subs2", "sympy/core/tests/test_subs.py::test_derivative_subs3", "sympy/core/tests/test_subs.py::test_issue_5284", "sympy/core/tests/test_subs.py::test_subs_iter", "sympy/core/tests/test_subs.py::test_subs_dict", "sympy/core/tests/test_subs.py::test_no_arith_subs_on_floats", "sympy/core/tests/test_subs.py::test_issue_5651", "sympy/core/tests/test_subs.py::test_issue_6075", "sympy/core/tests/test_subs.py::test_issue_6079", "sympy/core/tests/test_subs.py::test_issue_4680", "sympy/core/tests/test_subs.py::test_issue_6158", "sympy/core/tests/test_subs.py::test_Function_subs", "sympy/core/tests/test_subs.py::test_simultaneous_subs", "sympy/core/tests/test_subs.py::test_issue_6419_6421", "sympy/core/tests/test_subs.py::test_issue_6559", "sympy/core/tests/test_subs.py::test_issue_5261", "sympy/core/tests/test_subs.py::test_issue_6923", "sympy/core/tests/test_subs.py::test_2arg_hack", "sympy/core/tests/test_subs.py::test_noncommutative_subs", "sympy/core/tests/test_subs.py::test_issue_2877", "sympy/core/tests/test_subs.py::test_issue_5910", "sympy/core/tests/test_subs.py::test_issue_5217", "sympy/core/tests/test_subs.py::test_pow_eval_subs_no_cache", "sympy/core/tests/test_subs.py::test_RootOf_issue_10092" ]
[]
BSD
425
scieloorg__xylose-88
09b42b365b375904f5d7102277e3f4e4a3d59e7f
2016-02-12 14:15:41
09b42b365b375904f5d7102277e3f4e4a3d59e7f
diff --git a/setup.py b/setup.py index 7516cbd..d1e1950 100755 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ except ImportError: setup( name="xylose", - version='0.42', + version='0.43', description="A SciELO library to abstract a JSON data structure that is a product of the ISIS2JSON conversion using the ISIS2JSON type 3 data model.", author="SciELO", author_email="[email protected]", diff --git a/xylose/scielodocument.py b/xylose/scielodocument.py index 03c2510..9b4cf43 100644 --- a/xylose/scielodocument.py +++ b/xylose/scielodocument.py @@ -195,10 +195,8 @@ class Journal(object): This method retrieves the original language of the given article. This method deals with the legacy fields (v400). """ - if not 'v400' in self.data: - return None - return self.data['v400'][0]['_'] + return self.data.get('v400', [{'_': None}])[0]['_'] def url(self, language='en'): """ @@ -235,15 +233,6 @@ class Journal(object): if 'v854' in self.data: return [area['_'] for area in self.data['v854']] - @property - def abbreviated_title(self): - """ - This method retrieves the journal abbreviated title of the given article, if it exists. - This method deals with the legacy fields (150). - """ - if 'v150' in self.data: - return self.data['v150'][0]['_'] - @property def wos_citation_indexes(self): """ @@ -273,8 +262,7 @@ class Journal(object): This method deals with the legacy fields (480). """ - if 'v480' in self.data: - return self.data['v480'][0]['_'] + return self.data.get('v480', [{'_': None}])[0]['_'] @property def publisher_loc(self): @@ -284,8 +272,7 @@ class Journal(object): This method deals with the legacy fields (490). """ - if 'v490' in self.data: - return self.data['v490'][0]['_'] + return self.data.get('v490', [{'_': None}])[0]['_'] @property def title(self): @@ -295,8 +282,30 @@ class Journal(object): This method deals with the legacy fields (100). """ - if 'v100' in self.data: - return self.data['v100'][0]['_'] + return self.data.get('v100', [{'_': None}])[0]['_'] + + @property + def subtitle(self): + """ + This method retrieves the journal subtitle. + This method deals with the legacy fields (v110). + """ + + return self.data.get('v110', [{'_': None}])[0]['_'] + + @property + def fulltitle(self): + """ + This method retrieves the join of the journal title plus the subtitle. + This method deals with the legacy fields (v100, v110). + """ + + data = [] + + data.append(self.title) + data.append(self.subtitle) + + return ' - '.join([i for i in data if i]) @property def title_nlm(self): @@ -306,8 +315,25 @@ class Journal(object): This method deals with the legacy fields (421). """ - if 'v421' in self.data: - return self.data['v421'][0]['_'] + return self.data.get('v421', [{'_': None}])[0]['_'] + + @property + def abbreviated_title(self): + """ + This method retrieves the journal abbreviated title of the given article, if it exists. + This method deals with the legacy fields (150). + """ + + return self.data.get('v150', [{'_': None}])[0]['_'] + + @property + def abbreviated_iso_title(self): + """ + This method retrieves the journal abbreviated title of the given article, if it exists. + This method deals with the legacy fields (151). + """ + + return self.data.get('v151', [{'_': None}])[0]['_'] @property def acronym(self): @@ -317,8 +343,7 @@ class Journal(object): This method deals with the legacy fields (68). 
""" - if 'v68' in self.data: - return self.data['v68'][0]['_'].lower() + return self.data.get('v68', [{'_': None}])[0]['_'] @property def periodicity(self): @@ -401,6 +426,7 @@ class Journal(object): return tools.get_date(self.data['v941'][0]['_']) + class Article(object): def __init__(self, data, iso_format=None):
Add a method to retrieve the journal subtitle v110 Add a method to retrieve the journal subtitle v110 (subtitle) Add a method to retrieve the journal title concatenated with the journal subtitle v110 (full_title)
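A minimal standalone sketch of the two accessors being requested, following the implementation in the patch above (`fulltitle` is the name the patch actually uses for the requested `full_title`); the stripped-down `Journal` below only carries the raw ISIS record dict, unlike the real class:

```python
class Journal(object):
    """Toy stand-in carrying only the raw ISIS record dict."""
    def __init__(self, data):
        self.data = data

    @property
    def title(self):
        return self.data.get('v100', [{'_': None}])[0]['_']

    @property
    def subtitle(self):
        # Legacy field v110 holds the journal subtitle.
        return self.data.get('v110', [{'_': None}])[0]['_']

    @property
    def fulltitle(self):
        # Title and subtitle joined with " - ", skipping whichever is missing.
        parts = [self.title, self.subtitle]
        return ' - '.join(i for i in parts if i)


journal = Journal({'v100': [{'_': 'Title'}], 'v110': [{'_': 'SubTitle'}]})
assert journal.fulltitle == 'Title - SubTitle'
assert Journal({'v110': [{'_': 'SubTitle'}]}).fulltitle == 'SubTitle'
```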
scieloorg/xylose
diff --git a/tests/test_document.py b/tests/test_document.py index 6ade9eb..7405ad9 100644 --- a/tests/test_document.py +++ b/tests/test_document.py @@ -623,11 +623,47 @@ class JournalTests(unittest.TestCase): def test_journal_title_nlm(self): self.fulldoc['title']['v421'] = [{u'_': u'Acta Limnologica Brasiliensia NLM'}] - + journal = Journal(self.fulldoc['title']) self.assertEqual(journal.title_nlm, u'Acta Limnologica Brasiliensia NLM') + def test_journal_fulltitle(self): + self.fulldoc['title']['v100'] = [{u'_': u'Title'}] + self.fulldoc['title']['v110'] = [{u'_': u'SubTitle'}] + + journal = Journal(self.fulldoc['title']) + + self.assertEqual(journal.fulltitle, u'Title - SubTitle') + + def test_journal_fulltitle_without_title(self): + del(self.fulldoc['title']['v100']) + self.fulldoc['title']['v110'] = [{u'_': u'SubTitle'}] + + journal = Journal(self.fulldoc['title']) + + self.assertEqual(journal.fulltitle, u'SubTitle') + + def test_journal_fulltitle_without_subtitle(self): + self.fulldoc['title']['v100'] = [{u'_': u'Title'}] + + journal = Journal(self.fulldoc['title']) + + self.assertEqual(journal.fulltitle, u'Title') + + def test_journal_subtitle(self): + self.fulldoc['title']['v110'] = [{u'_': u'SubTitle'}] + + journal = Journal(self.fulldoc['title']) + + self.assertEqual(journal.subtitle, u'SubTitle') + + def test_journal_without_subtitle(self): + + journal = Journal(self.fulldoc['title']) + + self.assertEqual(journal.subtitle, None) + def test_without_journal_title_nlm(self): journal = self.journal
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 3, "test_score": 0 }, "num_modified_files": 2 }
0.42
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov" ], "pre_install": [ "apt-get update", "apt-get install -y gcc pandoc" ], "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
coverage==7.8.0 exceptiongroup==1.2.2 iniconfig==2.1.0 packaging==24.2 pluggy==1.5.0 pytest==8.3.5 pytest-cov==6.0.0 tomli==2.2.1 -e git+https://github.com/scieloorg/xylose.git@09b42b365b375904f5d7102277e3f4e4a3d59e7f#egg=xylose
name: xylose channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - coverage==7.8.0 - exceptiongroup==1.2.2 - iniconfig==2.1.0 - packaging==24.2 - pluggy==1.5.0 - pytest==8.3.5 - pytest-cov==6.0.0 - tomli==2.2.1 prefix: /opt/conda/envs/xylose
[ "tests/test_document.py::JournalTests::test_journal_fulltitle", "tests/test_document.py::JournalTests::test_journal_fulltitle_without_subtitle", "tests/test_document.py::JournalTests::test_journal_fulltitle_without_title", "tests/test_document.py::JournalTests::test_journal_subtitle", "tests/test_document.py::JournalTests::test_journal_without_subtitle" ]
[]
[ "tests/test_document.py::ToolsTests::test_get_date_wrong_day", "tests/test_document.py::ToolsTests::test_get_date_wrong_day_month", "tests/test_document.py::ToolsTests::test_get_date_wrong_day_month_not_int", "tests/test_document.py::ToolsTests::test_get_date_wrong_day_not_int", "tests/test_document.py::ToolsTests::test_get_date_wrong_month_not_int", "tests/test_document.py::ToolsTests::test_get_date_year", "tests/test_document.py::ToolsTests::test_get_date_year_day", "tests/test_document.py::ToolsTests::test_get_date_year_month", "tests/test_document.py::ToolsTests::test_get_date_year_month_day", "tests/test_document.py::ToolsTests::test_get_date_year_month_day_31", "tests/test_document.py::ToolsTests::test_get_language_iso639_1_defined", "tests/test_document.py::ToolsTests::test_get_language_iso639_1_undefined", "tests/test_document.py::ToolsTests::test_get_language_iso639_2_defined", "tests/test_document.py::ToolsTests::test_get_language_iso639_2_undefined", "tests/test_document.py::ToolsTests::test_get_language_without_iso_format", "tests/test_document.py::JournalTests::test_any_issn_priority_electronic", "tests/test_document.py::JournalTests::test_any_issn_priority_electronic_without_electronic", "tests/test_document.py::JournalTests::test_any_issn_priority_print", "tests/test_document.py::JournalTests::test_any_issn_priority_print_without_print", "tests/test_document.py::JournalTests::test_collection_acronym", "tests/test_document.py::JournalTests::test_creation_date", "tests/test_document.py::JournalTests::test_current_status", "tests/test_document.py::JournalTests::test_current_status_lots_of_changes_study_case_1", "tests/test_document.py::JournalTests::test_current_status_some_changes", "tests/test_document.py::JournalTests::test_current_without_v51", "tests/test_document.py::JournalTests::test_journal", "tests/test_document.py::JournalTests::test_journal_abbreviated_title", "tests/test_document.py::JournalTests::test_journal_acronym", "tests/test_document.py::JournalTests::test_journal_title", "tests/test_document.py::JournalTests::test_journal_title_nlm", "tests/test_document.py::JournalTests::test_journal_url", "tests/test_document.py::JournalTests::test_languages", "tests/test_document.py::JournalTests::test_languages_without_v350", "tests/test_document.py::JournalTests::test_load_issn_with_v935_and_v35_ONLINE", "tests/test_document.py::JournalTests::test_load_issn_with_v935_and_v35_PRINT", "tests/test_document.py::JournalTests::test_load_issn_with_v935_equal_v400_and_v35_ONLINE", "tests/test_document.py::JournalTests::test_load_issn_with_v935_equal_v400_and_v35_PRINT", "tests/test_document.py::JournalTests::test_load_issn_with_v935_without_v35", "tests/test_document.py::JournalTests::test_load_issn_without_v935_and_v35_ONLINE", "tests/test_document.py::JournalTests::test_load_issn_without_v935_and_v35_PRINT", "tests/test_document.py::JournalTests::test_load_issn_without_v935_without_v35", "tests/test_document.py::JournalTests::test_periodicity", "tests/test_document.py::JournalTests::test_periodicity_out_of_choices", "tests/test_document.py::JournalTests::test_permission_id", "tests/test_document.py::JournalTests::test_permission_t0", "tests/test_document.py::JournalTests::test_permission_t1", "tests/test_document.py::JournalTests::test_permission_t2", "tests/test_document.py::JournalTests::test_permission_t3", "tests/test_document.py::JournalTests::test_permission_t4", "tests/test_document.py::JournalTests::test_permission_text", 
"tests/test_document.py::JournalTests::test_permission_url", "tests/test_document.py::JournalTests::test_permission_without_v540", "tests/test_document.py::JournalTests::test_permission_without_v540_t", "tests/test_document.py::JournalTests::test_publisher_loc", "tests/test_document.py::JournalTests::test_publisher_name", "tests/test_document.py::JournalTests::test_scielo_issn", "tests/test_document.py::JournalTests::test_status", "tests/test_document.py::JournalTests::test_status_lots_of_changes", "tests/test_document.py::JournalTests::test_status_lots_of_changes_study_case_1", "tests/test_document.py::JournalTests::test_status_lots_of_changes_with_reason", "tests/test_document.py::JournalTests::test_status_some_changes", "tests/test_document.py::JournalTests::test_status_without_v51", "tests/test_document.py::JournalTests::test_subject_areas", "tests/test_document.py::JournalTests::test_update_date", "tests/test_document.py::JournalTests::test_without_journal_abbreviated_title", "tests/test_document.py::JournalTests::test_without_journal_acronym", "tests/test_document.py::JournalTests::test_without_journal_title", "tests/test_document.py::JournalTests::test_without_journal_title_nlm", "tests/test_document.py::JournalTests::test_without_journal_url", "tests/test_document.py::JournalTests::test_without_periodicity", "tests/test_document.py::JournalTests::test_without_publisher_loc", "tests/test_document.py::JournalTests::test_without_publisher_name", "tests/test_document.py::JournalTests::test_without_scielo_domain", "tests/test_document.py::JournalTests::test_without_scielo_domain_title_v690", "tests/test_document.py::JournalTests::test_without_subject_areas", "tests/test_document.py::JournalTests::test_without_wos_citation_indexes", "tests/test_document.py::JournalTests::test_without_wos_subject_areas", "tests/test_document.py::JournalTests::test_wos_citation_indexes", "tests/test_document.py::JournalTests::test_wos_subject_areas", "tests/test_document.py::ArticleTests::test_acceptance_date", "tests/test_document.py::ArticleTests::test_affiliation_just_with_affiliation_name", "tests/test_document.py::ArticleTests::test_affiliation_without_affiliation_name", "tests/test_document.py::ArticleTests::test_affiliations", "tests/test_document.py::ArticleTests::test_ahead_publication_date", "tests/test_document.py::ArticleTests::test_article", "tests/test_document.py::ArticleTests::test_author_with_two_affiliations", "tests/test_document.py::ArticleTests::test_author_with_two_role", "tests/test_document.py::ArticleTests::test_author_without_affiliations", "tests/test_document.py::ArticleTests::test_author_without_surname_and_given_names", "tests/test_document.py::ArticleTests::test_authors", "tests/test_document.py::ArticleTests::test_collection_acronym", "tests/test_document.py::ArticleTests::test_collection_acronym_priorizing_collection", "tests/test_document.py::ArticleTests::test_collection_acronym_retrieving_v992", "tests/test_document.py::ArticleTests::test_collection_name_brazil", "tests/test_document.py::ArticleTests::test_collection_name_undefined", "tests/test_document.py::ArticleTests::test_corporative_authors", "tests/test_document.py::ArticleTests::test_creation_date", "tests/test_document.py::ArticleTests::test_creation_date_1", "tests/test_document.py::ArticleTests::test_creation_date_2", "tests/test_document.py::ArticleTests::test_document_type", "tests/test_document.py::ArticleTests::test_doi", "tests/test_document.py::ArticleTests::test_doi_clean_1", 
"tests/test_document.py::ArticleTests::test_doi_clean_2", "tests/test_document.py::ArticleTests::test_doi_v237", "tests/test_document.py::ArticleTests::test_e_location", "tests/test_document.py::ArticleTests::test_end_page_loaded_crazy_legacy_way_1", "tests/test_document.py::ArticleTests::test_end_page_loaded_crazy_legacy_way_2", "tests/test_document.py::ArticleTests::test_end_page_loaded_through_xml", "tests/test_document.py::ArticleTests::test_file_code", "tests/test_document.py::ArticleTests::test_file_code_crazy_slashs_1", "tests/test_document.py::ArticleTests::test_file_code_crazy_slashs_2", "tests/test_document.py::ArticleTests::test_first_author", "tests/test_document.py::ArticleTests::test_first_author_without_author", "tests/test_document.py::ArticleTests::test_fulltexts_field_fulltexts", "tests/test_document.py::ArticleTests::test_fulltexts_without_field_fulltexts", "tests/test_document.py::ArticleTests::test_html_url", "tests/test_document.py::ArticleTests::test_invalid_document_type", "tests/test_document.py::ArticleTests::test_is_ahead", "tests/test_document.py::ArticleTests::test_issue", "tests/test_document.py::ArticleTests::test_issue_label_field_v4", "tests/test_document.py::ArticleTests::test_issue_label_without_field_v4", "tests/test_document.py::ArticleTests::test_issue_url", "tests/test_document.py::ArticleTests::test_journal_abbreviated_title", "tests/test_document.py::ArticleTests::test_journal_acronym", "tests/test_document.py::ArticleTests::test_journal_title", "tests/test_document.py::ArticleTests::test_keywords", "tests/test_document.py::ArticleTests::test_keywords_iso639_2", "tests/test_document.py::ArticleTests::test_keywords_with_undefined_language", "tests/test_document.py::ArticleTests::test_keywords_without_subfield_k", "tests/test_document.py::ArticleTests::test_keywords_without_subfield_l", "tests/test_document.py::ArticleTests::test_languages_field_fulltexts", "tests/test_document.py::ArticleTests::test_languages_field_v40", "tests/test_document.py::ArticleTests::test_last_page", "tests/test_document.py::ArticleTests::test_mixed_affiliations_1", "tests/test_document.py::ArticleTests::test_normalized_affiliations", "tests/test_document.py::ArticleTests::test_normalized_affiliations_undefined_ISO_3166_CODE", "tests/test_document.py::ArticleTests::test_normalized_affiliations_without_p", "tests/test_document.py::ArticleTests::test_order", "tests/test_document.py::ArticleTests::test_original_abstract_with_just_one_language_defined", "tests/test_document.py::ArticleTests::test_original_abstract_with_language_defined", "tests/test_document.py::ArticleTests::test_original_abstract_with_language_defined_but_different_of_the_article_original_language", "tests/test_document.py::ArticleTests::test_original_abstract_without_language_defined", "tests/test_document.py::ArticleTests::test_original_html_field_body", "tests/test_document.py::ArticleTests::test_original_language_invalid_iso639_2", "tests/test_document.py::ArticleTests::test_original_language_iso639_2", "tests/test_document.py::ArticleTests::test_original_language_original", "tests/test_document.py::ArticleTests::test_original_section_field_v49", "tests/test_document.py::ArticleTests::test_original_title_subfield_t", "tests/test_document.py::ArticleTests::test_original_title_with_just_one_language_defined", "tests/test_document.py::ArticleTests::test_original_title_with_language_defined", 
"tests/test_document.py::ArticleTests::test_original_title_with_language_defined_but_different_of_the_article_original_language", "tests/test_document.py::ArticleTests::test_original_title_without_language_defined", "tests/test_document.py::ArticleTests::test_pdf_url", "tests/test_document.py::ArticleTests::test_processing_date", "tests/test_document.py::ArticleTests::test_processing_date_1", "tests/test_document.py::ArticleTests::test_project_name", "tests/test_document.py::ArticleTests::test_project_sponsors", "tests/test_document.py::ArticleTests::test_publication_contract", "tests/test_document.py::ArticleTests::test_publication_date", "tests/test_document.py::ArticleTests::test_publisher_id", "tests/test_document.py::ArticleTests::test_publisher_loc", "tests/test_document.py::ArticleTests::test_publisher_name", "tests/test_document.py::ArticleTests::test_receive_date", "tests/test_document.py::ArticleTests::test_review_date", "tests/test_document.py::ArticleTests::test_secion_code_field_v49", "tests/test_document.py::ArticleTests::test_section_code_nd_field_v49", "tests/test_document.py::ArticleTests::test_section_code_without_field_v49", "tests/test_document.py::ArticleTests::test_section_field_v49", "tests/test_document.py::ArticleTests::test_section_nd_field_v49", "tests/test_document.py::ArticleTests::test_section_without_field_v49", "tests/test_document.py::ArticleTests::test_start_page", "tests/test_document.py::ArticleTests::test_start_page_loaded_crazy_legacy_way_1", "tests/test_document.py::ArticleTests::test_start_page_loaded_crazy_legacy_way_2", "tests/test_document.py::ArticleTests::test_start_page_loaded_through_xml", "tests/test_document.py::ArticleTests::test_subject_areas", "tests/test_document.py::ArticleTests::test_supplement_issue", "tests/test_document.py::ArticleTests::test_supplement_volume", "tests/test_document.py::ArticleTests::test_thesis_degree", "tests/test_document.py::ArticleTests::test_thesis_organization", "tests/test_document.py::ArticleTests::test_thesis_organization_and_division", "tests/test_document.py::ArticleTests::test_thesis_organization_without_name", "tests/test_document.py::ArticleTests::test_translated_abstracts", "tests/test_document.py::ArticleTests::test_translated_abstracts_without_v83", "tests/test_document.py::ArticleTests::test_translated_abtracts_iso639_2", "tests/test_document.py::ArticleTests::test_translated_htmls_field_body", "tests/test_document.py::ArticleTests::test_translated_section_field_v49", "tests/test_document.py::ArticleTests::test_translated_titles", "tests/test_document.py::ArticleTests::test_translated_titles_iso639_2", "tests/test_document.py::ArticleTests::test_translated_titles_without_v12", "tests/test_document.py::ArticleTests::test_update_date", "tests/test_document.py::ArticleTests::test_update_date_1", "tests/test_document.py::ArticleTests::test_update_date_2", "tests/test_document.py::ArticleTests::test_update_date_3", "tests/test_document.py::ArticleTests::test_volume", "tests/test_document.py::ArticleTests::test_whitwout_acceptance_date", "tests/test_document.py::ArticleTests::test_whitwout_ahead_publication_date", "tests/test_document.py::ArticleTests::test_whitwout_receive_date", "tests/test_document.py::ArticleTests::test_whitwout_review_date", "tests/test_document.py::ArticleTests::test_without_affiliations", "tests/test_document.py::ArticleTests::test_without_authors", "tests/test_document.py::ArticleTests::test_without_citations", 
"tests/test_document.py::ArticleTests::test_without_collection_acronym", "tests/test_document.py::ArticleTests::test_without_corporative_authors", "tests/test_document.py::ArticleTests::test_without_document_type", "tests/test_document.py::ArticleTests::test_without_doi", "tests/test_document.py::ArticleTests::test_without_e_location", "tests/test_document.py::ArticleTests::test_without_html_url", "tests/test_document.py::ArticleTests::test_without_issue", "tests/test_document.py::ArticleTests::test_without_issue_url", "tests/test_document.py::ArticleTests::test_without_journal_abbreviated_title", "tests/test_document.py::ArticleTests::test_without_journal_acronym", "tests/test_document.py::ArticleTests::test_without_journal_title", "tests/test_document.py::ArticleTests::test_without_keywords", "tests/test_document.py::ArticleTests::test_without_last_page", "tests/test_document.py::ArticleTests::test_without_normalized_affiliations", "tests/test_document.py::ArticleTests::test_without_order", "tests/test_document.py::ArticleTests::test_without_original_abstract", "tests/test_document.py::ArticleTests::test_without_original_title", "tests/test_document.py::ArticleTests::test_without_pages", "tests/test_document.py::ArticleTests::test_without_pdf_url", "tests/test_document.py::ArticleTests::test_without_processing_date", "tests/test_document.py::ArticleTests::test_without_project_name", "tests/test_document.py::ArticleTests::test_without_project_sponsor", "tests/test_document.py::ArticleTests::test_without_publication_contract", "tests/test_document.py::ArticleTests::test_without_publication_date", "tests/test_document.py::ArticleTests::test_without_publisher_id", "tests/test_document.py::ArticleTests::test_without_publisher_loc", "tests/test_document.py::ArticleTests::test_without_publisher_name", "tests/test_document.py::ArticleTests::test_without_scielo_domain", "tests/test_document.py::ArticleTests::test_without_scielo_domain_article_v69", "tests/test_document.py::ArticleTests::test_without_scielo_domain_article_v69_and_with_title_v690", "tests/test_document.py::ArticleTests::test_without_scielo_domain_title_v690", "tests/test_document.py::ArticleTests::test_without_start_page", "tests/test_document.py::ArticleTests::test_without_subject_areas", "tests/test_document.py::ArticleTests::test_without_suplement_issue", "tests/test_document.py::ArticleTests::test_without_supplement_volume", "tests/test_document.py::ArticleTests::test_without_thesis_degree", "tests/test_document.py::ArticleTests::test_without_thesis_organization", "tests/test_document.py::ArticleTests::test_without_volume", "tests/test_document.py::ArticleTests::test_without_wos_citation_indexes", "tests/test_document.py::ArticleTests::test_without_wos_subject_areas", "tests/test_document.py::ArticleTests::test_wos_citation_indexes", "tests/test_document.py::ArticleTests::test_wos_subject_areas", "tests/test_document.py::CitationTest::test_a_link_access_date", "tests/test_document.py::CitationTest::test_analytic_institution_for_a_article_citation", "tests/test_document.py::CitationTest::test_analytic_institution_for_a_book_citation", "tests/test_document.py::CitationTest::test_article_title", "tests/test_document.py::CitationTest::test_article_without_title", "tests/test_document.py::CitationTest::test_authors_article", "tests/test_document.py::CitationTest::test_authors_book", "tests/test_document.py::CitationTest::test_authors_link", "tests/test_document.py::CitationTest::test_authors_thesis", 
"tests/test_document.py::CitationTest::test_book_chapter_title", "tests/test_document.py::CitationTest::test_book_edition", "tests/test_document.py::CitationTest::test_book_volume", "tests/test_document.py::CitationTest::test_book_without_chapter_title", "tests/test_document.py::CitationTest::test_citation_sample_congress", "tests/test_document.py::CitationTest::test_citation_sample_link", "tests/test_document.py::CitationTest::test_citation_sample_link_without_comment", "tests/test_document.py::CitationTest::test_conference_edition", "tests/test_document.py::CitationTest::test_conference_name", "tests/test_document.py::CitationTest::test_conference_sponsor", "tests/test_document.py::CitationTest::test_conference_without_name", "tests/test_document.py::CitationTest::test_conference_without_sponsor", "tests/test_document.py::CitationTest::test_date", "tests/test_document.py::CitationTest::test_doi", "tests/test_document.py::CitationTest::test_editor", "tests/test_document.py::CitationTest::test_elocation_14", "tests/test_document.py::CitationTest::test_elocation_514", "tests/test_document.py::CitationTest::test_end_page_14", "tests/test_document.py::CitationTest::test_end_page_514", "tests/test_document.py::CitationTest::test_end_page_withdout_data", "tests/test_document.py::CitationTest::test_first_author_article", "tests/test_document.py::CitationTest::test_first_author_book", "tests/test_document.py::CitationTest::test_first_author_link", "tests/test_document.py::CitationTest::test_first_author_thesis", "tests/test_document.py::CitationTest::test_first_author_without_monographic_authors", "tests/test_document.py::CitationTest::test_first_author_without_monographic_authors_but_not_a_book_citation", "tests/test_document.py::CitationTest::test_index_number", "tests/test_document.py::CitationTest::test_institutions_all_fields", "tests/test_document.py::CitationTest::test_institutions_v11", "tests/test_document.py::CitationTest::test_institutions_v17", "tests/test_document.py::CitationTest::test_institutions_v29", "tests/test_document.py::CitationTest::test_institutions_v50", "tests/test_document.py::CitationTest::test_institutions_v58", "tests/test_document.py::CitationTest::test_invalid_edition", "tests/test_document.py::CitationTest::test_isbn", "tests/test_document.py::CitationTest::test_isbn_but_not_a_book", "tests/test_document.py::CitationTest::test_issn", "tests/test_document.py::CitationTest::test_issn_but_not_an_article", "tests/test_document.py::CitationTest::test_issue_part", "tests/test_document.py::CitationTest::test_issue_title", "tests/test_document.py::CitationTest::test_journal_issue", "tests/test_document.py::CitationTest::test_journal_volume", "tests/test_document.py::CitationTest::test_link", "tests/test_document.py::CitationTest::test_link_title", "tests/test_document.py::CitationTest::test_link_without_title", "tests/test_document.py::CitationTest::test_monographic_authors", "tests/test_document.py::CitationTest::test_monographic_first_author", "tests/test_document.py::CitationTest::test_pages_14", "tests/test_document.py::CitationTest::test_pages_514", "tests/test_document.py::CitationTest::test_pages_withdout_data", "tests/test_document.py::CitationTest::test_publication_type_article", "tests/test_document.py::CitationTest::test_publication_type_book", "tests/test_document.py::CitationTest::test_publication_type_conference", "tests/test_document.py::CitationTest::test_publication_type_link", "tests/test_document.py::CitationTest::test_publication_type_thesis", 
"tests/test_document.py::CitationTest::test_publication_type_undefined", "tests/test_document.py::CitationTest::test_publisher", "tests/test_document.py::CitationTest::test_publisher_address", "tests/test_document.py::CitationTest::test_publisher_address_without_e", "tests/test_document.py::CitationTest::test_series_book", "tests/test_document.py::CitationTest::test_series_but_neither_journal_book_or_conference_citation", "tests/test_document.py::CitationTest::test_series_conference", "tests/test_document.py::CitationTest::test_series_journal", "tests/test_document.py::CitationTest::test_source_book_title", "tests/test_document.py::CitationTest::test_source_journal", "tests/test_document.py::CitationTest::test_source_journal_without_journal_title", "tests/test_document.py::CitationTest::test_sponsor", "tests/test_document.py::CitationTest::test_start_page_14", "tests/test_document.py::CitationTest::test_start_page_514", "tests/test_document.py::CitationTest::test_start_page_withdout_data", "tests/test_document.py::CitationTest::test_thesis_institution", "tests/test_document.py::CitationTest::test_thesis_title", "tests/test_document.py::CitationTest::test_thesis_without_title", "tests/test_document.py::CitationTest::test_title_when_article_citation", "tests/test_document.py::CitationTest::test_title_when_conference_citation", "tests/test_document.py::CitationTest::test_title_when_link_citation", "tests/test_document.py::CitationTest::test_title_when_thesis_citation", "tests/test_document.py::CitationTest::test_with_volume_but_not_a_journal_article_neither_a_book", "tests/test_document.py::CitationTest::test_without_analytic_institution", "tests/test_document.py::CitationTest::test_without_authors", "tests/test_document.py::CitationTest::test_without_date", "tests/test_document.py::CitationTest::test_without_doi", "tests/test_document.py::CitationTest::test_without_edition", "tests/test_document.py::CitationTest::test_without_editor", "tests/test_document.py::CitationTest::test_without_first_author", "tests/test_document.py::CitationTest::test_without_index_number", "tests/test_document.py::CitationTest::test_without_institutions", "tests/test_document.py::CitationTest::test_without_issue", "tests/test_document.py::CitationTest::test_without_issue_part", "tests/test_document.py::CitationTest::test_without_issue_title", "tests/test_document.py::CitationTest::test_without_link", "tests/test_document.py::CitationTest::test_without_monographic_authors", "tests/test_document.py::CitationTest::test_without_monographic_authors_but_not_a_book_citation", "tests/test_document.py::CitationTest::test_without_publisher", "tests/test_document.py::CitationTest::test_without_publisher_address", "tests/test_document.py::CitationTest::test_without_series", "tests/test_document.py::CitationTest::test_without_sponsor", "tests/test_document.py::CitationTest::test_without_thesis_institution", "tests/test_document.py::CitationTest::test_without_volume" ]
[]
BSD 2-Clause "Simplified" License
426
dask__dask-986
d82cf2ac3fa3a61912b7934afe7b2fe9e14cc4ff
2016-02-12 18:20:20
6dc9229362f2d3b1dfa466a8a63831c3c832b4be
diff --git a/dask/bag/core.py b/dask/bag/core.py index fcc3119fb..2c4c9115b 100644 --- a/dask/bag/core.py +++ b/dask/bag/core.py @@ -5,11 +5,11 @@ import itertools import math import bz2 import os +import uuid from fnmatch import fnmatchcase from glob import glob from collections import Iterable, Iterator, defaultdict from functools import wraps, partial -from itertools import repeat from ..utils import ignoring @@ -23,16 +23,12 @@ with ignoring(ImportError): from ..base import Base, normalize_token, tokenize from ..compatibility import (apply, BytesIO, unicode, urlopen, urlparse, - StringIO, GzipFile, BZ2File) + GzipFile) from ..core import list2, quote, istask, get_dependencies, reverse_dict from ..multiprocessing import get as mpget from ..optimize import fuse, cull, inline from ..utils import (file_size, infer_compression, open, system_encoding, - takes_multiple_arguments, textblock) - -names = ('bag-%d' % i for i in itertools.count(1)) -tokens = ('-%d' % i for i in itertools.count(1)) -load_names = ('load-%d' % i for i in itertools.count(1)) + takes_multiple_arguments, textblock, funcname) no_default = '__no__default__' @@ -164,7 +160,7 @@ def to_textfiles(b, path, name_function=str, compression='infer', compression = infer_compression(path) return compression - name = next(names) + name = 'to-textfiles-' + uuid.uuid4().hex dsk = dict(((name, i), (write, (b.name, i), path, get_compression(path), encoding)) for i, path in enumerate(paths)) @@ -199,7 +195,7 @@ class Item(Base): return [self.key] def apply(self, func): - name = next(names) + name = 'apply-{0}-{1}'.format(funcname(func), tokenize(self, func)) dsk = {name: (func, self.key)} return Item(merge(self.dask, dsk), name) @@ -254,7 +250,7 @@ class Bag(Base): >>> list(b.map(lambda x: x * 10)) # doctest: +SKIP [0, 10, 20, 30, 40] """ - name = next(names) + name = 'map-{0}-{1}'.format(funcname(func), tokenize(self, func)) if takes_multiple_arguments(func): func = partial(apply, func) dsk = dict(((name, i), (reify, (map, func, (self.name, i)))) @@ -276,7 +272,8 @@ class Bag(Base): >>> list(b.filter(iseven)) # doctest: +SKIP [0, 2, 4] """ - name = next(names) + name = 'filter-{0}-{1}'.format(funcname(predicate), + tokenize(self, predicate)) dsk = dict(((name, i), (reify, (filter, predicate, (self.name, i)))) for i in range(self.npartitions)) return type(self)(merge(self.dask, dsk), name, self.npartitions) @@ -292,7 +289,8 @@ class Bag(Base): >>> list(b.remove(iseven)) # doctest: +SKIP [1, 3] """ - name = next(names) + name = 'remove-{0}-{1}'.format(funcname(predicate), + tokenize(self, predicate)) dsk = dict(((name, i), (reify, (remove, predicate, (self.name, i)))) for i in range(self.npartitions)) return type(self)(merge(self.dask, dsk), name, self.npartitions) @@ -305,7 +303,8 @@ class Bag(Base): >>> b.map_partitions(myfunc) # doctest: +SKIP """ - name = next(names) + name = 'map-partitions-{0}-{1}'.format(funcname(func), + tokenize(self, func)) dsk = dict(((name, i), (func, (self.name, i))) for i in range(self.npartitions)) return type(self)(merge(self.dask, dsk), name, self.npartitions) @@ -320,7 +319,7 @@ class Bag(Base): >>> list(b.pluck('credits').pluck(0)) # doctest: +SKIP [1, 10] """ - name = next(names) + name = 'pluck-' + tokenize(self, key, default) key = quote(key) if default == no_default: dsk = dict(((name, i), (list, (pluck, key, (self.name, i)))) @@ -391,8 +390,10 @@ class Bag(Base): Bag.foldby """ - a = next(names) - b = next(names) + token = tokenize(self, binop, combine, initial) + combine = combine or binop + a = 
'foldbinop-{0}-{1}'.format(funcname(binop), token) + b = 'foldcombine-{0}-{1}'.format(funcname(combine), token) initial = quote(initial) if initial is not no_default: dsk = dict(((a, i), (reduce, binop, (self.name, i), initial)) @@ -400,7 +401,7 @@ class Bag(Base): else: dsk = dict(((a, i), (reduce, binop, (self.name, i))) for i in range(self.npartitions)) - dsk2 = {b: (reduce, combine or binop, list(dsk.keys()))} + dsk2 = {b: (reduce, combine, list(dsk.keys()))} return Item(merge(self.dask, dsk, dsk2), b) def frequencies(self, split_every=None): @@ -426,8 +427,9 @@ class Bag(Base): >>> list(b.topk(2, lambda x: -x)) # doctest: +SKIP [3, 4] """ - a = next(names) - b = next(names) + token = tokenize(self, k, key) + a = 'topk-a-' + token + b = 'topk-b-' + token if key: if callable(key) and takes_multiple_arguments(key): key = partial(apply, key) @@ -448,9 +450,10 @@ class Bag(Base): >>> sorted(b.distinct()) ['Alice', 'Bob'] """ - a = next(names) + token = tokenize(self) + a = 'distinct-a-' + token + b = 'distinct-b-' + token dsk = dict(((a, i), (set, key)) for i, key in enumerate(self._keys())) - b = next(names) dsk2 = {(b, 0): (apply, set.union, quote(list(dsk.keys())))} return type(self)(merge(self.dask, dsk, dsk2), b, 1) @@ -482,19 +485,22 @@ class Bag(Base): """ if split_every is None: split_every = 8 - a = next(names) - b = next(names) + token = tokenize(self, perpartition, aggregate, split_every) + a = 'reduction-part-{0}-{1}'.format(funcname(perpartition), token) dsk = dict(((a, i), (perpartition, (self.name, i))) for i in range(self.npartitions)) k = self.npartitions b = a + fmt = 'reduction-agg-{0}-'.format(funcname(aggregate)) + '-{0}-' + token + depth = 0 while k > 1: - c = next(names) + c = fmt.format(depth) dsk2 = dict(((c, i), (aggregate, [(b, j) for j in inds])) for i, inds in enumerate(partition_all(split_every, range(k)))) dsk.update(dsk2) k = len(dsk2) b = c + depth += 1 if out_type is Item: dsk[c] = dsk.pop((c, 0)) @@ -502,7 +508,6 @@ class Bag(Base): else: return Bag(merge(self.dask, dsk), c, 1) - @wraps(sum) def sum(self, split_every=None): return self.reduction(sum, sum, split_every=split_every) @@ -578,7 +583,7 @@ class Bag(Base): assert not isinstance(other, Bag) if on_other is None: on_other = on_self - name = next(names) + name = 'join-' + tokenize(self, other, on_self, on_other) dsk = dict(((name, i), (list, (join, on_other, other, on_self, (self.name, i)))) for i in range(self.npartitions)) @@ -587,7 +592,7 @@ class Bag(Base): def product(self, other): """ Cartesian product between two bags """ assert isinstance(other, Bag) - name = next(names) + name = 'product-' + tokenize(self, other) n, m = self.npartitions, other.npartitions dsk = dict(((name, i*m + j), (list, (itertools.product, (self.name, i), @@ -667,8 +672,9 @@ class Bag(Base): toolz.reduceby pyspark.combineByKey """ - a = next(names) - b = next(names) + token = tokenize(self, key, binop, initial, combine, combine_initial) + a = 'foldby-a-' + token + b = 'foldby-b-' + token if combine is None: combine = binop if initial is not no_default: @@ -705,7 +711,7 @@ class Bag(Base): >>> b.take(3) # doctest: +SKIP (0, 1, 2) """ - name = next(names) + name = 'take-' + tokenize(self, k) dsk = {(name, 0): (list, (take, k, (self.name, 0)))} b = Bag(merge(self.dask, dsk), name, 1) if compute: @@ -726,7 +732,7 @@ class Bag(Base): >>> list(b.concat()) [1, 2, 3] """ - name = next(names) + name = 'concat-' + tokenize(self) dsk = dict(((name, i), (list, (toolz.concat, (self.name, i)))) for i in range(self.npartitions)) 
return type(self)(merge(self.dask, dsk), name, self.npartitions) @@ -751,22 +757,23 @@ class Bag(Base): """ if npartitions is None: npartitions = self.npartitions + token = tokenize(self, grouper, npartitions, blocksize) import partd - p = ('partd' + next(tokens),) + p = ('partd-' + token,) try: dsk1 = {p: (partd.Python, (partd.Snappy, partd.File()))} except AttributeError: dsk1 = {p: (partd.Python, partd.File())} # Partition data on disk - name = next(names) + name = 'groupby-part-{0}-{1}'.format(funcname(grouper), token) dsk2 = dict(((name, i), (partition, grouper, (self.name, i), npartitions, p, blocksize)) for i in range(self.npartitions)) # Barrier - barrier_token = 'barrier' + next(tokens) + barrier_token = 'groupby-barrier-' + token def barrier(args): return 0 @@ -774,7 +781,7 @@ class Bag(Base): dsk3 = {barrier_token: (barrier, list(dsk2))} # Collect groups - name = next(names) + name = 'groupby-collect-' + token dsk4 = dict(((name, i), (collect, grouper, i, p, barrier_token)) for i in range(npartitions)) @@ -816,7 +823,7 @@ class Bag(Base): columns = sorted(head) elif isinstance(head, (tuple, list)): columns = list(range(len(head))) - name = next(names) + name = 'to_dataframe-' + tokenize(self, columns) DataFrame = partial(pd.DataFrame, columns=columns) dsk = dict(((name, i), (DataFrame, (list2, (self.name, i)))) for i in range(self.npartitions)) @@ -885,7 +892,7 @@ def from_filenames(filenames, chunkbytes=None, compression='infer', full_filenames = [os.path.abspath(f) for f in filenames] - name = 'from-filename' + next(tokens) + name = 'from-filenames-' + uuid.uuid4().hex # Make sure `linesep` is not a byte string because `io.TextIOWrapper` in # python versions other than 2.7 dislike byte strings for the `newline` @@ -992,7 +999,7 @@ def from_s3(bucket_name, paths='*', aws_access_key=None, aws_secret_key=None, get_key = partial(_get_key, bucket_name, conn_args) - name = next(load_names) + name = 'from_s3-' + uuid.uuid4().hex dsk = dict(((name, i), (list, (get_key, k))) for i, k in enumerate(paths)) return Bag(dsk, name, len(paths)) @@ -1035,7 +1042,7 @@ def from_hdfs(path, hdfs=None, host='localhost', port='50070', user_name=None): if not filenames: raise ValueError("No files found for path %s" % path) - name = next(names) + name = 'from_hdfs-' + uuid.uuid4().hex dsk = dict() for i, fn in enumerate(filenames): ext = fn.split('.')[-1] @@ -1106,7 +1113,7 @@ def from_sequence(seq, partition_size=None, npartitions=None): partition_size = int(len(seq) / 100) parts = list(partition_all(partition_size, seq)) - name = next(load_names) + name = 'from_sequence-' + tokenize(seq, partition_size) d = dict(((name, i), part) for i, part in enumerate(parts)) return Bag(d, name, len(d)) @@ -1131,7 +1138,8 @@ def from_castra(x, columns=None, index=False): if columns is None: columns = x.columns - name = 'from-castra-' + next(tokens) + name = 'from-castra-' + tokenize(os.path.getmtime(x.path), x.path, + columns, index) dsk = dict(((name, i), (load_castra_partition, x, part, columns, index)) for i, part in enumerate(x.partitions)) return Bag(dsk, name, len(x.partitions)) @@ -1181,7 +1189,7 @@ def from_url(urls): """ if isinstance(urls, str): urls = [urls] - name = next(load_names) + name = 'from_url-' + uuid.uuid4().hex dsk = {} for i, u in enumerate(urls): dsk[(name, i)] = (list, (urlopen, u)) @@ -1208,7 +1216,7 @@ def concat(bags): >>> list(c) [1, 2, 3, 4, 5, 6] """ - name = next(names) + name = 'concat-' + tokenize(*bags) counter = itertools.count(0) dsk = dict(((name, next(counter)), key) for 
bag in bags for key in sorted(bag._keys()))
dask.bag does not use hashed keys
We should use `tokenize` rather than the current `tokens` within dask.bag

cc @jcrist
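A minimal sketch of the deterministic-naming idea behind this request: `tokenize` is the real content-hashing helper in `dask.base` (already used elsewhere in dask at this point), while `deterministic_name` is a made-up illustration, not dask API.

```python
from dask.base import tokenize  # content-hashing helper shipped with dask

def deterministic_name(op, *args, **kwargs):
    # Hash the inputs instead of drawing from a global counter: the same
    # operation applied to the same inputs always yields the same key,
    # so identical sub-graphs can be shared rather than duplicated.
    return '{0}-{1}'.format(op, tokenize(*args, **kwargs))

# Two identical calls produce identical task names:
assert deterministic_name('map', [1, 2, 3], len) == \
       deterministic_name('map', [1, 2, 3], len)
```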
dask/dask
diff --git a/dask/bag/tests/test_bag.py b/dask/bag/tests/test_bag.py index 6b0273e34..6f46f4434 100644 --- a/dask/bag/tests/test_bag.py +++ b/dask/bag/tests/test_bag.py @@ -59,6 +59,7 @@ def test_map(): expected = merge(dsk, dict(((c.name, i), (reify, (map, inc, (b.name, i)))) for i in range(b.npartitions))) assert c.dask == expected + assert c.name == b.map(inc).name def test_map_function_with_multiple_arguments(): @@ -108,10 +109,14 @@ def test_filter(): (reify, (filter, iseven, (b.name, i)))) for i in range(b.npartitions))) assert c.dask == expected + assert c.name == b.filter(iseven).name def test_remove(): - assert list(b.remove(lambda x: x % 2 == 0)) == [1, 3] * 3 + f = lambda x: x % 2 == 0 + c = b.remove(f) + assert list(c) == [1, 3] * 3 + assert c.name == b.remove(f).name def test_iter(): @@ -126,17 +131,26 @@ def test_pluck(): assert set(b.pluck(0)) == set([1, 2, 3, 4]) assert set(b.pluck(1)) == set([10, 20, 30, 40]) assert set(b.pluck([1, 0])) == set([(10, 1), (20, 2), (30, 3), (40, 4)]) + assert b.pluck([1, 0]).name == b.pluck([1, 0]).name def test_pluck_with_default(): b = db.from_sequence(['Hello', '', 'World']) assert raises(IndexError, lambda: list(b.pluck(0))) assert list(b.pluck(0, None)) == ['H', None, 'W'] + assert b.pluck(0, None).name == b.pluck(0, None).name + assert b.pluck(0).name != b.pluck(0, None).name def test_fold(): - assert b.fold(add).compute() == sum(L) - assert b.fold(add, initial=10).compute() == sum(L) + 10 * b.npartitions + c = b.fold(add) + assert c.compute() == sum(L) + assert c.key == b.fold(add).key + + c2 = b.fold(add, initial=10) + assert c2.key != c.key + assert c2.compute() == sum(L) + 10 * b.npartitions + assert c2.key == b.fold(add, initial=10).key c = db.from_sequence(range(5), npartitions=3) def binop(acc, x): @@ -144,7 +158,9 @@ def test_fold(): acc.add(x) return acc - assert c.fold(binop, set.union, initial=set()).compute() == set(c) + d = c.fold(binop, set.union, initial=set()) + assert d.compute() == set(c) + assert d.key == c.fold(binop, set.union, initial=set()).key d = db.from_sequence('hello') assert set(d.fold(lambda a, b: ''.join([a, b]), initial='').compute()) == set('hello') @@ -156,23 +172,31 @@ def test_fold(): def test_distinct(): assert sorted(b.distinct()) == [0, 1, 2, 3, 4] + assert b.distinct().name == b.distinct().name def test_frequencies(): - assert dict(b.frequencies()) == {0: 3, 1: 3, 2: 3, 3: 3, 4: 3} - assert dict(b.frequencies(split_every=2)) == {0: 3, 1: 3, 2: 3, 3: 3, 4: 3} + c = b.frequencies() + assert dict(c) == {0: 3, 1: 3, 2: 3, 3: 3, 4: 3} + c2 = b.frequencies(split_every=2) + assert dict(c2) == {0: 3, 1: 3, 2: 3, 3: 3, 4: 3} + assert c.name == b.frequencies().name + assert c.name != c2.name + assert c2.name == b.frequencies(split_every=2).name def test_topk(): assert list(b.topk(4)) == [4, 4, 4, 3] assert list(b.topk(4, key=lambda x: -x).compute(get=dask.get)) == \ [0, 0, 0, 1] + assert b.topk(4).name == b.topk(4).name def test_topk_with_non_callable_key(): b = db.from_sequence([(1, 10), (2, 9), (3, 8)], npartitions=2) assert list(b.topk(2, key=1)) == [(1, 10), (2, 9)] assert list(b.topk(2, key=0)) == [(3, 8), (2, 9)] + assert b.topk(2, key=1).name == b.topk(2, key=1).name def test_topk_with_multiarg_lambda(): @@ -183,6 +207,7 @@ def test_topk_with_multiarg_lambda(): def test_lambdas(): assert list(b.map(lambda x: x + 1)) == list(b.map(inc)) + def test_reductions(): assert int(b.count()) == 15 assert int(b.sum()) == 30 @@ -190,6 +215,8 @@ def test_reductions(): assert int(b.min()) == 0 assert int(b.any()) 
== True assert int(b.all()) == False # some zeros exist + assert b.all().key == b.all().key + assert b.all().key != b.any().key def test_tree_reductions(): @@ -208,30 +235,39 @@ def test_tree_reductions(): assert c.compute() == d.compute() assert len(c.dask) > len(d.dask) + assert c.key != d.key + assert c.key == b.sum(split_every=2).key + assert c.key != b.sum().key + def test_mean(): assert b.mean().compute(get=dask.get) == 2.0 assert float(b.mean()) == 2.0 + def test_std(): assert b.std().compute(get=dask.get) == math.sqrt(2.0) assert float(b.std()) == math.sqrt(2.0) + def test_var(): assert b.var().compute(get=dask.get) == 2.0 assert float(b.var()) == 2.0 def test_join(): - assert list(b.join([1, 2, 3], on_self=isodd, on_other=iseven)) == \ - list(join(iseven, [1, 2, 3], isodd, list(b))) + c = b.join([1, 2, 3], on_self=isodd, on_other=iseven) + assert list(c) == list(join(iseven, [1, 2, 3], isodd, list(b))) assert list(b.join([1, 2, 3], isodd)) == \ list(join(isodd, [1, 2, 3], isodd, list(b))) + assert c.name == b.join([1, 2, 3], on_self=isodd, on_other=iseven).name + def test_foldby(): c = b.foldby(iseven, add, 0, add, 0) assert (reduceby, iseven, add, (b.name, 0), 0) in list(c.dask.values()) assert set(c) == set(reduceby(iseven, lambda acc, x: acc + x, L, 0).items()) + assert c.name == b.foldby(iseven, add, 0, add, 0).name c = b.foldby(iseven, lambda acc, x: acc + x) assert set(c) == set(reduceby(iseven, lambda acc, x: acc + x, L, 0).items()) @@ -239,6 +275,8 @@ def test_foldby(): def test_map_partitions(): assert list(b.map_partitions(len)) == [5, 5, 5] + assert b.map_partitions(len).name == b.map_partitions(len).name + assert b.map_partitions(lambda a: len(a) + 1).name != b.map_partitions(len).name def test_lazify_task(): @@ -296,6 +334,7 @@ def test_map_is_lazy(): from dask.bag.core import map assert isinstance(map(lambda x: x, [1, 2, 3]), Iterator) + def test_can_use_dict_to_make_concrete(): assert isinstance(dict(b.frequencies()), dict) @@ -318,6 +357,8 @@ def test_from_castra(): list(default) == [(i, str(i)) for i in range(100)]) assert list(with_columns) == list(range(100)) assert list(with_index) == list(zip(range(100), range(100))) + assert default.name != with_columns.name != with_index.name + assert with_index.name == db.from_castra(c, 'x', index=True).name @pytest.mark.slow @@ -477,6 +518,8 @@ def test_product(): z = x.product(y) assert set(z) == set([(i, j) for i in [1, 2, 3, 4] for j in [10, 20, 30]]) + assert z.name != b2.name + assert z.name == x.product(y).name def test_partition_collect(): @@ -491,14 +534,16 @@ def test_partition_collect(): def test_groupby(): - c = b.groupby(lambda x: x) + c = b.groupby(identity) result = dict(c) assert result == {0: [0, 0 ,0], 1: [1, 1, 1], 2: [2, 2, 2], 3: [3, 3, 3], 4: [4, 4, 4]} - assert b.groupby(lambda x: x).npartitions == b.npartitions + assert c.npartitions == b.npartitions + assert c.name == b.groupby(identity).name + assert c.name != b.groupby(lambda x: x + 1).name def test_groupby_with_indexer(): @@ -507,6 +552,7 @@ def test_groupby_with_indexer(): assert valmap(sorted, result) == {1: [[1, 2, 3], [1, 4, 9]], 2: [[2, 3, 4]]} + def test_groupby_with_npartitions_changed(): result = b.groupby(lambda x: x, npartitions=1) result2 = dict(result) @@ -525,6 +571,10 @@ def test_concat(): c = db.concat([a, b]) assert list(c) == [1, 2, 3, 4, 5, 6] + assert c.name == db.concat([a, b]).name + assert b.concat().name != a.concat().name + assert b.concat().name == b.concat().name + b = db.from_sequence([1, 2, 3]).map(lambda x: x * [1, 2, 
3]) assert list(b.concat()) == [1, 2, 3] * sum([1, 2, 3]) @@ -570,6 +620,10 @@ def test_to_dataframe(): assert (df2.compute().values == df.compute().values).all() + assert df2._name == b.to_dataframe()._name + assert df2._name != df._name + + def test_to_textfiles(): b = db.from_sequence(['abc', '123', 'xyz'], npartitions=2) dir = mkdtemp() @@ -648,6 +702,8 @@ def test_string_namespace(): assert list(b.str.match('*Smith')) == ['Alice Smith', 'Charlie Smith'] assert raises(AttributeError, lambda: b.str.sfohsofhf) + assert b.str.match('*Smith').name == b.str.match('*Smith').name + assert b.str.match('*Smith').name != b.str.match('*John').name def test_string_namespace_with_unicode(): @@ -743,6 +799,7 @@ def test_from_imperative(): from dask.imperative import value a, b, c = value([1, 2, 3]), value([4, 5, 6]), value([7, 8, 9]) bb = from_imperative([a, b, c]) + assert bb.name == from_imperative([a, b, c]).name assert isinstance(bb, Bag) assert list(bb) == [1, 2, 3, 4, 5, 6, 7, 8, 9]
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 3, "test_score": 1 }, "num_modified_files": 1 }
1.8
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[complete]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "numpy>=1.16.0 pandas>=1.0.0 cloudpickle partd distributed s3fs toolz psutil pytables bokeh bcolz scipy h5py ipython", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y graphviz liblzma-dev" ], "python": "3.5", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
aiobotocore @ file:///opt/conda/conda-bld/aiobotocore_1643638228694/work aiohttp @ file:///tmp/build/80754af9/aiohttp_1632748060317/work aioitertools @ file:///tmp/build/80754af9/aioitertools_1607109665762/work async-timeout==3.0.1 attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work backcall @ file:///home/ktietz/src/ci/backcall_1611930011877/work bcolz==1.2.1 bokeh @ file:///tmp/build/80754af9/bokeh_1620710048147/work botocore @ file:///opt/conda/conda-bld/botocore_1642672735464/work brotlipy==0.7.0 certifi==2021.5.30 cffi @ file:///tmp/build/80754af9/cffi_1625814693874/work chardet @ file:///tmp/build/80754af9/chardet_1607706739153/work click==8.0.3 cloudpickle @ file:///tmp/build/80754af9/cloudpickle_1632508026186/work contextvars==2.4 cryptography @ file:///tmp/build/80754af9/cryptography_1635366128178/work cytoolz==0.11.0 -e git+https://github.com/dask/dask.git@d82cf2ac3fa3a61912b7934afe7b2fe9e14cc4ff#egg=dask decorator @ file:///opt/conda/conda-bld/decorator_1643638310831/work distributed @ file:///tmp/build/80754af9/distributed_1615054599257/work fsspec @ file:///opt/conda/conda-bld/fsspec_1642510437511/work h5py==2.10.0 HeapDict @ file:///Users/ktietz/demo/mc3/conda-bld/heapdict_1630598515714/work idna @ file:///tmp/build/80754af9/idna_1637925883363/work idna-ssl @ file:///tmp/build/80754af9/idna_ssl_1611752490495/work immutables @ file:///tmp/build/80754af9/immutables_1628888996840/work importlib-metadata==4.8.3 iniconfig==1.1.1 ipython @ file:///tmp/build/80754af9/ipython_1593447367857/work ipython-genutils @ file:///tmp/build/80754af9/ipython_genutils_1606773439826/work jedi @ file:///tmp/build/80754af9/jedi_1606932572482/work Jinja2 @ file:///opt/conda/conda-bld/jinja2_1647436528585/work jmespath @ file:///Users/ktietz/demo/mc3/conda-bld/jmespath_1630583964805/work locket==0.2.1 MarkupSafe @ file:///tmp/build/80754af9/markupsafe_1621528150516/work mock @ file:///tmp/build/80754af9/mock_1607622725907/work msgpack @ file:///tmp/build/80754af9/msgpack-python_1612287171716/work multidict @ file:///tmp/build/80754af9/multidict_1607367768400/work numexpr @ file:///tmp/build/80754af9/numexpr_1618853194344/work numpy @ file:///tmp/build/80754af9/numpy_and_numpy_base_1603483703303/work olefile @ file:///Users/ktietz/demo/mc3/conda-bld/olefile_1629805411829/work packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pandas==1.1.5 parso==0.7.0 partd @ file:///opt/conda/conda-bld/partd_1647245470509/work pexpect @ file:///tmp/build/80754af9/pexpect_1605563209008/work pickleshare @ file:///tmp/build/80754af9/pickleshare_1606932040724/work Pillow @ file:///tmp/build/80754af9/pillow_1625670622947/work pluggy==1.0.0 prompt-toolkit @ file:///tmp/build/80754af9/prompt-toolkit_1633440160888/work psutil @ file:///tmp/build/80754af9/psutil_1612297621795/work ptyprocess @ file:///tmp/build/80754af9/ptyprocess_1609355006118/work/dist/ptyprocess-0.7.0-py2.py3-none-any.whl py==1.11.0 pycparser @ file:///tmp/build/80754af9/pycparser_1636541352034/work Pygments @ file:///opt/conda/conda-bld/pygments_1644249106324/work pyOpenSSL @ file:///opt/conda/conda-bld/pyopenssl_1643788558760/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work PySocks @ file:///tmp/build/80754af9/pysocks_1605305763431/work pytest==7.0.1 python-dateutil @ file:///tmp/build/80754af9/python-dateutil_1626374649649/work pytz==2021.3 PyYAML==5.4.1 s3fs @ file:///opt/conda/conda-bld/s3fs_1643701468749/work scipy @ file:///tmp/build/80754af9/scipy_1597686635649/work six @ 
file:///tmp/build/80754af9/six_1644875935023/work sortedcontainers @ file:///tmp/build/80754af9/sortedcontainers_1623949099177/work tables==3.6.1 tblib @ file:///Users/ktietz/demo/mc3/conda-bld/tblib_1629402031467/work tomli==1.2.3 toolz @ file:///tmp/build/80754af9/toolz_1636545406491/work tornado @ file:///tmp/build/80754af9/tornado_1606942266872/work traitlets @ file:///tmp/build/80754af9/traitlets_1632746497744/work typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work urllib3 @ file:///opt/conda/conda-bld/urllib3_1643638302206/work wcwidth @ file:///Users/ktietz/demo/mc3/conda-bld/wcwidth_1629357192024/work wrapt==1.12.1 yarl @ file:///tmp/build/80754af9/yarl_1606939915466/work zict==2.0.0 zipp==3.6.0
name: dask channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - aiobotocore=2.1.0=pyhd3eb1b0_0 - aiohttp=3.7.4.post0=py36h7f8727e_2 - aioitertools=0.7.1=pyhd3eb1b0_0 - async-timeout=3.0.1=py36h06a4308_0 - attrs=21.4.0=pyhd3eb1b0_0 - backcall=0.2.0=pyhd3eb1b0_0 - bcolz=1.2.1=py36h04863e7_0 - blas=1.0=openblas - blosc=1.21.3=h6a678d5_0 - bokeh=2.3.2=py36h06a4308_0 - botocore=1.23.24=pyhd3eb1b0_0 - brotlipy=0.7.0=py36h27cfd23_1003 - bzip2=1.0.8=h5eee18b_6 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - cffi=1.14.6=py36h400218f_0 - chardet=4.0.0=py36h06a4308_1003 - click=8.0.3=pyhd3eb1b0_0 - cloudpickle=2.0.0=pyhd3eb1b0_0 - contextvars=2.4=py_0 - cryptography=35.0.0=py36hd23ed53_0 - cytoolz=0.11.0=py36h7b6447c_0 - decorator=5.1.1=pyhd3eb1b0_0 - distributed=2021.3.0=py36h06a4308_0 - freetype=2.12.1=h4a9f257_0 - fsspec=2022.1.0=pyhd3eb1b0_0 - giflib=5.2.2=h5eee18b_0 - h5py=2.10.0=py36h7918eee_0 - hdf5=1.10.4=hb1b8bf9_0 - heapdict=1.0.1=pyhd3eb1b0_0 - idna=3.3=pyhd3eb1b0_0 - idna_ssl=1.1.0=py36h06a4308_0 - immutables=0.16=py36h7f8727e_0 - ipython=7.16.1=py36h5ca1d4c_0 - ipython_genutils=0.2.0=pyhd3eb1b0_1 - jedi=0.17.2=py36h06a4308_1 - jinja2=3.0.3=pyhd3eb1b0_0 - jmespath=0.10.0=pyhd3eb1b0_0 - jpeg=9e=h5eee18b_3 - lcms2=2.16=hb9589c4_0 - ld_impl_linux-64=2.40=h12ee557_0 - lerc=4.0.0=h6a678d5_0 - libdeflate=1.22=h5eee18b_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgfortran-ng=7.5.0=ha8ba4b0_17 - libgfortran4=7.5.0=ha8ba4b0_17 - libgomp=11.2.0=h1234567_1 - libopenblas=0.3.18=hf726d26_0 - libpng=1.6.39=h5eee18b_0 - libstdcxx-ng=11.2.0=h1234567_1 - libtiff=4.5.1=hffd6297_1 - libwebp=1.2.4=h11a3e52_1 - libwebp-base=1.2.4=h5eee18b_1 - locket=0.2.1=py36h06a4308_1 - lz4-c=1.9.4=h6a678d5_1 - lzo=2.10=h7b6447c_2 - markupsafe=2.0.1=py36h27cfd23_0 - mock=4.0.3=pyhd3eb1b0_0 - msgpack-python=1.0.2=py36hff7bd54_1 - multidict=5.1.0=py36h27cfd23_2 - ncurses=6.4=h6a678d5_0 - numexpr=2.7.3=py36h4be448d_1 - numpy=1.19.2=py36h6163131_0 - numpy-base=1.19.2=py36h75fe3a5_0 - olefile=0.46=pyhd3eb1b0_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pandas=1.1.5=py36ha9443f7_0 - parso=0.7.0=py_0 - partd=1.2.0=pyhd3eb1b0_1 - pexpect=4.8.0=pyhd3eb1b0_3 - pickleshare=0.7.5=pyhd3eb1b0_1003 - pillow=8.3.1=py36h5aabda8_0 - pip=21.2.2=py36h06a4308_0 - prompt-toolkit=3.0.20=pyhd3eb1b0_0 - psutil=5.8.0=py36h27cfd23_1 - ptyprocess=0.7.0=pyhd3eb1b0_2 - pycparser=2.21=pyhd3eb1b0_0 - pygments=2.11.2=pyhd3eb1b0_0 - pyopenssl=22.0.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pysocks=1.7.1=py36h06a4308_0 - pytables=3.6.1=py36h71ec239_0 - python=3.6.13=h12debd9_1 - python-dateutil=2.8.2=pyhd3eb1b0_0 - pytz=2021.3=pyhd3eb1b0_0 - pyyaml=5.4.1=py36h27cfd23_1 - readline=8.2=h5eee18b_0 - s3fs=2022.1.0=pyhd3eb1b0_0 - scipy=1.5.2=py36habc2bb6_0 - setuptools=58.0.4=py36h06a4308_0 - six=1.16.0=pyhd3eb1b0_1 - sortedcontainers=2.4.0=pyhd3eb1b0_0 - sqlite=3.45.3=h5eee18b_0 - tblib=1.7.0=pyhd3eb1b0_0 - tk=8.6.14=h39e8969_0 - toolz=0.11.2=pyhd3eb1b0_0 - tornado=6.1=py36h27cfd23_0 - traitlets=4.3.3=py36h06a4308_0 - typing-extensions=4.1.1=hd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - urllib3=1.26.8=pyhd3eb1b0_0 - wcwidth=0.2.5=pyhd3eb1b0_0 - wheel=0.37.1=pyhd3eb1b0_0 - wrapt=1.12.1=py36h7b6447c_1 - xz=5.6.4=h5eee18b_1 - yaml=0.2.5=h7b6447c_0 - yarl=1.6.3=py36h27cfd23_0 - zict=2.0.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - zstd=1.5.6=hc292b87_0 - pip: - 
importlib-metadata==4.8.3 - iniconfig==1.1.1 - pluggy==1.0.0 - py==1.11.0 - pytest==7.0.1 - tomli==1.2.3 - zipp==3.6.0 prefix: /opt/conda/envs/dask
[ "dask/bag/tests/test_bag.py::test_map", "dask/bag/tests/test_bag.py::test_filter", "dask/bag/tests/test_bag.py::test_remove", "dask/bag/tests/test_bag.py::test_pluck", "dask/bag/tests/test_bag.py::test_pluck_with_default", "dask/bag/tests/test_bag.py::test_fold", "dask/bag/tests/test_bag.py::test_distinct", "dask/bag/tests/test_bag.py::test_frequencies", "dask/bag/tests/test_bag.py::test_topk", "dask/bag/tests/test_bag.py::test_topk_with_non_callable_key", "dask/bag/tests/test_bag.py::test_reductions", "dask/bag/tests/test_bag.py::test_tree_reductions", "dask/bag/tests/test_bag.py::test_join", "dask/bag/tests/test_bag.py::test_foldby", "dask/bag/tests/test_bag.py::test_map_partitions", "dask/bag/tests/test_bag.py::test_product", "dask/bag/tests/test_bag.py::test_groupby", "dask/bag/tests/test_bag.py::test_concat", "dask/bag/tests/test_bag.py::test_string_namespace" ]
[]
[ "dask/bag/tests/test_bag.py::test_Bag", "dask/bag/tests/test_bag.py::test_keys", "dask/bag/tests/test_bag.py::test_map_function_with_multiple_arguments", "dask/bag/tests/test_bag.py::test_map_with_constructors", "dask/bag/tests/test_bag.py::test_map_with_builtins", "dask/bag/tests/test_bag.py::test_iter", "dask/bag/tests/test_bag.py::test_topk_with_multiarg_lambda", "dask/bag/tests/test_bag.py::test_lambdas", "dask/bag/tests/test_bag.py::test_mean", "dask/bag/tests/test_bag.py::test_std", "dask/bag/tests/test_bag.py::test_var", "dask/bag/tests/test_bag.py::test_lazify_task", "dask/bag/tests/test_bag.py::test_lazify", "dask/bag/tests/test_bag.py::test_inline_singleton_lists", "dask/bag/tests/test_bag.py::test_take", "dask/bag/tests/test_bag.py::test_map_is_lazy", "dask/bag/tests/test_bag.py::test_can_use_dict_to_make_concrete", "dask/bag/tests/test_bag.py::test_from_filenames", "dask/bag/tests/test_bag.py::test_from_filenames_gzip", "dask/bag/tests/test_bag.py::test_from_filenames_bz2", "dask/bag/tests/test_bag.py::test_from_filenames_large", "dask/bag/tests/test_bag.py::test_from_filenames_encoding", "dask/bag/tests/test_bag.py::test_from_filenames_large_gzip", "dask/bag/tests/test_bag.py::test__parse_s3_URI", "dask/bag/tests/test_bag.py::test_from_sequence", "dask/bag/tests/test_bag.py::test_from_long_sequence", "dask/bag/tests/test_bag.py::test_partition_collect", "dask/bag/tests/test_bag.py::test_groupby_with_indexer", "dask/bag/tests/test_bag.py::test_groupby_with_npartitions_changed", "dask/bag/tests/test_bag.py::test_concat_after_map", "dask/bag/tests/test_bag.py::test_args", "dask/bag/tests/test_bag.py::test_to_dataframe", "dask/bag/tests/test_bag.py::test_to_textfiles", "dask/bag/tests/test_bag.py::test_to_textfiles_encoding", "dask/bag/tests/test_bag.py::test_to_textfiles_inputs", "dask/bag/tests/test_bag.py::test_bz2_stream", "dask/bag/tests/test_bag.py::test_string_namespace_with_unicode", "dask/bag/tests/test_bag.py::test_str_empty_split", "dask/bag/tests/test_bag.py::test_stream_decompress", "dask/bag/tests/test_bag.py::test_map_with_iterator_function", "dask/bag/tests/test_bag.py::test_ensure_compute_output_is_concrete", "dask/bag/tests/test_bag.py::test_bag_class_extend", "dask/bag/tests/test_bag.py::test_gh715", "dask/bag/tests/test_bag.py::test_bag_compute_forward_kwargs", "dask/bag/tests/test_bag.py::test_to_imperative", "dask/bag/tests/test_bag.py::test_from_imperative" ]
[]
BSD 3-Clause "New" or "Revised" License
427
sympy__sympy-10580
5631818c6f486ab17597602ddcc9fbfee7e4d392
2016-02-12 18:59:52
0241b35cae5f2adc04dc37b25f7dc9c5f00bd746
asmeurer: Does this do the right thing if the condition doesn't evaluate to True or False for some element of the finite set?
AnishShah: > Does this do the right thing if the condition doesn't evaluate to True or False for some element of the finite set?
Yes, it does the right thing. I have added a test for that.
AnishShah: I have updated the PR. I have added some more tests and I used `fuzzy_bool` & `sift` instead of `intersect`.
AnishShah: test_pretty_ConditionSet fails; it might be because of my changes. I will have a look. Should I include those changes in this PR itself?
AnishShah: @asmeurer @smichr I have changed `test_pretty_ConditionSet` because those tests were failing because of my changes. Let me know if it is OK to include it in this PR itself.
AnishShah: @smichr I have updated the PR
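A minimal sketch of the `fuzzy_bool`/`sift` filtering mentioned above (the actual change lives in `sympy/sets/conditionset.py` in the patch below; the variable names here are only illustrative): elements for which the condition evaluates are separated from those where it stays undecided.

```python
from sympy import Symbol, FiniteSet
from sympy.core.logic import fuzzy_bool
from sympy.utilities.iterables import sift

x, y = Symbol('x'), Symbol('y')
cond = x > 0
# Substitute each element into the condition and group by the result.
groups = sift(FiniteSet(-1, 0, 1, y), lambda e: fuzzy_bool(cond.subs(x, e)))
# groups[True]  -> elements known to satisfy the condition (here: 1)
# groups[False] -> elements known to fail it (here: -1 and 0)
# groups[None]  -> elements where the condition does not evaluate (here: y);
#                  these are the ones left wrapped in a residual ConditionSet.
```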
diff --git a/sympy/combinatorics/perm_groups.py b/sympy/combinatorics/perm_groups.py index 37f07c0bc0..b1565fd52f 100644 --- a/sympy/combinatorics/perm_groups.py +++ b/sympy/combinatorics/perm_groups.py @@ -1306,14 +1306,14 @@ def generate_schreier_sims(self, af=False): yield x._array_form else: yield x - return + raise StopIteration if len(u) == 1: for i in basic_orbits[0]: if af: yield u[0][i]._array_form else: yield u[0][i] - return + raise StopIteration u = list(reversed(u)) basic_orbits = basic_orbits[::-1] @@ -1327,7 +1327,7 @@ def generate_schreier_sims(self, af=False): # backtrack when finished iterating over coset if pos[h] >= posmax[h]: if h == 0: - return + raise StopIteration pos[h] = 0 h -= 1 stg.pop() diff --git a/sympy/core/expr.py b/sympy/core/expr.py index 32954b2e7a..55ed6db91e 100644 --- a/sympy/core/expr.py +++ b/sympy/core/expr.py @@ -2622,7 +2622,7 @@ def _eval_lseries(self, x, logx=None): yield series.removeO() else: yield series - return + raise StopIteration while series.is_Order: n += 1 diff --git a/sympy/geometry/polygon.py b/sympy/geometry/polygon.py index e3b65055dd..fbc93fbaf0 100644 --- a/sympy/geometry/polygon.py +++ b/sympy/geometry/polygon.py @@ -1648,7 +1648,6 @@ class Triangle(Polygon): incircle medians medial - nine_point_circle Raises ====== @@ -2250,38 +2249,6 @@ def medial(self): s = self.sides return Triangle(s[0].midpoint, s[1].midpoint, s[2].midpoint) - @property - def nine_point_circle(self): - """The nine-point circle of the triangle. - - Nine-point circle is the circumcircle of the medial triangle, which - passes through the feet of altitudes and the middle points of segments - connecting the vertices and the orthocenter. - - Returns - ======= - - nine_point_circle : Circle - - See also - ======== - - sympy.geometry.line.Segment.midpoint - sympy.geometry.polygon.Triangle.medial - sympy.geometry.polygon.Triangle.orthocenter - - Examples - ======== - - >>> from sympy.geometry import Point, Triangle - >>> p1, p2, p3 = Point(0, 0), Point(1, 0), Point(0, 1) - >>> t = Triangle(p1, p2, p3) - >>> t.nine_point_circle - Circle(Point2D(1/4, 1/4), sqrt(2)/4) - - """ - return Circle(*self.medial.vertices) - def rad(d): """Return the radian value for the given degrees (pi = 180 degrees).""" diff --git a/sympy/polys/subresultants_qq_zz.py b/sympy/polys/subresultants_qq_zz.py index a8bc77be1c..1d71909588 100644 --- a/sympy/polys/subresultants_qq_zz.py +++ b/sympy/polys/subresultants_qq_zz.py @@ -1,214 +1,13 @@ # -*- coding: utf-8 -*- """ -This module contains functions for the computation -of Euclidean, generalized Sturmian and (modified) subresultant -polynomial remainder sequences (prs's). - -The pseudo-remainder function prem() of sympy is _not_ used -by any of the functions in the module. - -Instead of prem() we use the function - -rem_z(). - -Included is also the function quo_z(). - -1. Theoretical background: -========================== -Consider the polynomials f, g ∈ Z[x] of degrees deg(f) = n and -deg(g) = m with n ≥ m. - -Definition 1: -============= -The sign sequence of a polynomial remainder sequence (prs) is the -sequence of signs of the leading coefficients of its polynomials. - -Sign sequences can be computed with the function: - -sign_seq(poly_seq, x) - -Definition 2: -============= -A polynomial remainder sequence (prs) is called complete if the -degree difference between any two consecutive polynomials is 1; -otherwise, it called incomplete. - -It is understood that f, g belong to the sequences mentioned in -the two definitions. - -1A. 
Euclidean and subresultant prs's: -===================================== -The subresultant prs of f, g is a sequence of polynomials in Z[x] -analogous to the Euclidean prs, the sequence obtained by applying -on f, g Euclid’s algorithm for polynomial greatest common divisors -(gcd) in Q[x]. - -The subresultant prs differs from the Euclidean prs in that the -coefficients of each polynomial in the former sequence are determinants ---- also referred to as subresultants --- of appropriately selected -sub-matrices of sylvester1(f, g, x), Sylvester’s matrix of 1840 of -dimensions (n + m) × (n + m). - -Recall that the determinant of sylvester1(f, g, x) itself is -called the resultant of f, g and serves as a criterion of whether -the two polynomials have common roots or not. - -For complete prs’s the sign sequence of the Euclidean prs of f, g -is identical to the sign sequence of the subresultant prs of f, g -and the coefficients of one sequence are easily computed from the -coefficients of the other. - -For incomplete prs’s the polynomials in the subresultant prs, generally -differ in sign from those of the Euclidean prs, and --- unlike the -case of complete prs’s --- it is not at all obvious how to compute -the coefficients of one sequence from the coefficients of the other. - -1B. Sturmian and modified subresultant prs's: -============================================= -For the same polynomials f, g ∈ Z[x] mentioned above, their ``modified'' -subresultant prs is a sequence of polynomials similar to the Sturmian -prs, the sequence obtained by applying in Q[x] Sturm’s algorithm on f, g. - -The two sequences differ in that the coefficients of each polynomial -in the modified subresultant prs are the determinants --- also referred -to as modified subresultants --- of appropriately selected sub-matrices -of sylvester2(f, g, x), Sylvester’s matrix of 1853 of dimensions 2n × 2n. - -The determinant of sylvester2 itself is called the modified resultant -of f, g and it also can serve as a criterion of whether the two -polynomials have common roots or not. - -For complete prs’s the sign sequence of the Sturmian prs of f, g is -identical to the sign sequence of the modified subresultant prs of -f, g and the coefficients of one sequence are easily computed from -the coefficients of the other. - -For incomplete prs’s the polynomials in the modified subresultant prs, -generally differ in sign from those of the Sturmian prs, and --- unlike -the case of complete prs’s --- it is not at all obvious how to compute -the coefficients of one sequence from the coefficients of the other. - -As Sylvester pointed out, the coefficients of the polynomial remainders -obtained as (modified) subresultants are the smallest possible without -introducing rationals and without computing (integer) greatest common -divisors. - -1C. On terminology: -=================== -Whence the terminology? Well generalized Sturmian prs's are -``modifications'' of Euclidean prs's; the hint came from the title -of the Pell-Gordon paper of 1917. - -In the literature one also encounters the name ``non signed'' and -``signed'' prs for Euclidean and Sturmian prs respectively. - -Likewise ``non signed'' and ``signed'' subresultant prs for -subresultant and modified subresultant prs respectively. - -2. Functions in the module: -=========================== -No function utilizes sympy's function prem(). - -2A. Matrices: -============= -The functions sylvester(f, g, x, method=1) and -sylvester(f, g, x, method=2) compute either Sylvester matrix. 
-They can be used to compute (modified) subresultant prs's by -direct determinant evaluation. - -The function bezout(f, g, x, method='prs') provides a matrix of -smaller dimensions than either Sylvester matrix. It is the function -of choice for computing (modified) subresultant prs's by direct -determinant evaluation. - -sylvester(f, g, x, method=1) -sylvester(f, g, x, method=2) -bezout(f, g, x, method='prs') - -The following identity holds: - -bezout(f, g, x, method='prs') = -backward_eye(deg(f))*bezout(f, g, x, method='bz')*backward_eye(deg(f)) - -2B. Subresultant and modified subresultant prs's by -=================================================== -determinant evaluation: -======================= -Instead of utilizing the Sylvester matrices, we employ -the Bezout matrix of smaller dimensions. - -subresultants_bezout(f, g, x) -modified_subresultants_bezout(f, g, x) - -2C. Subresultant prs's by ONE determinant evaluation: -===================================================== -All three functions in this section evaluate one determinant -per remainder polynomial; this is the determinant of an -appropriately selected sub-matrix of sylvester1(f, g, x), -Sylvester’s matrix of 1840. - -To compute the remainder polynomials the function -subresultants_rem(f, g, x) employs rem(f, g, x). -By contrast, the other two functions implement Van Vleck’s ideas -of 1900 and compute the remainder polynomials by trinagularizing -sylvester2(f, g, x), Sylvester’s matrix of 1853. - - -subresultants_rem(f, g, x) -subresultants_vv(f, g, x) -subresultants_vv_2(f, g, x). - -2E. Euclidean, Sturmian prs's in Q[x]: -====================================== -euclid_q(f, g, x) -sturm_q(f, g, x) - -2F. Euclidean, Sturmian and (modified) subresultant prs's P-G: -============================================================== -All functions in this section are based on the Pell-Gordon (P-G) -theorem of 1917. -Computations are done in Q[x], employing the function rem(f, g, x) -for the computation of the remainder polynomials. - -euclid_pg(f, g, x) -sturm pg(f, g, x) -subresultants_pg(f, g, x) -modified_subresultants_pg(f, g, x) - -2G. Euclidean, Sturmian and (modified) subresultant prs's A-M-V: -================================================================ -All functions in this section are based on the Akritas-Malaschonok- -Vigklas (A-M-V) theorem of 2015. -Computations are done in Z[x], employing the function rem_z(f, g, x) -for the computation of the remainder polynomials. - -euclid_amv(f, g, x) -sturm_amv(f, g, x) -subresultants_amv(f, g, x) -modified_subresultants_amv(f, g, x) - -2Ga. Exception: -=============== -subresultants_amv_q(f, g, x) - -This function employs rem(f, g, x) for the computation of -the remainder polynomials, despite the fact that it implements -the A-M-V Theorem. - -It is included in our module in order to show that theorems P-G -and A-M-V can be implemented utilizing either the function -rem(f, g, x) or the function rem_z(f, g, x). - -For clearly historical reasons --- since the Collins-Brown-Traub -coefficients-reduction factor β_i was not available in 1917 --- -we have implemented the Pell-Gordon theorem with the function -rem(f, g, x) and the A-M-V Theorem with the function rem_z(f, g, x). 
-""" +Created on Mon Dec 28 13:25:02 2015 +@author: alkis +""" from __future__ import print_function, division -from sympy import (Abs, degree, expand, eye, floor, LC, Matrix, nan, Poly, pprint) +from sympy import (Abs, degree, expand, floor, LC, Matrix, nan, Poly, pprint) from sympy import (QQ, quo, rem, S, sign, simplify, summation, var, zeros) def sylvester(f, g, x, method = 1): @@ -324,251 +123,6 @@ def sign_seq(poly_seq, x): """ return [sign(LC(poly_seq[i], x)) for i in range(len(poly_seq))] -def bezout(p, q, x, method='bz'): - """ - The input polynomials p, q are in Z[x] or in Q[x]. It is assumed - that degree(p, x) >= degree(q, x). - - The default option bezout(p, q, x, method='bz') returns Bezout's - symmetric matrix of p and q, of dimensions deg(p) x deg(p). The - determinant of this matrix is equal to the determinant of sylvester2, - Sylvester's matrix of 1853, whose dimensions are 2*deg(p) x 2*deg(p); - however, the subresultants of these two matrices may vary in sign. - - The other option, bezout(p, q, x, 'prs'), is of interest to us - in this module because it returns a matrix equivalent to sylvester2. - In this case all subresultants of the two matrices are identical. - - Both the subresultant polynomial remainder sequence (prs) and - the modified subresultant prs of p and q can be computed by - evaluating determinants of appropriately selected submatrices of - bezout(p, q, x, 'prs') --- one determinant per coefficient of the - remainder polynomials. - - The matrices bezout(p, q, x, 'bz') and bezout(p, q, x, 'prs') - are related by the formula - - bezout(p, q, x, 'prs') = - backward_eye(deg(p)) * bezout(p, q, x, 'bz') * backward_eye(deg(p)), - - where backward_eye() is the backward identity function. - - References: - =========== - 1. G.M.Diaz-Toca,L.Gonzalez-Vega: Various New Expressions for Subresultants - and Their Applications. Appl. Algebra in Engin., Communic. and Comp., - Vol. 15, 233–266, 2004. - - """ - y = var('y') - degP = degree(p, x) - - # expr is 0 when x = y - expr = p * q.subs({x:y}) - p.subs({x:y}) * q - - # hence expr is exactly divisible by x - y - poly = Poly( quo(expr, x-y), x, y) - - # form Bezout matrix and store them in B as indicated to get - # the LC coefficient of each poly in the first position of each row - B = zeros(degP) - for i in range(degP): - for j in range(degP): - if method == 'prs': - B[degP - 1 - i, degP - 1 - j] = poly.nth(i, j) - else: - B[i, j] = poly.nth(i, j) - return B - -def backward_eye(n): - ''' - Returns the backward identity matrix of dimensions n x n. - - Needed to "turn" the Bezout matrices - so that the leading coefficients are first. - See docstring of the function bezout(p, q, x, method='bz'). - ''' - M = eye(n) # identity matrix of order n - - for i in range(int(M.rows / 2)): - M.row_swap(0 + i, M.rows - 1 - i) - - return M - -def process_bezout_output(poly_seq, x): - """ - poly_seq is a polynomial remainder sequence computed either by - subresultants_bezout or by modified_subresultants_bezout. - - This function removes from poly_seq all zero polynomials as well - as all those whose degree is equal to the degree of a previous - polynomial in poly_seq, as we scan it from left to right. 
- - """ - L = poly_seq[:] # get a copy of the input sequence - d = degree(L[1], x) - i = 2 - while i < len(L): - d_i = degree(L[i], x) - if d_i < 0: # zero poly - L.remove(L[i]) - i = i - 1 - if d == d_i: # poly degree equals degree of previous poly - L.remove(L[i]) - i = i - 1 - if d_i >= 0: - d = d_i - i = i + 1 - - return L - -def subresultants_bezout(p, q, x): - """ - The input polynomials p, q are in Z[x] or in Q[x]. It is assumed - that degree(p, x) >= degree(q, x). - - Computes the subresultant polynomial remainder sequence - of p, q by evaluating determinants of appropriately selected - submatrices of bezout(p, q, x, 'prs'). The dimensions of the - latter are deg(p) x deg(p). - - Each coefficient is computed by evaluating the determinant of the - corresponding submatrix of bezout(p, q, x, 'prs'). - - bezout(p, q, x, 'prs) is used instead of sylvester(p, q, x, 1), - Sylvester's matrix of 1840, because the dimensions of the latter - are (deg(p) + deg(q)) x (deg(p) + deg(q)). - - If the subresultant prs is complete, then the output coincides - with the Euclidean sequence of the polynomials p, q. - - References: - =========== - 1. G.M.Diaz-Toca,L.Gonzalez-Vega: Various New Expressions for Subresultants - and Their Applications. Appl. Algebra in Engin., Communic. and Comp., - Vol. 15, 233–266, 2004. - - """ - # make sure neither p nor q is 0 - if p == 0 or q == 0: - return [p, q] - - f, g = p, q - n = degF = degree(f, x) - m = degG = degree(g, x) - - # make sure proper degrees - if n == 0 and m == 0: - return [f, g] - if n < m: - n, m, degF, degG, f, g = m, n, degG, degF, g, f - if n > 0 and m == 0: - return [f, g] - - SR_L = [f, g] # subresultant list - F = LC(f, x)**(degF - degG) - - # form the bezout matrix - B = bezout(f, g, x, 'prs') - - # pick appropriate submatrices of B - # and form subresultant polys - if degF > degG: - j = 2 - if degF == degG: - j = 1 - while j <= degF: - M = B[0:j, :] - k, coeff_L = j - 1, [] - while k <= degF - 1: - coeff_L.append(M[: ,0 : j].det()) - if k < degF - 1: - M.col_swap(j - 1, k + 1) - k = k + 1 - - # apply Theorem 2.1 in the paper by Toca & Vega 2004 - # to get correct signs - SR_L.append((int((-1)**(j*(j-1)/2)) * Poly(coeff_L, x) / F).as_expr()) - j = j + 1 - - return process_bezout_output(SR_L, x) - -def modified_subresultants_bezout(p, q, x): - """ - The input polynomials p, q are in Z[x] or in Q[x]. It is assumed - that degree(p, x) >= degree(q, x). - - Computes the modified subresultant polynomial remainder sequence - of p, q by evaluating determinants of appropriately selected - submatrices of bezout(p, q, x, 'prs'). The dimensions of the - latter are deg(p) x deg(p). - - Each coefficient is computed by evaluating the determinant of the - corresponding submatrix of bezout(p, q, x, 'prs'). - - bezout(p, q, x, 'prs') is used instead of sylvester(p, q, x, 2), - Sylvester's matrix of 1853, because the dimensions of the latter - are 2*deg(p) x 2*deg(p). - - If the modified subresultant prs is complete, and LC( p ) > 0, the output - coincides with the (generalized) Sturm's sequence of the polynomials p, q. - - References: - =========== - 1. Akritas, A. G., G.I. Malaschonok and P.S. Vigklas: ``Sturm Sequences - and Modified Subresultant Polynomial Remainder Sequences.'' - Serdica Journal of Computing, Vol. 8, No 1, 29–46, 2014. - - 2. G.M.Diaz-Toca,L.Gonzalez-Vega: Various New Expressions for Subresultants - and Their Applications. Appl. Algebra in Engin., Communic. and Comp., - Vol. 15, 233–266, 2004. 
- - - """ - # make sure neither p nor q is 0 - if p == 0 or q == 0: - return [p, q] - - f, g = p, q - n = degF = degree(f, x) - m = degG = degree(g, x) - - # make sure proper degrees - if n == 0 and m == 0: - return [f, g] - if n < m: - n, m, degF, degG, f, g = m, n, degG, degF, g, f - if n > 0 and m == 0: - return [f, g] - - SR_L = [f, g] # subresultant list - - # form the bezout matrix - B = bezout(f, g, x, 'prs') - - # pick appropriate submatrices of B - # and form subresultant polys - if degF > degG: - j = 2 - if degF == degG: - j = 1 - while j <= degF: - M = B[0:j, :] - k, coeff_L = j - 1, [] - while k <= degF - 1: - coeff_L.append(M[: ,0 : j].det()) - if k < degF - 1: - M.col_swap(j - 1, k + 1) - k = k + 1 - - ## Theorem 2.1 in the paper by Toca & Vega 2004 is _not needed_ - ## in this case since - ## the bezout matrix is equivalent to sylvester2 - SR_L.append(( Poly(coeff_L, x)).as_expr()) - j = j + 1 - - return process_bezout_output(SR_L, x) - def sturm_pg(p, q, x, method=0): """ p, q are polynomials in Z[x] or Q[x]. diff --git a/sympy/sets/conditionset.py b/sympy/sets/conditionset.py index 1fa874f219..2bb96861fc 100644 --- a/sympy/sets/conditionset.py +++ b/sympy/sets/conditionset.py @@ -3,9 +3,11 @@ from sympy import S from sympy.core.basic import Basic from sympy.core.function import Lambda +from sympy.core.logic import fuzzy_bool from sympy.logic.boolalg import And from sympy.sets.sets import (Set, Interval, Intersection, EmptySet, Union, FiniteSet) +from sympy.utilities.iterables import sift class ConditionSet(Set): @@ -34,6 +36,15 @@ def __new__(cls, sym, condition, base_set): return S.EmptySet if condition == S.true: return base_set + if isinstance(base_set, EmptySet): + return base_set + if isinstance(base_set, FiniteSet): + sifted = sift(base_set, lambda _: fuzzy_bool(condition.subs(sym, _))) + if sifted[None]: + return Union(FiniteSet(*sifted[True]), + Basic.__new__(cls, sym, condition, FiniteSet(*sifted[None]))) + else: + return FiniteSet(*sifted[True]) return Basic.__new__(cls, sym, condition, base_set) sym = property(lambda self: self.args[0])
simplification of ConditionSet with FiniteSet
Should the following just simplify (automatically) to {0}?
```
>>> ConditionSet(x, And(x < 1, x > -3), FiniteSet(0, 1, 2))
ConditionSet(x, And(x < 1, x > -3), {0, 1, 2})
```
If the set (last arg) is a FiniteSet, and all elements cause the condition to be evaluated (True or False), then perhaps the filtered FiniteSet should be returned.
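For reference, a short sketch of the behaviour the issue asks for, mirroring the assertions added in the test patch below; it assumes the auto-evaluation has been implemented, and the printed forms are only approximate.

```python
from sympy import Symbol, And, FiniteSet
from sympy.sets import ConditionSet

x, y = Symbol('x'), Symbol('y')

# Every element decides the condition -> a plain filtered FiniteSet:
ConditionSet(x, And(x < 1, x > -3), FiniteSet(0, 1, 2))   # -> {0}

# No element satisfies it -> the empty set:
ConditionSet(x, x < 0, FiniteSet(0, 1, 2))                # -> EmptySet()

# Undecidable elements stay wrapped in a residual ConditionSet:
ConditionSet(x, x > 0, FiniteSet(-1, 0, 1, y))
# -> Union({1}, ConditionSet(x, x > 0, {y}))
```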
sympy/sympy
diff --git a/sympy/geometry/tests/test_polygon.py b/sympy/geometry/tests/test_polygon.py index 12365695a6..cef5a5cf05 100644 --- a/sympy/geometry/tests/test_polygon.py +++ b/sympy/geometry/tests/test_polygon.py @@ -228,10 +228,6 @@ def test_polygon(): assert intersection(m[p1], m[p2], m[p3]) == [t1.centroid] assert t1.medial == Triangle(Point(2.5, 0), Point(0, 2.5), Point(2.5, 2.5)) - # Nine-point circle - assert t1.nine_point_circle == Circle(Point(2.5, 0), Point(0, 2.5), Point(2.5, 2.5)) - assert t1.nine_point_circle == Circle(Point(0, 0), Point(0, 2.5), Point(2.5, 2.5)) - # Perpendicular altitudes = t1.altitudes assert altitudes[p1] == Segment(p1, Point(Rational(5, 2), Rational(5, 2))) diff --git a/sympy/polys/tests/test_subresultants_qq_zz.py b/sympy/polys/tests/test_subresultants_qq_zz.py index a377a936c2..737bdf7752 100644 --- a/sympy/polys/tests/test_subresultants_qq_zz.py +++ b/sympy/polys/tests/test_subresultants_qq_zz.py @@ -1,8 +1,6 @@ from sympy import var, sturm, subresultants, prem, pquo -from sympy.matrices import Matrix, eye -from sympy.polys.subresultants_qq_zz import (sylvester, bezout, - subresultants_bezout, modified_subresultants_bezout, - process_bezout_output, backward_eye, +from sympy.matrices import Matrix +from sympy.polys.subresultants_qq_zz import (sylvester, sturm_pg, sturm_q, sturm_amv, euclid_pg, euclid_q, euclid_amv, modified_subresultants_pg, subresultants_pg, subresultants_amv_q, quo_z, rem_z, subresultants_amv, @@ -34,44 +32,6 @@ def test_sylvester(): assert sylvester(x**3 - 7*x + 7, 3*x**2 - 7, x, 2) == Matrix([ [1, 0, -7, 7, 0, 0], [0, 3, 0, -7, 0, 0], [0, 1, 0, -7, 7, 0], [0, 0, 3, 0, -7, 0], [0, 0, 1, 0, -7, 7], [0, 0, 0, 3, 0, -7]]) -def test_bezout(): - x = var('x') - - p = -2*x**5+7*x**3+9*x**2-3*x+1 - q = -10*x**4+21*x**2+18*x-3 - assert bezout(p, q, x, 'bz').det() == sylvester(p, q, x, 2).det() - assert bezout(p, q, x, 'bz').det() != sylvester(p, q, x, 1).det() - assert bezout(p, q, x, 'prs') == backward_eye(5) * bezout(p, q, x, 'bz') * backward_eye(5) - -def test_subresultants_bezout(): - x = var('x') - - p = x**8 + x**6 - 3*x**4 - 3*x**3 + 8*x**2 + 2*x - 5 - q = 3*x**6 + 5*x**4 - 4*x**2 - 9*x + 21 - assert subresultants_bezout(p, q, x) == subresultants(p, q, x) - assert subresultants_bezout(p, q, x)[-1] == sylvester(p, q, x).det() - assert subresultants_bezout(p, q, x) != euclid_amv(p, q, x) - amv_factors = [1, 1, -1, 1, -1, 1] - assert subresultants_bezout(p, q, x) == [i*j for i, j in zip(amv_factors, modified_subresultants_amv(p, q, x))] - - p = x**3 - 7*x + 7 - q = 3*x**2 - 7 - assert subresultants_bezout(p, q, x) == euclid_amv(p, q, x) - -def test_modified_subresultants_bezout(): - x = var('x') - - p = x**8 + x**6 - 3*x**4 - 3*x**3 + 8*x**2 + 2*x - 5 - q = 3*x**6 + 5*x**4 - 4*x**2 - 9*x + 21 - amv_factors = [1, 1, -1, 1, -1, 1] - assert modified_subresultants_bezout(p, q, x) == [i*j for i, j in zip(amv_factors, subresultants_amv(p, q, x))] - assert modified_subresultants_bezout(p, q, x)[-1] != sylvester(p + x**8, q, x).det() - assert modified_subresultants_bezout(p, q, x) != sturm_amv(p, q, x) - - p = x**3 - 7*x + 7 - q = 3*x**2 - 7 - assert modified_subresultants_bezout(p, q, x) == sturm_amv(p, q, x) - assert modified_subresultants_bezout(-p, q, x) != sturm_amv(-p, q, x) def test_sturm_pg(): x = var('x') diff --git a/sympy/printing/pretty/tests/test_pretty.py b/sympy/printing/pretty/tests/test_pretty.py index 959c8b7fbc..941ba32cd0 100644 --- a/sympy/printing/pretty/tests/test_pretty.py +++ b/sympy/printing/pretty/tests/test_pretty.py @@ 
-3199,17 +3199,14 @@ def test_pretty_ConditionSet(): assert pretty(ConditionSet(x, Eq(sin(x), 0), S.Reals)) == ascii_str assert upretty(ConditionSet(x, Eq(sin(x), 0), S.Reals)) == ucode_str - assert pretty(ConditionSet(x, Contains(x, S.Reals, evaluate=False), FiniteSet(1))) == \ - '{x | x in {1} and Contains(x, (-oo, oo))}' - assert upretty(ConditionSet(x, Contains(x, S.Reals, evaluate=False), FiniteSet(1))) == u('{x | x ∊ {1} ∧ (x ∈ ℝ)}') + assert pretty(ConditionSet(x, Contains(x, S.Reals, evaluate=False), FiniteSet(1))) == '{1}' + assert upretty(ConditionSet(x, Contains(x, S.Reals, evaluate=False), FiniteSet(1))) == u('{1}') - assert pretty(ConditionSet(x, And(x > 1, x < -1), FiniteSet(1, 2, 3))) ==\ - '{x | x in {1, 2, 3} and And(x > 1, x < -1)}' - assert upretty(ConditionSet(x, And(x > 1, x < -1), FiniteSet(1, 2, 3))) == \ - u('{x | x ∊ {1, 2, 3} ∧ (x > 1 ∧ x < -1)}') + assert pretty(ConditionSet(x, And(x > 1, x < -1), FiniteSet(1, 2, 3))) == "EmptySet()" + assert upretty(ConditionSet(x, And(x > 1, x < -1), FiniteSet(1, 2, 3))) == u("∅") - assert pretty(ConditionSet(x, Or(x > 1, x < -1), FiniteSet(1, 2))) == '{x | x in {1, 2} and Or(x > 1, x < -1)}' - assert upretty(ConditionSet(x, Or(x > 1, x < -1), FiniteSet(1, 2))) == u('{x | x ∊ {1, 2} ∧ (x > 1 ∨ x < -1)}') + assert pretty(ConditionSet(x, Or(x > 1, x < -1), FiniteSet(1, 2))) == '{2}' + assert upretty(ConditionSet(x, Or(x > 1, x < -1), FiniteSet(1, 2))) == u('{2}') def test_pretty_ComplexRegion(): diff --git a/sympy/sets/tests/test_conditionset.py b/sympy/sets/tests/test_conditionset.py index c7d02044b4..557f87ed30 100644 --- a/sympy/sets/tests/test_conditionset.py +++ b/sympy/sets/tests/test_conditionset.py @@ -1,5 +1,5 @@ -from sympy.sets import (ConditionSet, Intersection) -from sympy import (Symbol, Eq, S, Abs, sin, pi, Lambda, Interval) +from sympy.sets import (ConditionSet, Intersection, FiniteSet, EmptySet, Union) +from sympy import (Symbol, Eq, S, Abs, sin, pi, Lambda, Interval, And, Mod) x = Symbol('x') @@ -24,3 +24,13 @@ def test_CondSet_intersect(): def test_issue_9849(): assert ConditionSet(x, Eq(x, x), S.Naturals) == S.Naturals assert ConditionSet(x, Eq(Abs(sin(x)), -1), S.Naturals) == S.EmptySet + +def test_simplified_FiniteSet_in_CondSet(): + assert ConditionSet(x, And(x < 1, x > -3), FiniteSet(0, 1, 2)) == FiniteSet(0) + assert ConditionSet(x, x < 0, FiniteSet(0, 1, 2)) == EmptySet() + assert ConditionSet(x, And(x < -3), EmptySet()) == EmptySet() + y = Symbol('y') + assert (ConditionSet(x, And(x > 0), FiniteSet(-1, 0, 1, y)) == + Union(FiniteSet(1), ConditionSet(x, And(x > 0), FiniteSet(y)))) + assert (ConditionSet(x, Eq(Mod(x, 3), 1), FiniteSet(1, 4, 2, y)) == + Union(FiniteSet(1, 4), ConditionSet(x, Eq(Mod(x, 3), 1), FiniteSet(y))))
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 5 }
0.7
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "mpmath>=0.19", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.7", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///croot/attrs_1668696182826/work certifi @ file:///croot/certifi_1671487769961/work/certifi flit_core @ file:///opt/conda/conda-bld/flit-core_1644941570762/work/source/flit_core importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1648562407465/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work mpmath==1.3.0 packaging @ file:///croot/packaging_1671697413597/work pluggy @ file:///tmp/build/80754af9/pluggy_1648042572264/work py @ file:///opt/conda/conda-bld/py_1644396412707/work pytest==7.1.2 -e git+https://github.com/sympy/sympy.git@5631818c6f486ab17597602ddcc9fbfee7e4d392#egg=sympy tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work typing_extensions @ file:///croot/typing_extensions_1669924550328/work zipp @ file:///croot/zipp_1672387121353/work
name: sympy channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=22.1.0=py37h06a4308_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2022.12.7=py37h06a4308_0 - flit-core=3.6.0=pyhd3eb1b0_0 - importlib-metadata=4.11.3=py37h06a4308_0 - importlib_metadata=4.11.3=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=22.0=py37h06a4308_0 - pip=22.3.1=py37h06a4308_0 - pluggy=1.0.0=py37h06a4308_1 - py=1.11.0=pyhd3eb1b0_0 - pytest=7.1.2=py37h06a4308_0 - python=3.7.16=h7a1cb2a_0 - readline=8.2=h5eee18b_0 - setuptools=65.6.3=py37h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tomli=2.0.1=py37h06a4308_0 - typing_extensions=4.4.0=py37h06a4308_0 - wheel=0.38.4=py37h06a4308_0 - xz=5.6.4=h5eee18b_1 - zipp=3.11.0=py37h06a4308_0 - zlib=1.2.13=h5eee18b_1 - pip: - mpmath==1.3.0 prefix: /opt/conda/envs/sympy
[ "sympy/printing/pretty/tests/test_pretty.py::test_pretty_ConditionSet", "sympy/sets/tests/test_conditionset.py::test_simplified_FiniteSet_in_CondSet" ]
[]
[ "sympy/geometry/tests/test_polygon.py::test_polygon", "sympy/geometry/tests/test_polygon.py::test_convex_hull", "sympy/geometry/tests/test_polygon.py::test_encloses", "sympy/geometry/tests/test_polygon.py::test_triangle_kwargs", "sympy/geometry/tests/test_polygon.py::test_transform", "sympy/geometry/tests/test_polygon.py::test_reflect", "sympy/polys/tests/test_subresultants_qq_zz.py::test_sylvester", "sympy/polys/tests/test_subresultants_qq_zz.py::test_sturm_pg", "sympy/polys/tests/test_subresultants_qq_zz.py::test_sturm_q", "sympy/polys/tests/test_subresultants_qq_zz.py::test_sturm_amv", "sympy/polys/tests/test_subresultants_qq_zz.py::test_euclid_pg", "sympy/polys/tests/test_subresultants_qq_zz.py::test_euclid_q", "sympy/polys/tests/test_subresultants_qq_zz.py::test_euclid_amv", "sympy/polys/tests/test_subresultants_qq_zz.py::test_modified_subresultants_pg", "sympy/polys/tests/test_subresultants_qq_zz.py::test_subresultants_pg", "sympy/polys/tests/test_subresultants_qq_zz.py::test_subresultants_amv_q", "sympy/polys/tests/test_subresultants_qq_zz.py::test_rem_z", "sympy/polys/tests/test_subresultants_qq_zz.py::test_quo_z", "sympy/polys/tests/test_subresultants_qq_zz.py::test_subresultants_amv", "sympy/polys/tests/test_subresultants_qq_zz.py::test_modified_subresultants_amv", "sympy/polys/tests/test_subresultants_qq_zz.py::test_subresultants_rem", "sympy/polys/tests/test_subresultants_qq_zz.py::test_subresultants_vv", "sympy/polys/tests/test_subresultants_qq_zz.py::test_subresultants_vv_2", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_ascii_str", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_unicode_str", "sympy/printing/pretty/tests/test_pretty.py::test_upretty_greek", "sympy/printing/pretty/tests/test_pretty.py::test_upretty_multiindex", "sympy/printing/pretty/tests/test_pretty.py::test_upretty_sub_super", "sympy/printing/pretty/tests/test_pretty.py::test_upretty_subs_missing_in_24", "sympy/printing/pretty/tests/test_pretty.py::test_upretty_modifiers", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_Cycle", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_basic", "sympy/printing/pretty/tests/test_pretty.py::test_negative_fractions", "sympy/printing/pretty/tests/test_pretty.py::test_issue_5524", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_ordering", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_relational", "sympy/printing/pretty/tests/test_pretty.py::test_Assignment", "sympy/printing/pretty/tests/test_pretty.py::test_issue_7117", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_rational", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_functions", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_sqrt", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_sqrt_char_knob", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_sqrt_longsymbol_no_sqrt_char", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_KroneckerDelta", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_product", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_lambda", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_order", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_derivatives", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_integrals", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_matrix", "sympy/printing/pretty/tests/test_pretty.py::test_Adjoint", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_Trace_issue_9044", 
"sympy/printing/pretty/tests/test_pretty.py::test_MatrixExpressions", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_dotproduct", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_piecewise", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_seq", "sympy/printing/pretty/tests/test_pretty.py::test_any_object_in_sequence", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_sets", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_ComplexRegion", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_Union_issue_10414", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_Intersection_issue_10414", "sympy/printing/pretty/tests/test_pretty.py::test_ProductSet_paranthesis", "sympy/printing/pretty/tests/test_pretty.py::test_ProductSet_prod_char_issue_10413", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_sequences", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_FourierSeries", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_FormalPowerSeries", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_limits", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_ComplexRootOf", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_RootSum", "sympy/printing/pretty/tests/test_pretty.py::test_GroebnerBasis", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_Boolean", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_Domain", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_prec", "sympy/printing/pretty/tests/test_pretty.py::test_pprint", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_class", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_no_wrap_line", "sympy/printing/pretty/tests/test_pretty.py::test_settings", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_sum", "sympy/printing/pretty/tests/test_pretty.py::test_units", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_Subs", "sympy/printing/pretty/tests/test_pretty.py::test_gammas", "sympy/printing/pretty/tests/test_pretty.py::test_hyper", "sympy/printing/pretty/tests/test_pretty.py::test_meijerg", "sympy/printing/pretty/tests/test_pretty.py::test_noncommutative", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_special_functions", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_geometry", "sympy/printing/pretty/tests/test_pretty.py::test_expint", "sympy/printing/pretty/tests/test_pretty.py::test_elliptic_functions", "sympy/printing/pretty/tests/test_pretty.py::test_RandomDomain", "sympy/printing/pretty/tests/test_pretty.py::test_PrettyPoly", "sympy/printing/pretty/tests/test_pretty.py::test_issue_6285", "sympy/printing/pretty/tests/test_pretty.py::test_issue_6359", "sympy/printing/pretty/tests/test_pretty.py::test_issue_6739", "sympy/printing/pretty/tests/test_pretty.py::test_complicated_symbol_unchanged", "sympy/printing/pretty/tests/test_pretty.py::test_categories", "sympy/printing/pretty/tests/test_pretty.py::test_PrettyModules", "sympy/printing/pretty/tests/test_pretty.py::test_QuotientRing", "sympy/printing/pretty/tests/test_pretty.py::test_Homomorphism", "sympy/printing/pretty/tests/test_pretty.py::test_Tr", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_Add", "sympy/printing/pretty/tests/test_pretty.py::test_issue_7179", "sympy/printing/pretty/tests/test_pretty.py::test_issue_7180", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_Complement", "sympy/printing/pretty/tests/test_pretty.py::test_pretty_SymmetricDifference", 
"sympy/printing/pretty/tests/test_pretty.py::test_pretty_Contains", "sympy/printing/pretty/tests/test_pretty.py::test_issue_8292", "sympy/printing/pretty/tests/test_pretty.py::test_issue_4335", "sympy/printing/pretty/tests/test_pretty.py::test_issue_8344", "sympy/printing/pretty/tests/test_pretty.py::test_issue_6324", "sympy/printing/pretty/tests/test_pretty.py::test_issue_7927", "sympy/printing/pretty/tests/test_pretty.py::test_issue_6134", "sympy/printing/pretty/tests/test_pretty.py::test_issue_9877", "sympy/sets/tests/test_conditionset.py::test_CondSet", "sympy/sets/tests/test_conditionset.py::test_CondSet_intersect", "sympy/sets/tests/test_conditionset.py::test_issue_9849" ]
[]
BSD
428
sympy__sympy-10581
425ee8695a04a17b71ea15fa045fa95efffbeb1d
2016-02-12 20:14:08
0241b35cae5f2adc04dc37b25f7dc9c5f00bd746
diff --git a/sympy/combinatorics/perm_groups.py b/sympy/combinatorics/perm_groups.py index 37f07c0bc0..b1565fd52f 100644 --- a/sympy/combinatorics/perm_groups.py +++ b/sympy/combinatorics/perm_groups.py @@ -1306,14 +1306,14 @@ def generate_schreier_sims(self, af=False): yield x._array_form else: yield x - return + raise StopIteration if len(u) == 1: for i in basic_orbits[0]: if af: yield u[0][i]._array_form else: yield u[0][i] - return + raise StopIteration u = list(reversed(u)) basic_orbits = basic_orbits[::-1] @@ -1327,7 +1327,7 @@ def generate_schreier_sims(self, af=False): # backtrack when finished iterating over coset if pos[h] >= posmax[h]: if h == 0: - return + raise StopIteration pos[h] = 0 h -= 1 stg.pop() diff --git a/sympy/core/expr.py b/sympy/core/expr.py index 32954b2e7a..55ed6db91e 100644 --- a/sympy/core/expr.py +++ b/sympy/core/expr.py @@ -2622,7 +2622,7 @@ def _eval_lseries(self, x, logx=None): yield series.removeO() else: yield series - return + raise StopIteration while series.is_Order: n += 1 diff --git a/sympy/logic/boolalg.py b/sympy/logic/boolalg.py index 4b85287590..dd6f385740 100644 --- a/sympy/logic/boolalg.py +++ b/sympy/logic/boolalg.py @@ -84,10 +84,31 @@ class BooleanAtom(Boolean): Base class of BooleanTrue and BooleanFalse. """ is_Boolean = True + _op_priority = 11 # higher than Expr + @property def canonical(self): return self + def _noop(self, other=None): + raise TypeError('BooleanAtom not allowed in this context.') + + __add__ = _noop + __radd__ = _noop + __sub__ = _noop + __rsub__ = _noop + __mul__ = _noop + __rmul__ = _noop + __pow__ = _noop + __rpow__ = _noop + __rdiv__ = _noop + __truediv__ = _noop + __div__ = _noop + __rtruediv__ = _noop + __mod__ = _noop + __rmod__ = _noop + _eval_power = _noop + class BooleanTrue(with_metaclass(Singleton, BooleanAtom)): """ diff --git a/sympy/polys/polyutils.py b/sympy/polys/polyutils.py index 28f9876313..587a1f3712 100644 --- a/sympy/polys/polyutils.py +++ b/sympy/polys/polyutils.py @@ -356,7 +356,10 @@ def _is_expandable_pow(expr): and expr.base.is_Add) if opt.expand is not False: - expr = expr.expand() + try: + expr = expr.expand() + except AttributeError: + raise PolynomialError('expression must support expand method') # TODO: Integrate this into expand() itself while any(_is_expandable_pow(i) or i.is_Mul and any(_is_expandable_pow(j) for j in i.args) for i in diff --git a/sympy/polys/subresultants_qq_zz.py b/sympy/polys/subresultants_qq_zz.py index a8bc77be1c..1d71909588 100644 --- a/sympy/polys/subresultants_qq_zz.py +++ b/sympy/polys/subresultants_qq_zz.py @@ -1,214 +1,13 @@ # -*- coding: utf-8 -*- """ -This module contains functions for the computation -of Euclidean, generalized Sturmian and (modified) subresultant -polynomial remainder sequences (prs's). - -The pseudo-remainder function prem() of sympy is _not_ used -by any of the functions in the module. - -Instead of prem() we use the function - -rem_z(). - -Included is also the function quo_z(). - -1. Theoretical background: -========================== -Consider the polynomials f, g ∈ Z[x] of degrees deg(f) = n and -deg(g) = m with n ≥ m. - -Definition 1: -============= -The sign sequence of a polynomial remainder sequence (prs) is the -sequence of signs of the leading coefficients of its polynomials. 
- -Sign sequences can be computed with the function: - -sign_seq(poly_seq, x) - -Definition 2: -============= -A polynomial remainder sequence (prs) is called complete if the -degree difference between any two consecutive polynomials is 1; -otherwise, it called incomplete. - -It is understood that f, g belong to the sequences mentioned in -the two definitions. - -1A. Euclidean and subresultant prs's: -===================================== -The subresultant prs of f, g is a sequence of polynomials in Z[x] -analogous to the Euclidean prs, the sequence obtained by applying -on f, g Euclid’s algorithm for polynomial greatest common divisors -(gcd) in Q[x]. - -The subresultant prs differs from the Euclidean prs in that the -coefficients of each polynomial in the former sequence are determinants ---- also referred to as subresultants --- of appropriately selected -sub-matrices of sylvester1(f, g, x), Sylvester’s matrix of 1840 of -dimensions (n + m) × (n + m). - -Recall that the determinant of sylvester1(f, g, x) itself is -called the resultant of f, g and serves as a criterion of whether -the two polynomials have common roots or not. - -For complete prs’s the sign sequence of the Euclidean prs of f, g -is identical to the sign sequence of the subresultant prs of f, g -and the coefficients of one sequence are easily computed from the -coefficients of the other. - -For incomplete prs’s the polynomials in the subresultant prs, generally -differ in sign from those of the Euclidean prs, and --- unlike the -case of complete prs’s --- it is not at all obvious how to compute -the coefficients of one sequence from the coefficients of the other. - -1B. Sturmian and modified subresultant prs's: -============================================= -For the same polynomials f, g ∈ Z[x] mentioned above, their ``modified'' -subresultant prs is a sequence of polynomials similar to the Sturmian -prs, the sequence obtained by applying in Q[x] Sturm’s algorithm on f, g. - -The two sequences differ in that the coefficients of each polynomial -in the modified subresultant prs are the determinants --- also referred -to as modified subresultants --- of appropriately selected sub-matrices -of sylvester2(f, g, x), Sylvester’s matrix of 1853 of dimensions 2n × 2n. - -The determinant of sylvester2 itself is called the modified resultant -of f, g and it also can serve as a criterion of whether the two -polynomials have common roots or not. - -For complete prs’s the sign sequence of the Sturmian prs of f, g is -identical to the sign sequence of the modified subresultant prs of -f, g and the coefficients of one sequence are easily computed from -the coefficients of the other. - -For incomplete prs’s the polynomials in the modified subresultant prs, -generally differ in sign from those of the Sturmian prs, and --- unlike -the case of complete prs’s --- it is not at all obvious how to compute -the coefficients of one sequence from the coefficients of the other. - -As Sylvester pointed out, the coefficients of the polynomial remainders -obtained as (modified) subresultants are the smallest possible without -introducing rationals and without computing (integer) greatest common -divisors. - -1C. On terminology: -=================== -Whence the terminology? Well generalized Sturmian prs's are -``modifications'' of Euclidean prs's; the hint came from the title -of the Pell-Gordon paper of 1917. - -In the literature one also encounters the name ``non signed'' and -``signed'' prs for Euclidean and Sturmian prs respectively. 
- -Likewise ``non signed'' and ``signed'' subresultant prs for -subresultant and modified subresultant prs respectively. - -2. Functions in the module: -=========================== -No function utilizes sympy's function prem(). - -2A. Matrices: -============= -The functions sylvester(f, g, x, method=1) and -sylvester(f, g, x, method=2) compute either Sylvester matrix. -They can be used to compute (modified) subresultant prs's by -direct determinant evaluation. - -The function bezout(f, g, x, method='prs') provides a matrix of -smaller dimensions than either Sylvester matrix. It is the function -of choice for computing (modified) subresultant prs's by direct -determinant evaluation. - -sylvester(f, g, x, method=1) -sylvester(f, g, x, method=2) -bezout(f, g, x, method='prs') - -The following identity holds: - -bezout(f, g, x, method='prs') = -backward_eye(deg(f))*bezout(f, g, x, method='bz')*backward_eye(deg(f)) - -2B. Subresultant and modified subresultant prs's by -=================================================== -determinant evaluation: -======================= -Instead of utilizing the Sylvester matrices, we employ -the Bezout matrix of smaller dimensions. - -subresultants_bezout(f, g, x) -modified_subresultants_bezout(f, g, x) - -2C. Subresultant prs's by ONE determinant evaluation: -===================================================== -All three functions in this section evaluate one determinant -per remainder polynomial; this is the determinant of an -appropriately selected sub-matrix of sylvester1(f, g, x), -Sylvester’s matrix of 1840. - -To compute the remainder polynomials the function -subresultants_rem(f, g, x) employs rem(f, g, x). -By contrast, the other two functions implement Van Vleck’s ideas -of 1900 and compute the remainder polynomials by trinagularizing -sylvester2(f, g, x), Sylvester’s matrix of 1853. - - -subresultants_rem(f, g, x) -subresultants_vv(f, g, x) -subresultants_vv_2(f, g, x). - -2E. Euclidean, Sturmian prs's in Q[x]: -====================================== -euclid_q(f, g, x) -sturm_q(f, g, x) - -2F. Euclidean, Sturmian and (modified) subresultant prs's P-G: -============================================================== -All functions in this section are based on the Pell-Gordon (P-G) -theorem of 1917. -Computations are done in Q[x], employing the function rem(f, g, x) -for the computation of the remainder polynomials. - -euclid_pg(f, g, x) -sturm pg(f, g, x) -subresultants_pg(f, g, x) -modified_subresultants_pg(f, g, x) - -2G. Euclidean, Sturmian and (modified) subresultant prs's A-M-V: -================================================================ -All functions in this section are based on the Akritas-Malaschonok- -Vigklas (A-M-V) theorem of 2015. -Computations are done in Z[x], employing the function rem_z(f, g, x) -for the computation of the remainder polynomials. - -euclid_amv(f, g, x) -sturm_amv(f, g, x) -subresultants_amv(f, g, x) -modified_subresultants_amv(f, g, x) - -2Ga. Exception: -=============== -subresultants_amv_q(f, g, x) - -This function employs rem(f, g, x) for the computation of -the remainder polynomials, despite the fact that it implements -the A-M-V Theorem. - -It is included in our module in order to show that theorems P-G -and A-M-V can be implemented utilizing either the function -rem(f, g, x) or the function rem_z(f, g, x). 
- -For clearly historical reasons --- since the Collins-Brown-Traub -coefficients-reduction factor β_i was not available in 1917 --- -we have implemented the Pell-Gordon theorem with the function -rem(f, g, x) and the A-M-V Theorem with the function rem_z(f, g, x). -""" +Created on Mon Dec 28 13:25:02 2015 +@author: alkis +""" from __future__ import print_function, division -from sympy import (Abs, degree, expand, eye, floor, LC, Matrix, nan, Poly, pprint) +from sympy import (Abs, degree, expand, floor, LC, Matrix, nan, Poly, pprint) from sympy import (QQ, quo, rem, S, sign, simplify, summation, var, zeros) def sylvester(f, g, x, method = 1): @@ -324,251 +123,6 @@ def sign_seq(poly_seq, x): """ return [sign(LC(poly_seq[i], x)) for i in range(len(poly_seq))] -def bezout(p, q, x, method='bz'): - """ - The input polynomials p, q are in Z[x] or in Q[x]. It is assumed - that degree(p, x) >= degree(q, x). - - The default option bezout(p, q, x, method='bz') returns Bezout's - symmetric matrix of p and q, of dimensions deg(p) x deg(p). The - determinant of this matrix is equal to the determinant of sylvester2, - Sylvester's matrix of 1853, whose dimensions are 2*deg(p) x 2*deg(p); - however, the subresultants of these two matrices may vary in sign. - - The other option, bezout(p, q, x, 'prs'), is of interest to us - in this module because it returns a matrix equivalent to sylvester2. - In this case all subresultants of the two matrices are identical. - - Both the subresultant polynomial remainder sequence (prs) and - the modified subresultant prs of p and q can be computed by - evaluating determinants of appropriately selected submatrices of - bezout(p, q, x, 'prs') --- one determinant per coefficient of the - remainder polynomials. - - The matrices bezout(p, q, x, 'bz') and bezout(p, q, x, 'prs') - are related by the formula - - bezout(p, q, x, 'prs') = - backward_eye(deg(p)) * bezout(p, q, x, 'bz') * backward_eye(deg(p)), - - where backward_eye() is the backward identity function. - - References: - =========== - 1. G.M.Diaz-Toca,L.Gonzalez-Vega: Various New Expressions for Subresultants - and Their Applications. Appl. Algebra in Engin., Communic. and Comp., - Vol. 15, 233–266, 2004. - - """ - y = var('y') - degP = degree(p, x) - - # expr is 0 when x = y - expr = p * q.subs({x:y}) - p.subs({x:y}) * q - - # hence expr is exactly divisible by x - y - poly = Poly( quo(expr, x-y), x, y) - - # form Bezout matrix and store them in B as indicated to get - # the LC coefficient of each poly in the first position of each row - B = zeros(degP) - for i in range(degP): - for j in range(degP): - if method == 'prs': - B[degP - 1 - i, degP - 1 - j] = poly.nth(i, j) - else: - B[i, j] = poly.nth(i, j) - return B - -def backward_eye(n): - ''' - Returns the backward identity matrix of dimensions n x n. - - Needed to "turn" the Bezout matrices - so that the leading coefficients are first. - See docstring of the function bezout(p, q, x, method='bz'). - ''' - M = eye(n) # identity matrix of order n - - for i in range(int(M.rows / 2)): - M.row_swap(0 + i, M.rows - 1 - i) - - return M - -def process_bezout_output(poly_seq, x): - """ - poly_seq is a polynomial remainder sequence computed either by - subresultants_bezout or by modified_subresultants_bezout. - - This function removes from poly_seq all zero polynomials as well - as all those whose degree is equal to the degree of a previous - polynomial in poly_seq, as we scan it from left to right. 
- - """ - L = poly_seq[:] # get a copy of the input sequence - d = degree(L[1], x) - i = 2 - while i < len(L): - d_i = degree(L[i], x) - if d_i < 0: # zero poly - L.remove(L[i]) - i = i - 1 - if d == d_i: # poly degree equals degree of previous poly - L.remove(L[i]) - i = i - 1 - if d_i >= 0: - d = d_i - i = i + 1 - - return L - -def subresultants_bezout(p, q, x): - """ - The input polynomials p, q are in Z[x] or in Q[x]. It is assumed - that degree(p, x) >= degree(q, x). - - Computes the subresultant polynomial remainder sequence - of p, q by evaluating determinants of appropriately selected - submatrices of bezout(p, q, x, 'prs'). The dimensions of the - latter are deg(p) x deg(p). - - Each coefficient is computed by evaluating the determinant of the - corresponding submatrix of bezout(p, q, x, 'prs'). - - bezout(p, q, x, 'prs) is used instead of sylvester(p, q, x, 1), - Sylvester's matrix of 1840, because the dimensions of the latter - are (deg(p) + deg(q)) x (deg(p) + deg(q)). - - If the subresultant prs is complete, then the output coincides - with the Euclidean sequence of the polynomials p, q. - - References: - =========== - 1. G.M.Diaz-Toca,L.Gonzalez-Vega: Various New Expressions for Subresultants - and Their Applications. Appl. Algebra in Engin., Communic. and Comp., - Vol. 15, 233–266, 2004. - - """ - # make sure neither p nor q is 0 - if p == 0 or q == 0: - return [p, q] - - f, g = p, q - n = degF = degree(f, x) - m = degG = degree(g, x) - - # make sure proper degrees - if n == 0 and m == 0: - return [f, g] - if n < m: - n, m, degF, degG, f, g = m, n, degG, degF, g, f - if n > 0 and m == 0: - return [f, g] - - SR_L = [f, g] # subresultant list - F = LC(f, x)**(degF - degG) - - # form the bezout matrix - B = bezout(f, g, x, 'prs') - - # pick appropriate submatrices of B - # and form subresultant polys - if degF > degG: - j = 2 - if degF == degG: - j = 1 - while j <= degF: - M = B[0:j, :] - k, coeff_L = j - 1, [] - while k <= degF - 1: - coeff_L.append(M[: ,0 : j].det()) - if k < degF - 1: - M.col_swap(j - 1, k + 1) - k = k + 1 - - # apply Theorem 2.1 in the paper by Toca & Vega 2004 - # to get correct signs - SR_L.append((int((-1)**(j*(j-1)/2)) * Poly(coeff_L, x) / F).as_expr()) - j = j + 1 - - return process_bezout_output(SR_L, x) - -def modified_subresultants_bezout(p, q, x): - """ - The input polynomials p, q are in Z[x] or in Q[x]. It is assumed - that degree(p, x) >= degree(q, x). - - Computes the modified subresultant polynomial remainder sequence - of p, q by evaluating determinants of appropriately selected - submatrices of bezout(p, q, x, 'prs'). The dimensions of the - latter are deg(p) x deg(p). - - Each coefficient is computed by evaluating the determinant of the - corresponding submatrix of bezout(p, q, x, 'prs'). - - bezout(p, q, x, 'prs') is used instead of sylvester(p, q, x, 2), - Sylvester's matrix of 1853, because the dimensions of the latter - are 2*deg(p) x 2*deg(p). - - If the modified subresultant prs is complete, and LC( p ) > 0, the output - coincides with the (generalized) Sturm's sequence of the polynomials p, q. - - References: - =========== - 1. Akritas, A. G., G.I. Malaschonok and P.S. Vigklas: ``Sturm Sequences - and Modified Subresultant Polynomial Remainder Sequences.'' - Serdica Journal of Computing, Vol. 8, No 1, 29–46, 2014. - - 2. G.M.Diaz-Toca,L.Gonzalez-Vega: Various New Expressions for Subresultants - and Their Applications. Appl. Algebra in Engin., Communic. and Comp., - Vol. 15, 233–266, 2004. 
- - - """ - # make sure neither p nor q is 0 - if p == 0 or q == 0: - return [p, q] - - f, g = p, q - n = degF = degree(f, x) - m = degG = degree(g, x) - - # make sure proper degrees - if n == 0 and m == 0: - return [f, g] - if n < m: - n, m, degF, degG, f, g = m, n, degG, degF, g, f - if n > 0 and m == 0: - return [f, g] - - SR_L = [f, g] # subresultant list - - # form the bezout matrix - B = bezout(f, g, x, 'prs') - - # pick appropriate submatrices of B - # and form subresultant polys - if degF > degG: - j = 2 - if degF == degG: - j = 1 - while j <= degF: - M = B[0:j, :] - k, coeff_L = j - 1, [] - while k <= degF - 1: - coeff_L.append(M[: ,0 : j].det()) - if k < degF - 1: - M.col_swap(j - 1, k + 1) - k = k + 1 - - ## Theorem 2.1 in the paper by Toca & Vega 2004 is _not needed_ - ## in this case since - ## the bezout matrix is equivalent to sylvester2 - SR_L.append(( Poly(coeff_L, x)).as_expr()) - j = j + 1 - - return process_bezout_output(SR_L, x) - def sturm_pg(p, q, x, method=0): """ p, q are polynomials in Z[x] or Q[x].
printing: Add containing a Boolean fails I offer no particular opinion on whether they make sense, but neither of these operations seems to be forbidden: ```` a = x*S.true b = x + S.true ```` But only `a` can be printed (and gives `x*True`), whereas `print(b)` and `pprint(b)` give: ```` AttributeError: 'BooleanTrue' object has no attribute 'as_coeff_Mul' ````
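A minimal sketch of the behaviour the fix above enforces. It assumes a SymPy build that contains the `BooleanAtom._op_priority`/`_noop` overrides from the patch; the variable names are purely illustrative and are not taken from the issue.

```python
from sympy import S, Symbol

x = Symbol('x')

# Before the fix, x*S.true built and printed as x*True, while printing
# x + S.true failed inside the printer with AttributeError. With the
# higher _op_priority and the _noop overrides in place, both operations
# are rejected up front:
for build in (lambda: x + S.true, lambda: x * S.true):
    try:
        build()
    except TypeError as exc:
        print(exc)  # "BooleanAtom not allowed in this context."
```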
sympy/sympy
diff --git a/sympy/logic/tests/test_boolalg.py b/sympy/logic/tests/test_boolalg.py index 9ad681d7c3..e49da281c6 100644 --- a/sympy/logic/tests/test_boolalg.py +++ b/sympy/logic/tests/test_boolalg.py @@ -1,3 +1,5 @@ +from __future__ import division + from sympy.assumptions.ask import Q from sympy.core.numbers import oo from sympy.core.relational import Equality @@ -725,3 +727,23 @@ def test_truth_table(): assert list(truth_table(And(x, y), [x, y], input=False)) == [False, False, False, True] assert list(truth_table(x | y, [x, y], input=False)) == [False, True, True, True] assert list(truth_table(x >> y, [x, y], input=False)) == [True, True, False, True] + + +def test_issue_8571(): + x = symbols('x') + for t in (S.true, S.false): + raises(TypeError, lambda: +t) + raises(TypeError, lambda: -t) + raises(TypeError, lambda: abs(t)) + # use int(bool(t)) to get 0 or 1 + raises(TypeError, lambda: int(t)) + + for o in [S.Zero, S.One, x]: + for _ in range(2): + raises(TypeError, lambda: o + t) + raises(TypeError, lambda: o - t) + raises(TypeError, lambda: o % t) + raises(TypeError, lambda: o*t) + raises(TypeError, lambda: o/t) + raises(TypeError, lambda: o**t) + o, t = t, o # do again in reversed order diff --git a/sympy/polys/tests/test_polyutils.py b/sympy/polys/tests/test_polyutils.py index 7b27d42f14..bc807d4217 100644 --- a/sympy/polys/tests/test_polyutils.py +++ b/sympy/polys/tests/test_polyutils.py @@ -285,3 +285,4 @@ def test_dict_from_expr(): assert dict_from_expr(Eq(x, 1)) == \ ({(0,): -Integer(1), (1,): Integer(1)}, (x,)) raises(PolynomialError, lambda: dict_from_expr(A*B - B*A)) + raises(PolynomialError, lambda: dict_from_expr(S.true)) diff --git a/sympy/polys/tests/test_subresultants_qq_zz.py b/sympy/polys/tests/test_subresultants_qq_zz.py index a377a936c2..737bdf7752 100644 --- a/sympy/polys/tests/test_subresultants_qq_zz.py +++ b/sympy/polys/tests/test_subresultants_qq_zz.py @@ -1,8 +1,6 @@ from sympy import var, sturm, subresultants, prem, pquo -from sympy.matrices import Matrix, eye -from sympy.polys.subresultants_qq_zz import (sylvester, bezout, - subresultants_bezout, modified_subresultants_bezout, - process_bezout_output, backward_eye, +from sympy.matrices import Matrix +from sympy.polys.subresultants_qq_zz import (sylvester, sturm_pg, sturm_q, sturm_amv, euclid_pg, euclid_q, euclid_amv, modified_subresultants_pg, subresultants_pg, subresultants_amv_q, quo_z, rem_z, subresultants_amv, @@ -34,44 +32,6 @@ def test_sylvester(): assert sylvester(x**3 - 7*x + 7, 3*x**2 - 7, x, 2) == Matrix([ [1, 0, -7, 7, 0, 0], [0, 3, 0, -7, 0, 0], [0, 1, 0, -7, 7, 0], [0, 0, 3, 0, -7, 0], [0, 0, 1, 0, -7, 7], [0, 0, 0, 3, 0, -7]]) -def test_bezout(): - x = var('x') - - p = -2*x**5+7*x**3+9*x**2-3*x+1 - q = -10*x**4+21*x**2+18*x-3 - assert bezout(p, q, x, 'bz').det() == sylvester(p, q, x, 2).det() - assert bezout(p, q, x, 'bz').det() != sylvester(p, q, x, 1).det() - assert bezout(p, q, x, 'prs') == backward_eye(5) * bezout(p, q, x, 'bz') * backward_eye(5) - -def test_subresultants_bezout(): - x = var('x') - - p = x**8 + x**6 - 3*x**4 - 3*x**3 + 8*x**2 + 2*x - 5 - q = 3*x**6 + 5*x**4 - 4*x**2 - 9*x + 21 - assert subresultants_bezout(p, q, x) == subresultants(p, q, x) - assert subresultants_bezout(p, q, x)[-1] == sylvester(p, q, x).det() - assert subresultants_bezout(p, q, x) != euclid_amv(p, q, x) - amv_factors = [1, 1, -1, 1, -1, 1] - assert subresultants_bezout(p, q, x) == [i*j for i, j in zip(amv_factors, modified_subresultants_amv(p, q, x))] - - p = x**3 - 7*x + 7 - q = 3*x**2 - 7 - assert 
subresultants_bezout(p, q, x) == euclid_amv(p, q, x) - -def test_modified_subresultants_bezout(): - x = var('x') - - p = x**8 + x**6 - 3*x**4 - 3*x**3 + 8*x**2 + 2*x - 5 - q = 3*x**6 + 5*x**4 - 4*x**2 - 9*x + 21 - amv_factors = [1, 1, -1, 1, -1, 1] - assert modified_subresultants_bezout(p, q, x) == [i*j for i, j in zip(amv_factors, subresultants_amv(p, q, x))] - assert modified_subresultants_bezout(p, q, x)[-1] != sylvester(p + x**8, q, x).det() - assert modified_subresultants_bezout(p, q, x) != sturm_amv(p, q, x) - - p = x**3 - 7*x + 7 - q = 3*x**2 - 7 - assert modified_subresultants_bezout(p, q, x) == sturm_amv(p, q, x) - assert modified_subresultants_bezout(-p, q, x) != sturm_amv(-p, q, x) def test_sturm_pg(): x = var('x')
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 3, "issue_text_score": 0, "test_score": 3 }, "num_modified_files": 5 }
0.7
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.7", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///croot/attrs_1668696182826/work certifi @ file:///croot/certifi_1671487769961/work/certifi flit_core @ file:///opt/conda/conda-bld/flit-core_1644941570762/work/source/flit_core importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1648562407465/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work mpmath==1.3.0 packaging @ file:///croot/packaging_1671697413597/work pluggy @ file:///tmp/build/80754af9/pluggy_1648042572264/work py @ file:///opt/conda/conda-bld/py_1644396412707/work pytest==7.1.2 -e git+https://github.com/sympy/sympy.git@425ee8695a04a17b71ea15fa045fa95efffbeb1d#egg=sympy tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work typing_extensions @ file:///croot/typing_extensions_1669924550328/work zipp @ file:///croot/zipp_1672387121353/work
name: sympy channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=22.1.0=py37h06a4308_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2022.12.7=py37h06a4308_0 - flit-core=3.6.0=pyhd3eb1b0_0 - importlib-metadata=4.11.3=py37h06a4308_0 - importlib_metadata=4.11.3=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=22.0=py37h06a4308_0 - pip=22.3.1=py37h06a4308_0 - pluggy=1.0.0=py37h06a4308_1 - py=1.11.0=pyhd3eb1b0_0 - pytest=7.1.2=py37h06a4308_0 - python=3.7.16=h7a1cb2a_0 - readline=8.2=h5eee18b_0 - setuptools=65.6.3=py37h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tomli=2.0.1=py37h06a4308_0 - typing_extensions=4.4.0=py37h06a4308_0 - wheel=0.38.4=py37h06a4308_0 - xz=5.6.4=h5eee18b_1 - zipp=3.11.0=py37h06a4308_0 - zlib=1.2.13=h5eee18b_1 - pip: - mpmath==1.3.0 prefix: /opt/conda/envs/sympy
[ "sympy/logic/tests/test_boolalg.py::test_issue_8571", "sympy/polys/tests/test_polyutils.py::test_dict_from_expr" ]
[]
[ "sympy/logic/tests/test_boolalg.py::test_overloading", "sympy/logic/tests/test_boolalg.py::test_And", "sympy/logic/tests/test_boolalg.py::test_Or", "sympy/logic/tests/test_boolalg.py::test_Xor", "sympy/logic/tests/test_boolalg.py::test_Not", "sympy/logic/tests/test_boolalg.py::test_Nand", "sympy/logic/tests/test_boolalg.py::test_Nor", "sympy/logic/tests/test_boolalg.py::test_Implies", "sympy/logic/tests/test_boolalg.py::test_Equivalent", "sympy/logic/tests/test_boolalg.py::test_equals", "sympy/logic/tests/test_boolalg.py::test_simplification", "sympy/logic/tests/test_boolalg.py::test_bool_map", "sympy/logic/tests/test_boolalg.py::test_bool_symbol", "sympy/logic/tests/test_boolalg.py::test_is_boolean", "sympy/logic/tests/test_boolalg.py::test_subs", "sympy/logic/tests/test_boolalg.py::test_commutative", "sympy/logic/tests/test_boolalg.py::test_and_associativity", "sympy/logic/tests/test_boolalg.py::test_or_assicativity", "sympy/logic/tests/test_boolalg.py::test_double_negation", "sympy/logic/tests/test_boolalg.py::test_eliminate_implications", "sympy/logic/tests/test_boolalg.py::test_conjuncts", "sympy/logic/tests/test_boolalg.py::test_disjuncts", "sympy/logic/tests/test_boolalg.py::test_distribute", "sympy/logic/tests/test_boolalg.py::test_to_nnf", "sympy/logic/tests/test_boolalg.py::test_to_cnf", "sympy/logic/tests/test_boolalg.py::test_to_dnf", "sympy/logic/tests/test_boolalg.py::test_to_int_repr", "sympy/logic/tests/test_boolalg.py::test_is_nnf", "sympy/logic/tests/test_boolalg.py::test_is_cnf", "sympy/logic/tests/test_boolalg.py::test_is_dnf", "sympy/logic/tests/test_boolalg.py::test_ITE", "sympy/logic/tests/test_boolalg.py::test_ITE_diff", "sympy/logic/tests/test_boolalg.py::test_is_literal", "sympy/logic/tests/test_boolalg.py::test_operators", "sympy/logic/tests/test_boolalg.py::test_true_false", "sympy/logic/tests/test_boolalg.py::test_bool_as_set", "sympy/logic/tests/test_boolalg.py::test_all_or_nothing", "sympy/logic/tests/test_boolalg.py::test_canonical_atoms", "sympy/logic/tests/test_boolalg.py::test_issue_8777", "sympy/logic/tests/test_boolalg.py::test_issue_8975", "sympy/logic/tests/test_boolalg.py::test_term_to_integer", "sympy/logic/tests/test_boolalg.py::test_integer_to_term", "sympy/logic/tests/test_boolalg.py::test_truth_table", "sympy/polys/tests/test_polyutils.py::test__nsort", "sympy/polys/tests/test_polyutils.py::test__sort_gens", "sympy/polys/tests/test_polyutils.py::test__unify_gens", "sympy/polys/tests/test_polyutils.py::test__analyze_gens", "sympy/polys/tests/test_polyutils.py::test__sort_factors", "sympy/polys/tests/test_polyutils.py::test__dict_from_expr_if_gens", "sympy/polys/tests/test_polyutils.py::test__dict_from_expr_no_gens", "sympy/polys/tests/test_polyutils.py::test__parallel_dict_from_expr_if_gens", "sympy/polys/tests/test_polyutils.py::test__parallel_dict_from_expr_no_gens", "sympy/polys/tests/test_polyutils.py::test_parallel_dict_from_expr", "sympy/polys/tests/test_subresultants_qq_zz.py::test_sylvester", "sympy/polys/tests/test_subresultants_qq_zz.py::test_sturm_pg", "sympy/polys/tests/test_subresultants_qq_zz.py::test_sturm_q", "sympy/polys/tests/test_subresultants_qq_zz.py::test_sturm_amv", "sympy/polys/tests/test_subresultants_qq_zz.py::test_euclid_pg", "sympy/polys/tests/test_subresultants_qq_zz.py::test_euclid_q", "sympy/polys/tests/test_subresultants_qq_zz.py::test_euclid_amv", "sympy/polys/tests/test_subresultants_qq_zz.py::test_modified_subresultants_pg", "sympy/polys/tests/test_subresultants_qq_zz.py::test_subresultants_pg", 
"sympy/polys/tests/test_subresultants_qq_zz.py::test_subresultants_amv_q", "sympy/polys/tests/test_subresultants_qq_zz.py::test_rem_z", "sympy/polys/tests/test_subresultants_qq_zz.py::test_quo_z", "sympy/polys/tests/test_subresultants_qq_zz.py::test_subresultants_amv", "sympy/polys/tests/test_subresultants_qq_zz.py::test_modified_subresultants_amv", "sympy/polys/tests/test_subresultants_qq_zz.py::test_subresultants_rem", "sympy/polys/tests/test_subresultants_qq_zz.py::test_subresultants_vv", "sympy/polys/tests/test_subresultants_qq_zz.py::test_subresultants_vv_2" ]
[]
BSD
429
collective__icalendar-185
6888bbe02042cd65b12a6d855b527a964a4b823b
2016-02-14 13:25:31
6888bbe02042cd65b12a6d855b527a964a4b823b
untitaker: Could be a property that checks the bool value of `self.errors`. stlaz: I would rather like to see it as a property dependent on self.errors, too. You could do something like: ~~~python class Component: @property def is_broken(self): return bool(self.errors) ~~~
diff --git a/CHANGES.rst b/CHANGES.rst index 5fbf217..405ebcb 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -11,6 +11,7 @@ New: Fixes: - Fix testsuite for use with ``dateutil>=2.5``. Refs #195. +- Reintroduce cal.Component.is_broken that was removed with 3.9.2 [geier] 3.9.2 (2016-02-05) diff --git a/src/icalendar/cal.py b/src/icalendar/cal.py index 9828c54..8448dba 100644 --- a/src/icalendar/cal.py +++ b/src/icalendar/cal.py @@ -106,6 +106,10 @@ class Component(CaselessDict): """ return True if not (list(self.values()) + self.subcomponents) else False # noqa + @property + def is_broken(self): + return bool(self.errors) + ############################# # handling of property values
incompatible changes in 3.9.2 With 70a7b5a16748afbf0d48ca180c2b7613fdd7e7d0 we introduced some backwards-incompatible changes: * `Component.is_broken` got replaced with `Component.errors` * events with a `RDATE;VALUE=PERIOD:19970101T180000Z/19970102T070000Z,19970109T180000Z/PT5H30M` component still had an `RDATE` with a `VALUE=PERIOD` param before; now they are `RDATE:None` While I do agree with both changes, I think they should have been deferred to the 4.0.0 release. Because we don't have `VALUE=PERIOD` anyway, I think we can leave this one as it is, but I believe we should bring back `Component.is_broken` for the 3.9.3 release.
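A minimal usage sketch of the restored property. It assumes an icalendar release that includes the `Component.is_broken` patch above; the malformed input (a stray `X` content line, as in the issue-104 regression test) is an illustrative assumption, not taken from the issue.

```python
import icalendar

# Illustrative malformed VEVENT: the bare "X" line cannot be parsed into a
# name/parameters/value triple, so a parse error is recorded on the component.
ical_str = "BEGIN:VEVENT\r\nSUMMARY:demo\r\nX\r\nEND:VEVENT"

event = icalendar.Calendar.from_ical(ical_str)
print(event.errors)     # collected parse problems as (property, message) pairs
print(event.is_broken)  # True, the restored shortcut for bool(event.errors)
```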
collective/icalendar
diff --git a/src/icalendar/tests/test_fixed_issues.py b/src/icalendar/tests/test_fixed_issues.py index ae29535..6b375d3 100644 --- a/src/icalendar/tests/test_fixed_issues.py +++ b/src/icalendar/tests/test_fixed_issues.py @@ -200,6 +200,7 @@ X END:VEVENT""" event = icalendar.Calendar.from_ical(ical_str) self.assertTrue(isinstance(event, icalendar.Event)) + self.assertTrue(event.is_broken) # REMOVE FOR NEXT MAJOR RELEASE self.assertEqual( event.errors, [(None, "Content line could not be parsed into parts: 'X': Invalid content line")] # noqa
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_git_commit_hash", "has_many_modified_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 0, "test_score": 2 }, "num_modified_files": 2 }
3.9
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[test]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio" ], "pre_install": null, "python": "3.9", "reqs_path": [ "requirements_docs.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.16 babel==2.17.0 certifi==2025.1.31 charset-normalizer==3.4.1 coverage==7.8.0 docutils==0.21.2 exceptiongroup==1.2.2 execnet==2.1.1 -e git+https://github.com/collective/icalendar.git@6888bbe02042cd65b12a6d855b527a964a4b823b#egg=icalendar idna==3.10 imagesize==1.4.1 importlib_metadata==8.6.1 iniconfig==2.1.0 Jinja2==3.1.6 MarkupSafe==3.0.2 packaging==24.2 pluggy==1.5.0 Pygments==2.19.1 pytest==8.3.5 pytest-asyncio==0.26.0 pytest-cov==6.0.0 pytest-mock==3.14.0 pytest-xdist==3.6.1 python-dateutil==2.9.0.post0 pytz==2025.2 requests==2.32.3 six==1.17.0 snowballstemmer==2.2.0 Sphinx==7.4.7 sphinx-rtd-theme==3.0.2 sphinxcontrib-applehelp==2.0.0 sphinxcontrib-devhelp==2.0.0 sphinxcontrib-htmlhelp==2.1.0 sphinxcontrib-jquery==4.1 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==2.0.0 sphinxcontrib-serializinghtml==2.0.0 swebench_matterhorn @ file:///swebench_matterhorn tomli==2.2.1 typing_extensions==4.13.0 urllib3==2.3.0 zipp==3.21.0
name: icalendar channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.16 - babel==2.17.0 - certifi==2025.1.31 - charset-normalizer==3.4.1 - coverage==7.8.0 - docutils==0.21.2 - exceptiongroup==1.2.2 - execnet==2.1.1 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==8.6.1 - iniconfig==2.1.0 - jinja2==3.1.6 - markupsafe==3.0.2 - packaging==24.2 - pluggy==1.5.0 - pygments==2.19.1 - pytest==8.3.5 - pytest-asyncio==0.26.0 - pytest-cov==6.0.0 - pytest-mock==3.14.0 - pytest-xdist==3.6.1 - python-dateutil==2.9.0.post0 - pytz==2025.2 - requests==2.32.3 - six==1.17.0 - snowballstemmer==2.2.0 - sphinx==7.4.7 - sphinx-rtd-theme==3.0.2 - sphinxcontrib-applehelp==2.0.0 - sphinxcontrib-devhelp==2.0.0 - sphinxcontrib-htmlhelp==2.1.0 - sphinxcontrib-jquery==4.1 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==2.0.0 - sphinxcontrib-serializinghtml==2.0.0 - swebench-matterhorn==0.0.0 - tomli==2.2.1 - typing-extensions==4.13.0 - urllib3==2.3.0 - zipp==3.21.0 prefix: /opt/conda/envs/icalendar
[ "src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_104__ignore_exceptions" ]
[]
[ "src/icalendar/tests/test_fixed_issues.py::TestIssues::test_index_error_issue", "src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_100", "src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_101", "src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_104__no_ignore_exceptions", "src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_112", "src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_116", "src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_142", "src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_143", "src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_157", "src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_168", "src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_178", "src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_53", "src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_55", "src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_58", "src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_64", "src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_70", "src/icalendar/tests/test_fixed_issues.py::TestIssues::test_issue_82" ]
[]
BSD License
430
sphinx-gallery__sphinx-gallery-97
e6273037ebce248851e74b96a881c85dc61fe951
2016-02-14 18:30:23
4dd2df0365bd1929ad09fa245f6ac74e61a5dd64
Titan-C: More of a design question. Now that there is a non-zero exit code on example building, does it still make sense to have a non-building example in the gallery, which is then built by travis and will always fail? GaelVaroquaux: > More of a design question. Now that there is a non-zero exit code on > example building, does it still make sense to have a non-building > example in the gallery, which is then built by travis and will always > fail? I think it is a question left to the user whether to fail the build or not. Titan-C: This is working quite well; it needs a more systematic test suite, I believe. I have written some documentation to explain my defaults on this one. It should also fix #105 and fix #94, because the first image is no longer overwritten. Titan-C: Travis is unhappy for the Mayavi test case. I can't figure out why it does not work; I had solved that problem with the pyface forced update. And if I redo it in a clean virtual environment it works; somehow in travis pyface does not get updated. lesteve: @Titan-C you need to rebase on master, unfortunately! This is probably due to #110, which was merged. lesteve: @Titan-C still needs a rebase. Titan-C: @lesteve Now it is working
diff --git a/.travis.yml b/.travis.yml index 943cfde..a1b0a30 100644 --- a/.travis.yml +++ b/.travis.yml @@ -56,4 +56,5 @@ script: - if [ "$DISTRIB" == "ubuntu" ]; then python setup.py nosetests; fi - if [ "$DISTRIB" == "conda" ]; then nosetests; fi - cd doc + - make html-noplot - make html diff --git a/CHANGES.rst b/CHANGES.rst index 1675f21..1b7b378 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -6,11 +6,19 @@ git master New features '''''''''''' - +* Summary of failing examples with traceback at the end of the sphinx + build. By default the build exits with a 1 exit code if an example + has failed. A list of examples that are expected to fail can be + defined in `conf.py` and exit the build with 0 + exit code. Alternatively it is possible to exit the build as soon as + one example has failed. * Print aggregated and sorted list of computation times of all examples in the console during the build. * For examples that create multiple figures, set the thumbnail image. - +* The ``plot_gallery`` and ``abort_on_example_error`` options can now + be specified in ``sphinx_gallery_conf``. The build option (``-D`` + flag passed to ``sphinx-build``) takes precedence over the + ``sphinx_gallery_conf`` option. v0.1.2 ------ @@ -41,7 +49,7 @@ Example scripts are now available for download as IPython Notebooks `#75 <https://github.com/sphinx-gallery/sphinx-gallery/pull/75>`_ New features ------------- +'''''''''''' * Configurable filename pattern to select which example scripts are executed while building the Gallery diff --git a/README.rst b/README.rst index 9109581..b5f146e 100644 --- a/README.rst +++ b/README.rst @@ -26,7 +26,6 @@ Who uses Sphinx-Gallery * `GIMLi <http://www.pygimli.org/_examples_auto/index.html>`_ * `Nestle <http://kbarbary.github.io/nestle/examples/index.html>`_ * `pyRiemann <http://pythonhosted.org/pyriemann/auto_examples/index.html>`_ -* `scikit-image <http://scikit-image.org/docs/dev/auto_examples/>`_ Getting the package diff --git a/doc/advanced_configuration.rst b/doc/advanced_configuration.rst index d129d6b..fc01992 100644 --- a/doc/advanced_configuration.rst +++ b/doc/advanced_configuration.rst @@ -55,11 +55,11 @@ you would do: } Here, one should escape the dot ``'\.'`` as otherwise python `regular expressions`_ matches any character. Nevertheless, as -one is targetting a specific file, it is most certainly going to match the dot in the filename. +one is targeting a specific file, it is most certainly going to match the dot in the filename. Similarly, to build only examples in a specific directory, you can do: -.. code-blocK:: python +.. code-block:: python sphinx_gallery_conf = { 'filename_pattern' : '/directory/plot_' @@ -251,7 +251,13 @@ your ``Makefile`` with:: @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." Remember that for ``Makefile`` white space is significant and the indentation are tabs -and not spaces +and not spaces. + +Alternatively, you can add the ``plot_gallery`` option to the +``sphinx_gallery_conf`` dictionary inside your ``conf.py`` +configuration file to have it as a default. The highest precedence is +always given to the `-D` flag of the ``sphinx-build`` command. + Dealing with failing Gallery example scripts ============================================ @@ -267,7 +273,17 @@ failing code block. Refer to example :ref:`sphx_glr_auto_examples_plot_raise.py` to view the default behavior. -An extra functionality of Sphinx-Gallery is the early fail option. 
In +The build is also failed exiting with code 1 and giving you a summary +of the failed examples with their respective traceback. This way you +are aware of failing examples right after the build and can find them +easily. + +There are some additional options at your hand to deal with broken examples. + +Abort build on first fail +------------------------- + +Sphinx-Gallery provides the early fail option. In this mode the gallery build process breaks as soon as an exception occurs in the execution of the examples scripts. To activate this behavior you need to pass a flag at the build process. It can be done @@ -279,6 +295,34 @@ by including in your ``Makefile``:: @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." Remember that for ``Makefile`` white space is significant and the indentation are tabs -and not spaces +and not spaces. + +Alternatively, you can add the ``abort_on_example_error`` option to +the ``sphinx_gallery_conf`` dictionary inside your ``conf.py`` +configuration file to have it as a default. The highest precedence is +always given to the `-D` flag of the ``sphinx-build`` command. + + +Don't fail the build on exit +---------------------------- + +It might be the case that you want to keep the gallery even with +failed examples. Thus you can configure Sphinx-Gallery to allow +certain examples to fail and still exit with a 0 exit code. For this +you need to list all the examples you want to allow to fail during +build. Change your `conf.py` accordingly: + + +.. code-block:: python + + sphinx_gallery_conf = { + ... + 'expected_failing_examples': ['../examples/plot_raise.py'] + } + +Here you list the examples you allow to fail during the build process, +keep in mind to specify the full relative path from your `conf.py` to +the example script. + .. _regular expressions: https://docs.python.org/2/library/re.html diff --git a/doc/conf.py b/doc/conf.py index 788b37e..04aad13 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -32,6 +32,7 @@ import sphinx_gallery # ones. extensions = [ 'sphinx.ext.autodoc', + 'sphinx.ext.napoleon', 'sphinx.ext.autosummary', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', @@ -114,6 +115,7 @@ pygments_style = 'sphinx' # a list of builtin themes. html_theme = 'default' + def setup(app): app.add_stylesheet('theme_override.css') @@ -199,22 +201,22 @@ htmlhelp_basename = 'Sphinx-Gallerydoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', -# Additional stuff for the LaTeX preamble. -#'preamble': '', + # Additional stuff for the LaTeX preamble. + #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). 
latex_documents = [ - ('index', 'Sphinx-Gallery.tex', u'Sphinx-Gallery Documentation', - u'Óscar Nájera', 'manual'), + ('index', 'Sphinx-Gallery.tex', u'Sphinx-Gallery Documentation', + u'Óscar Nájera', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of @@ -257,9 +259,9 @@ man_pages = [ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - ('index', 'Sphinx-Gallery', u'Sphinx-Gallery Documentation', - u'Óscar Nájera', 'Sphinx-Gallery', 'One line description of project.', - 'Miscellaneous'), + ('index', 'Sphinx-Gallery', u'Sphinx-Gallery Documentation', + u'Óscar Nájera', 'Sphinx-Gallery', 'One line description of project.', + 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. @@ -304,4 +306,5 @@ sphinx_gallery_conf = { 'examples_dirs': examples_dirs, 'gallery_dirs': gallery_dirs, 'find_mayavi_figures': find_mayavi_figures, - } + 'expected_failing_examples': ['../examples/plot_raise.py'] +} diff --git a/doc/index.rst b/doc/index.rst index 83ef58e..789e22d 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -55,8 +55,6 @@ Features with the :ref:`notebook_examples_syntax` -Sphinx-Gallery Show: :ref:`examples-index` ------------------------------------------- Contents: --------- @@ -73,6 +71,10 @@ Contents: auto_mayavi_examples/index changes +Sphinx-Gallery Show: :ref:`examples-index` +'''''''''''''''''''''''''''''''''''''''''' + + Indices and tables ================== diff --git a/examples/plot_quantum.py b/examples/plot_quantum.py index 527a641..18300eb 100644 --- a/examples/plot_quantum.py +++ b/examples/plot_quantum.py @@ -60,7 +60,7 @@ mu = np.linspace(0, 3, 800) for b in [10, 20, 30]: n = 2 * (np.exp(b * (mu - 1)) + np.exp(b * (2 * mu - 3))) / \ (1 + np.exp(b * (mu - 1)) * (2 + np.exp(b * (mu - 2)))) - plt.plot(mu, n, label=r"$\beta={}$".format(b)) + plt.plot(mu, n, label=r"$\beta={0}$".format(b)) plt.xlabel(r'$\mu$ ($\epsilon=1$, $U=1$)') plt.ylabel(r'$\langle N \rangle=\langle n_\uparrow \rangle+\langle n_\downarrow\rangle$') plt.legend(loc=0) diff --git a/examples/plot_raise.py b/examples/plot_raise.py index 88a05de..24e0152 100644 --- a/examples/plot_raise.py +++ b/examples/plot_raise.py @@ -10,14 +10,6 @@ image stamp. Thus allowing easy identification in the gallery display. You also get the python traceback of the failed code block """ -iae - -############################################################################### -# Sphinx gallery as it executes scripts by block will continue -# evaluating the script after exceptions, but there is no warranty -# figure ordering will continue to match block's code. Anyway when the -# script is broken, you should try to fix it first. - # Code source: Óscar Nájera # License: BSD 3 clause @@ -26,8 +18,18 @@ import matplotlib.pyplot as plt plt.pcolormesh(np.random.randn(100, 100)) +############################################################################### +# This next block will raise a NameError + +iae + +############################################################################### +# Sphinx gallery will stop executing the remaining code blocks after +# the exception has occurred in the example script. Nevertheless the +# html will still render all the example annotated text and +# code blocks, but no output will be shown. 
############################################################################### -# Here is another error raising Block +# Here is another error raising block but will not be executed plt.plot('Strings are not a valid argument for the plot function') diff --git a/sphinx_gallery/_static/gallery.css b/sphinx_gallery/_static/gallery.css index f4f71ed..86cf0c4 100644 --- a/sphinx_gallery/_static/gallery.css +++ b/sphinx_gallery/_static/gallery.css @@ -1,8 +1,8 @@ /* -Sphinx-Gallery has compatible CSS to fix default sphinx themes +Sphinx-Gallery is has compatible CSS to fix default sphinx themes Tested for Sphinx 1.3.1 for all themes: default, alabaster, sphinxdoc, scrolls, agogo, traditional, nature, haiku, pyramid -Tested for Read the Docs theme 0.1.7 */ +Tested for Read the docs theme 0.1.7 */ .sphx-glr-thumbcontainer { background: #fff; border: solid #fff 1px; @@ -132,5 +132,6 @@ p.sphx-glr-signature a.reference.external { -webkit-border-radius: 5px; border-radius: 5px; padding: 3px; - font-size: 75%; + position: absolute; + right: 15px; } diff --git a/sphinx_gallery/gen_gallery.py b/sphinx_gallery/gen_gallery.py index 9b742cf..7e59381 100644 --- a/sphinx_gallery/gen_gallery.py +++ b/sphinx_gallery/gen_gallery.py @@ -12,12 +12,27 @@ when building the documentation. from __future__ import division, print_function, absolute_import +import copy import re import os from . import glr_path_static from .gen_rst import generate_dir_rst, SPHX_GLR_SIG from .docs_resolv import embed_code_links +DEFAULT_GALLERY_CONF = { + 'filename_pattern': re.escape(os.sep) + 'plot', + 'examples_dirs': os.path.join('..', 'examples'), + 'gallery_dirs': 'auto_examples', + 'mod_example_dir': os.path.join('modules', 'generated'), + 'doc_module': (), + 'reference_url': {}, + # build options + 'plot_gallery': True, + 'abort_on_example_error': False, + 'failing_examples': {}, + 'expected_failing_examples': set(), +} + def clean_gallery_out(build_dir): """Deletes images under the sphx_glr namespace in the build directory""" @@ -55,6 +70,7 @@ def generate_gallery_rst(app): except TypeError: plot_gallery = bool(app.builder.config.plot_gallery) + gallery_conf = copy.deepcopy(DEFAULT_GALLERY_CONF) gallery_conf.update(app.config.sphinx_gallery_conf) gallery_conf.update(plot_gallery=plot_gallery) gallery_conf.update( @@ -143,21 +159,68 @@ def touch_empty_backreferences(app, what, name, obj, options, lines): open(examples_path, 'w').close() -gallery_conf = { - 'filename_pattern': re.escape(os.sep) + 'plot', - 'examples_dirs': '../examples', - 'gallery_dirs': 'auto_examples', - 'mod_example_dir': os.path.join('modules', 'generated'), - 'doc_module': (), - 'reference_url': {}, -} +def sumarize_failing_examples(app, exception): + """Collects the list of falling examples during build and prints them with the traceback + + Raises ValueError if there where failing examples + """ + if exception is not None: + return + + # Under no-plot Examples are not run so nothing to summarize + if not app.config.sphinx_gallery_conf['plot_gallery']: + return + + gallery_conf = app.config.sphinx_gallery_conf + failing_examples = set(gallery_conf['failing_examples']) + expected_failing_examples = set(gallery_conf['expected_failing_examples']) + + examples_expected_to_fail = failing_examples.intersection( + expected_failing_examples) + expected_fail_msg = [] + if examples_expected_to_fail: + expected_fail_msg.append("Examples failing as expected:") + for fail_example in examples_expected_to_fail: + expected_fail_msg.append(fail_example + ' failed leaving 
traceback:\n' + + gallery_conf['failing_examples'][fail_example] + '\n') + print("\n".join(expected_fail_msg)) + + examples_not_expected_to_fail = failing_examples.difference( + expected_failing_examples) + fail_msgs = [] + if examples_not_expected_to_fail: + fail_msgs.append("Unexpected failing examples:") + for fail_example in examples_not_expected_to_fail: + fail_msgs.append(fail_example + ' failed leaving traceback:\n' + + gallery_conf['failing_examples'][fail_example] + '\n') + + examples_not_expected_to_pass = expected_failing_examples.difference( + failing_examples) + if examples_not_expected_to_pass: + fail_msgs.append("Examples expected to fail, but not failling:\n" + + "Please remove this examples from\n" + + "sphinx_gallery_conf['expected_failing_examples']\n" + + "in your conf.py file" + "\n".join(examples_not_expected_to_pass)) + + if fail_msgs: + raise ValueError("Here is a summary of the problems encountered when " + "running the examples\n\n" + "\n".join(fail_msgs) + + "\n" + "-" * 79) + + +def get_default_config_value(key): + def default_getter(conf): + return conf['sphinx_gallery_conf'].get(key, DEFAULT_GALLERY_CONF[key]) + return default_getter def setup(app): """Setup sphinx-gallery sphinx extension""" - app.add_config_value('plot_gallery', True, 'html') - app.add_config_value('abort_on_example_error', False, 'html') - app.add_config_value('sphinx_gallery_conf', gallery_conf, 'html') + app.add_config_value('sphinx_gallery_conf', DEFAULT_GALLERY_CONF, 'html') + for key in ['plot_gallery', 'abort_on_example_error']: + app.add_config_value(key, get_default_config_value(key), 'html') + app.add_stylesheet('gallery.css') if 'sphinx.ext.autodoc' in app._extensions: @@ -165,6 +228,7 @@ def setup(app): app.connect('builder-inited', generate_gallery_rst) + app.connect('build-finished', sumarize_failing_examples) app.connect('build-finished', embed_code_links) diff --git a/sphinx_gallery/gen_rst.py b/sphinx_gallery/gen_rst.py index b6779ad..7385662 100644 --- a/sphinx_gallery/gen_rst.py +++ b/sphinx_gallery/gen_rst.py @@ -96,6 +96,7 @@ class Tee(object): class MixedEncodingStringIO(StringIO): """Helper when both ASCII and unicode strings will be written""" + def write(self, data): if not isinstance(data, unicode): data = data.decode('utf-8') @@ -143,7 +144,7 @@ CODE_OUTPUT = u""".. rst-class:: sphx-glr-script-out SPHX_GLR_SIG = """\n.. rst-class:: sphx-glr-signature - `Generated by Sphinx-Gallery <http://sphinx-gallery.readthedocs.io>`_\n""" + `Generated by Sphinx-Gallery <http://sphinx-gallery.readthedocs.org>`_\n""" def get_docstring_and_rest(filename): @@ -239,7 +240,8 @@ def extract_thumbnail_number(text): """ Pull out the thumbnail image number specified in the docstring. """ # check whether the user has specified a specific thumbnail image - pattr = re.compile("^\s*#\s*sphinx_gallery_thumbnail_number\s*=\s*([0-9]+)\s*$", flags=re.MULTILINE) + pattr = re.compile( + r"^\s*#\s*sphinx_gallery_thumbnail_number\s*=\s*([0-9]+)\s*$", flags=re.MULTILINE) match = pattr.search(text) if match is None: @@ -250,6 +252,7 @@ def extract_thumbnail_number(text): return thumbnail_number + def extract_intro(filename): """ Extract the first paragraph of module-level docstring. 
max:95 char""" @@ -284,34 +287,19 @@ def get_md5sum(src_file): return src_md5 -def check_md5sum_change(src_file): - """Returns True if src_file has a different md5sum""" +def md5sum_is_current(src_file): + """Returns True if src_file has the same md5 hash as the one stored on disk""" src_md5 = get_md5sum(src_file) src_md5_file = src_file + '.md5' - src_file_changed = True if os.path.exists(src_md5_file): with open(src_md5_file, 'r') as file_checksum: ref_md5 = file_checksum.read() - if src_md5 == ref_md5: - src_file_changed = False - - if src_file_changed: - with open(src_md5_file, 'w') as file_checksum: - file_checksum.write(src_md5) - - return src_file_changed - - -def _plots_are_current(src_file, image_path): - """Test existence of image file and no change in md5sum of - example""" - has_image = os.path.exists(image_path) - src_file_changed = check_md5sum_change(src_file) + return src_md5 == ref_md5 - return has_image and not src_file_changed + return False def save_figures(image_path, fig_count, gallery_conf): @@ -323,10 +311,15 @@ def save_figures(image_path, fig_count, gallery_conf): Path where plots are saved (format string which accepts figure number) fig_count : int Previous figure number count. Figure number add from this number + gallery_conf : dict + Contains the configuration of Sphinx-Gallery Returns ------- - list of strings containing the full path to each figure + figure_list : list of str + strings containing the full path to each figure + images_rst : str + rst code to embed the images in the document """ figure_list = [] @@ -362,7 +355,18 @@ def save_figures(image_path, fig_count, gallery_conf): figure_list.append(current_fig) mlab.close(all=True) - return figure_list + # Depending on whether we have one or more figures, we're using a + # horizontal list or a single rst call to 'image'. 
+ images_rst = "" + if len(figure_list) == 1: + figure_name = figure_list[0] + images_rst = SINGLE_IMAGE % figure_name.lstrip('/') + elif len(figure_list) > 1: + images_rst = HLIST_HEADER + for figure_name in figure_list: + images_rst += HLIST_IMAGE_TEMPLATE % figure_name.lstrip('/') + + return figure_list, images_rst def scale_image(in_fname, out_fname, max_width, max_height): @@ -410,17 +414,28 @@ def scale_image(in_fname, out_fname, max_width, max_height): generated images') -def save_thumbnail(thumbnail_image_path, base_image_name, gallery_conf): +def save_thumbnail(image_path_template, src_file, gallery_conf): """Save the thumbnail image""" + # read specification of the figure to display as thumbnail from main text + _, content = get_docstring_and_rest(src_file) + thumbnail_number = extract_thumbnail_number(content) + thumbnail_image_path = image_path_template.format(thumbnail_number) + thumb_dir = os.path.join(os.path.dirname(thumbnail_image_path), 'thumb') if not os.path.exists(thumb_dir): os.makedirs(thumb_dir) + base_image_name = os.path.splitext(os.path.basename(src_file))[0] thumb_file = os.path.join(thumb_dir, 'sphx_glr_%s_thumb.png' % base_image_name) - if os.path.exists(thumbnail_image_path): + if src_file in gallery_conf['failing_examples']: + broken_img = os.path.join(glr_path_static(), 'broken_example.png') + scale_image(broken_img, thumb_file, 200, 140) + + elif os.path.exists(thumbnail_image_path): scale_image(thumbnail_image_path, thumb_file, 400, 280) + elif not os.path.exists(thumb_file): # create something to replace the thumbnail default_thumb_file = os.path.join(glr_path_static(), 'no_image.png') @@ -475,19 +490,21 @@ def generate_dir_rst(src_dir, target_dir, gallery_conf, seen_backrefs): return fhindex, computation_times -def execute_script(code_block, example_globals, image_path, fig_count, - src_file, gallery_conf): +def execute_code_block(code_block, example_globals, + block_vars, gallery_conf): """Executes the code block of the example file""" time_elapsed = 0 stdout = '' - # We need to execute the code - print('plotting code blocks in %s' % src_file) + # If example is not suitable to run, skip executing its blocks + if not block_vars['execute_script']: + return stdout, time_elapsed plt.close('all') cwd = os.getcwd() # Redirect output to stdout and orig_stdout = sys.stdout + src_file = block_vars['src_file'] try: # First cd in the original example dir, so that any file @@ -510,62 +527,62 @@ def execute_script(code_block, example_globals, image_path, fig_count, if my_stdout: stdout = CODE_OUTPUT.format(indent(my_stdout, u' ' * 4)) os.chdir(cwd) - figure_list = save_figures(image_path, fig_count, gallery_conf) - - # Depending on whether we have one or more figures, we're using a - # horizontal list or a single rst call to 'image'. 
- image_list = "" - if len(figure_list) == 1: - figure_name = figure_list[0] - image_list = SINGLE_IMAGE % figure_name.lstrip('/') - elif len(figure_list) > 1: - image_list = HLIST_HEADER - for figure_name in figure_list: - image_list += HLIST_IMAGE_TEMPLATE % figure_name.lstrip('/') + fig_list, images_rst = save_figures( + block_vars['image_path'], block_vars['fig_count'], gallery_conf) + fig_num = len(fig_list) except Exception: formatted_exception = traceback.format_exc() - sys.stdout = orig_stdout # need this here so these lines don't bomb - print(80 * '_') - print('%s is not compiling:' % src_file) - print(formatted_exception) - print(80 * '_') - - figure_list = [] - image_list = codestr2rst(formatted_exception, lang='pytb') + fail_example_warning = 80 * '_' + '\n' + \ + '%s failed to execute correctly:' % src_file + \ + formatted_exception + 80 * '_' + '\n' + warnings.warn(fail_example_warning) - # Overrides the output thumbnail in the gallery for easy identification - broken_img = os.path.join(glr_path_static(), 'broken_example.png') - shutil.copyfile(broken_img, os.path.join(cwd, image_path.format(1))) - fig_count += 1 # raise count to avoid overwriting image + fig_num = 0 + images_rst = codestr2rst(formatted_exception, lang='pytb') # Breaks build on first example error # XXX This check can break during testing e.g. if you uncomment the # `raise RuntimeError` by the `my_stdout` call, maybe use `.get()`? if gallery_conf['abort_on_example_error']: raise + # Stores failing file + gallery_conf['failing_examples'][src_file] = formatted_exception + block_vars['execute_script'] = False finally: os.chdir(cwd) sys.stdout = orig_stdout print(" - time elapsed : %.2g sec" % time_elapsed) - code_output = u"\n{0}\n\n{1}\n\n".format(image_list, stdout) + code_output = u"\n{0}\n\n{1}\n\n".format(images_rst, stdout) + block_vars['fig_count'] += fig_num - return code_output, time_elapsed, fig_count + len(figure_list) + return code_output, time_elapsed def generate_file_rst(fname, target_dir, src_dir, gallery_conf): - """ Generate the rst file for a given example. + """Generate the rst file for a given example. - Returns the amout of code (in characters) of the corresponding - files. 
+ Returns + ------- + amount_of_code : int + character count of the corresponding python script in file + time_elapsed : float + seconds required to run the script """ src_file = os.path.join(src_dir, fname) example_file = os.path.join(target_dir, fname) shutil.copyfile(src_file, example_file) + script_blocks = split_code_and_text_blocks(src_file) + amount_of_code = sum([len(bcontent) + for blabel, bcontent in script_blocks + if blabel == 'code']) + + if md5sum_is_current(example_file): + return amount_of_code, 0 image_dir = os.path.join(target_dir, 'images') if not os.path.exists(image_dir): @@ -575,80 +592,63 @@ def generate_file_rst(fname, target_dir, src_dir, gallery_conf): image_fname = 'sphx_glr_' + base_image_name + '_{0:03}.png' image_path_template = os.path.join(image_dir, image_fname) - script_blocks = split_code_and_text_blocks(example_file) - - # read specification of the figure to display as thumbnail from main text - _, content = get_docstring_and_rest(example_file) - thumbnail_number = extract_thumbnail_number(content) - - amount_of_code = sum([len(bcontent) - for blabel, bcontent in script_blocks - if blabel == 'code']) - - first_image_path = image_path_template.format(1) - if _plots_are_current(example_file, first_image_path): - return amount_of_code, 0 - - time_elapsed = 0 - ref_fname = example_file.replace(os.path.sep, '_') example_rst = """\n\n.. _sphx_glr_{0}:\n\n""".format(ref_fname) example_nb = Notebook(fname, target_dir) filename_pattern = gallery_conf.get('filename_pattern') - if re.search(filename_pattern, src_file) and gallery_conf['plot_gallery']: - example_globals = { - # A lot of examples contains 'print(__doc__)' for example in - # scikit-learn so that running the example prints some useful - # information. Because the docstring has been separated from - # the code blocks in sphinx-gallery, __doc__ is actually - # __builtin__.__doc__ in the execution context and we do not - # want to print it - '__doc__': '', - # Examples may contain if __name__ == '__main__' guards - # for in example scikit-learn if the example uses multiprocessing - '__name__': '__main__'} - - fig_count = 0 - # A simple example has two blocks: one for the - # example introduction/explanation and one for the code - is_example_notebook_like = len(script_blocks) > 2 - for blabel, bcontent in script_blocks: - if blabel == 'code': - code_output, rtime, fig_count = execute_script(bcontent, - example_globals, - image_path_template, - fig_count, - src_file, - gallery_conf) - - time_elapsed += rtime - example_nb.add_code_cell(bcontent) - - if is_example_notebook_like: - example_rst += codestr2rst(bcontent) + '\n' - example_rst += code_output - else: - example_rst += code_output - if 'sphx-glr-script-out' in code_output: - # Add some vertical space after output - example_rst += "\n\n|\n\n" - example_rst += codestr2rst(bcontent) + '\n' - - else: - example_rst += text2string(bcontent) + '\n' - example_nb.add_markdown_cell(text2string(bcontent)) - else: - for blabel, bcontent in script_blocks: - if blabel == 'code': + execute_script = re.search(filename_pattern, src_file) and gallery_conf[ + 'plot_gallery'] + example_globals = { + # A lot of examples contains 'print(__doc__)' for example in + # scikit-learn so that running the example prints some useful + # information. 
Because the docstring has been separated from + # the code blocks in sphinx-gallery, __doc__ is actually + # __builtin__.__doc__ in the execution context and we do not + # want to print it + '__doc__': '', + # Examples may contain if __name__ == '__main__' guards + # for in example scikit-learn if the example uses multiprocessing + '__name__': '__main__', + } + + # A simple example has two blocks: one for the + # example introduction/explanation and one for the code + is_example_notebook_like = len(script_blocks) > 2 + time_elapsed = 0 + block_vars = {'execute_script': execute_script, 'fig_count': 0, + 'image_path': image_path_template, 'src_file': src_file} + for blabel, bcontent in script_blocks: + if blabel == 'code': + code_output, rtime = execute_code_block(bcontent, + example_globals, + block_vars, + gallery_conf) + + time_elapsed += rtime + example_nb.add_code_cell(bcontent) + + if is_example_notebook_like: example_rst += codestr2rst(bcontent) + '\n' - example_nb.add_code_cell(bcontent) + example_rst += code_output else: - example_rst += bcontent + '\n' - example_nb.add_markdown_cell(text2string(bcontent)) + example_rst += code_output + if 'sphx-glr-script-out' in code_output: + # Add some vertical space after output + example_rst += "\n\n|\n\n" + example_rst += codestr2rst(bcontent) + '\n' - thumbnail_image_path = image_path_template.format(thumbnail_number) - save_thumbnail(thumbnail_image_path, base_image_name, gallery_conf) + else: + example_rst += text2string(bcontent) + '\n' + example_nb.add_markdown_cell(text2string(bcontent)) + + # Writes md5 checksum if example has build correctly + # not failed and was initially meant to run(no-plot shall not cache md5sum) + if block_vars['execute_script']: + with open(example_file + '.md5', 'w') as file_checksum: + file_checksum.write(get_md5sum(example_file)) + + save_thumbnail(image_path_template, src_file, gallery_conf) time_m, time_s = divmod(time_elapsed, 60) example_nb.save_file() diff --git a/sphinx_gallery/notebook.py b/sphinx_gallery/notebook.py index 121e945..c0ee5cd 100644 --- a/sphinx_gallery/notebook.py +++ b/sphinx_gallery/notebook.py @@ -4,7 +4,7 @@ r""" Parser for Jupyter notebooks ============================ -Class that holds the Jupyter notebook information +Class that holds the Ipython notebook information """ # Author: Óscar Nájera @@ -48,7 +48,7 @@ def ipy_notebook_skeleton(): def rst2md(text): """Converts the RST text from the examples docstrigs and comments - into markdown text for the Jupyter notebooks""" + into markdown text for the IPython notebooks""" top_heading = re.compile(r'^=+$\s^([\w\s-]+)^=+$', flags=re.M) text = re.sub(top_heading, r'# \1', text) @@ -64,7 +64,7 @@ def rst2md(text): class Notebook(object): - """Jupyter notebook object + """Ipython notebook object Constructs the file cell-by-cell and writes it at the end"""
Stop example execution as soon as one cell has an error In a large majority of cases, an error when executing one cell, is going to cause further cells to have errors. I think it just adds unnecessary noise: This is the root cause of the problem: ``` File "/home/ubuntu/nilearn/doc/sphinxext/sphinx_gallery/gen_rst.py", line 427, in execute_script exec(code_block, example_globals) File "<string>", line 3, in <module> File "/home/ubuntu/nilearn/nilearn/datasets/func.py", line 481, in fetch_adhd resume=resume, verbose=verbose) File "/home/ubuntu/nilearn/nilearn/datasets/utils.py", line 728, in _fetch_files raise IOError('Fetching aborted: ' + abort) IOError: Fetching aborted: CRC check failed 0x55f58d0f != 0xd6aafce5L ``` And this is all the additional stuff you get from trying to execute cells after the first failure: ``` Traceback (most recent call last): File "/home/ubuntu/nilearn/doc/sphinxext/sphinx_gallery/gen_rst.py", line 427, in execute_script exec(code_block, example_globals) File "<string>", line 6, in <module> NameError: name 'func_filenames' is not defined Traceback (most recent call last): File "/home/ubuntu/nilearn/doc/sphinxext/sphinx_gallery/gen_rst.py", line 427, in execute_script exec(code_block, example_globals) File "<string>", line 4, in <module> NameError: name 'components_img' is not defined Traceback (most recent call last): File "/home/ubuntu/nilearn/doc/sphinxext/sphinx_gallery/gen_rst.py", line 427, in execute_script exec(code_block, example_globals) File "<string>", line 4, in <module> NameError: name 'components_img' is not defined Traceback (most recent call last): File "/home/ubuntu/nilearn/doc/sphinxext/sphinx_gallery/gen_rst.py", line 427, in execute_script exec(code_block, example_globals) File "<string>", line 3, in <module> File "/home/ubuntu/nilearn/nilearn/datasets/func.py", line 481, in fetch_adhd resume=resume, verbose=verbose) File "/home/ubuntu/nilearn/nilearn/datasets/utils.py", line 695, in _fetch_files overwrite=overwrite) File "/home/ubuntu/nilearn/nilearn/datasets/utils.py", line 523, in _fetch_file data = url_opener.open(request) File "/usr/lib/python2.7/urllib2.py", line 404, in open response = self._open(req, data) File "/usr/lib/python2.7/urllib2.py", line 422, in _open '_open', req) File "/usr/lib/python2.7/urllib2.py", line 382, in _call_chain result = func(*args) File "/usr/lib/python2.7/urllib2.py", line 1222, in https_open return self.do_open(httplib.HTTPSConnection, req) File "/usr/lib/python2.7/urllib2.py", line 1184, in do_open raise URLError(err) URLError: <urlopen error [Errno 111] Connection refused| Error while fetching file adhd40_0010064.tgz; dataset fetching aborted.> Traceback (most recent call last): File "/home/ubuntu/nilearn/doc/sphinxext/sphinx_gallery/gen_rst.py", line 427, in execute_script exec(code_block, example_globals) File "<string>", line 7, in <module> NameError: name 'func_filenames' is not defined ```
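The fix boils down to a per-example flag: each code block checks it before executing, and the first failure clears it so the remaining blocks are skipped and only one traceback ends up in the output. A stripped-down, self-contained sketch of that idea (not the actual gen_rst code, whose diff appears above):

```
import traceback


def execute_code_block(code_block, example_globals, block_vars, gallery_conf):
    """Run one block of an example, skipping it if an earlier block failed."""
    if not block_vars['execute_script']:
        return ''  # an earlier block already failed, so this one is skipped
    try:
        exec(code_block, example_globals)
    except Exception:
        # Remember the failing example and stop executing its remaining blocks.
        gallery_conf['failing_examples'][block_vars['src_file']] = traceback.format_exc()
        block_vars['execute_script'] = False
    return ''


conf = {'failing_examples': {}}
state = {'execute_script': True, 'src_file': 'plot_demo.py'}
execute_code_block("1 / 0", {}, state, conf)                 # fails, clears the flag
execute_code_block("print('never runs')", {}, state, conf)   # skipped
print(state['execute_script'], list(conf['failing_examples']))
```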
sphinx-gallery/sphinx-gallery
diff --git a/sphinx_gallery/tests/test_gen_rst.py b/sphinx_gallery/tests/test_gen_rst.py index 787ed79..177bb7c 100644 --- a/sphinx_gallery/tests/test_gen_rst.py +++ b/sphinx_gallery/tests/test_gen_rst.py @@ -8,6 +8,7 @@ from __future__ import (division, absolute_import, print_function, unicode_literals) import ast import codecs +import copy import json import tempfile import re @@ -17,6 +18,7 @@ import shutil from nose.tools import assert_equal, assert_false, assert_true import sphinx_gallery.gen_rst as sg +from sphinx_gallery import gen_gallery from sphinx_gallery import notebook import matplotlib.pylab as plt # Import gen_rst first to enable 'Agg' backend. @@ -119,28 +121,57 @@ def test_md5sums(): file_md5 = sg.get_md5sum(f.name) # verify correct md5sum assert_equal('ea8a570e9f3afc0a7c3f2a17a48b8047', file_md5) - # True because is a new file - assert_true(sg.check_md5sum_change(f.name)) - # False because file has not changed since last check - assert_false(sg.check_md5sum_change(f.name)) - + # False because is a new file + assert_false(sg.md5sum_is_current(f.name)) + # Write md5sum to file to check is current + with open(f.name + '.md5', 'w') as file_checksum: + file_checksum.write(file_md5) + assert_true(sg.md5sum_is_current(f.name)) os.remove(f.name + '.md5') +def build_test_configuration(**kwargs): + """Sets up a test sphinx-gallery configuration""" + + gallery_conf = copy.deepcopy(gen_gallery.DEFAULT_GALLERY_CONF) + gallery_conf.update(examples_dir=tempfile.mkdtemp(), + gallery_dir=tempfile.mkdtemp()) + gallery_conf.update(kwargs) + + return gallery_conf + + +def test_fail_example(): + """Test that failing examples are only executed until failing block""" + + gallery_conf = build_test_configuration(filename_pattern='raise.py') + + failing_code = CONTENT + ['#' * 79, + 'First_test_fail', '#' * 79, 'second_fail'] + + with codecs.open(os.path.join(gallery_conf['examples_dir'], 'raise.py'), + mode='w', encoding='utf-8') as f: + f.write('\n'.join(failing_code)) + + sg.generate_file_rst('raise.py', gallery_conf['gallery_dir'], + gallery_conf['examples_dir'], gallery_conf) + + # read rst file and check if it contains traceback output + + with codecs.open(os.path.join(gallery_conf['gallery_dir'], 'raise.rst'), + mode='r', encoding='utf-8') as f: + ex_failing_blocks = f.read().count('pytb') + if ex_failing_blocks == 0: + raise ValueError('Did not run into errors in bad code') + elif ex_failing_blocks > 1: + raise ValueError('Did not stop executing script after error') + + def test_pattern_matching(): """Test if only examples matching pattern are executed""" - examples_dir = tempfile.mkdtemp() - gallery_dir = tempfile.mkdtemp() - - gallery_conf = { - 'filename_pattern': re.escape(os.sep) + 'plot_0', - 'examples_dirs': examples_dir, - 'gallery_dirs': gallery_dir, - 'plot_gallery': True, - 'mod_example_dir': 'modules/generated', - 'doc_module': (), - 'reference_url': {}, - } + + gallery_conf = build_test_configuration( + filename_pattern=re.escape(os.sep) + 'plot_0') code_output = ('\n Out::\n' '\n' @@ -151,18 +182,19 @@ def test_pattern_matching(): # create three files in tempdir (only one matches the pattern) fnames = ['plot_0.py', 'plot_1.py', 'plot_2.py'] for fname in fnames: - with codecs.open(os.path.join(examples_dir, fname), mode='w', - encoding='utf-8') as f: + with codecs.open(os.path.join(gallery_conf['examples_dir'], fname), + mode='w', encoding='utf-8') as f: f.write('\n'.join(CONTENT)) # generate rst file - sg.generate_file_rst(fname, gallery_dir, examples_dir, gallery_conf) + 
sg.generate_file_rst(fname, gallery_conf['gallery_dir'], + gallery_conf['examples_dir'], gallery_conf) # read rst file and check if it contains code output rst_fname = os.path.splitext(fname)[0] + '.rst' - with codecs.open(os.path.join(gallery_dir, rst_fname), + with codecs.open(os.path.join(gallery_conf['gallery_dir'], rst_fname), mode='r', encoding='utf-8') as f: rst = f.read() if re.search(gallery_conf['filename_pattern'], - os.path.join(gallery_dir, rst_fname)): + os.path.join(gallery_conf['gallery_dir'], rst_fname)): assert_true(code_output in rst) else: assert_false(code_output in rst) @@ -185,6 +217,7 @@ def test_ipy_notebook(): f.flush() assert_equal(json.load(f), example_nb.work_notebook) + def test_thumbnail_number(): # which plot to show as the thumbnail image for test_str in ['# sphinx_gallery_thumbnail_number= 2', @@ -199,6 +232,7 @@ def test_thumbnail_number(): thumbnail_number = sg.extract_thumbnail_number(content) assert_equal(thumbnail_number, 2) + def test_save_figures(): """Test file naming when saving figures. Requires mayavi.""" try: @@ -212,16 +246,21 @@ def test_save_figures(): mlab.test_plot3d() plt.plot(1, 1) fname_template = os.path.join(examples_dir, 'image{0}.png') - fig_list = sg.save_figures(fname_template, 0, gallery_conf) + fig_list, _ = sg.save_figures(fname_template, 0, gallery_conf) assert_equal(len(fig_list), 2) assert fig_list[0].endswith('image1.png') assert fig_list[1].endswith('image2.png') mlab.test_plot3d() plt.plot(1, 1) - fig_list = sg.save_figures(fname_template, 2, gallery_conf) + fig_list, _ = sg.save_figures(fname_template, 2, gallery_conf) assert_equal(len(fig_list), 2) assert fig_list[0].endswith('image3.png') assert fig_list[1].endswith('image4.png') shutil.rmtree(examples_dir) + +# TODO: test that broken thumbnail does appear when needed +# TODO: test that examples are not executed twice +# TODO: test that examples are executed after a no-plot and produce +# the correct image in the thumbnail
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 12 }
0.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 attrs==22.2.0 Babel==2.11.0 certifi==2021.5.30 charset-normalizer==2.0.12 cycler==0.11.0 docutils==0.18.1 idna==3.10 imagesize==1.4.1 importlib-metadata==4.8.3 iniconfig==1.1.1 Jinja2==3.0.3 kiwisolver==1.3.1 MarkupSafe==2.0.1 matplotlib==3.3.4 nose==1.3.7 numpy==1.19.5 packaging==21.3 Pillow==8.4.0 pluggy==1.0.0 py==1.11.0 Pygments==2.14.0 pyparsing==3.1.4 pytest==7.0.1 python-dateutil==2.9.0.post0 pytz==2025.2 requests==2.27.1 six==1.17.0 snowballstemmer==2.2.0 Sphinx==5.3.0 -e git+https://github.com/sphinx-gallery/sphinx-gallery.git@e6273037ebce248851e74b96a881c85dc61fe951#egg=sphinx_gallery sphinxcontrib-applehelp==1.0.2 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 tomli==1.2.3 typing_extensions==4.1.1 urllib3==1.26.20 zipp==3.6.0
name: sphinx-gallery channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - attrs==22.2.0 - babel==2.11.0 - charset-normalizer==2.0.12 - cycler==0.11.0 - docutils==0.18.1 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - jinja2==3.0.3 - kiwisolver==1.3.1 - markupsafe==2.0.1 - matplotlib==3.3.4 - nose==1.3.7 - numpy==1.19.5 - packaging==21.3 - pillow==8.4.0 - pluggy==1.0.0 - py==1.11.0 - pygments==2.14.0 - pyparsing==3.1.4 - pytest==7.0.1 - python-dateutil==2.9.0.post0 - pytz==2025.2 - requests==2.27.1 - six==1.17.0 - snowballstemmer==2.2.0 - sphinx==5.3.0 - sphinxcontrib-applehelp==1.0.2 - sphinxcontrib-devhelp==1.0.2 - sphinxcontrib-htmlhelp==2.0.0 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==1.0.3 - sphinxcontrib-serializinghtml==1.1.5 - tomli==1.2.3 - typing-extensions==4.1.1 - urllib3==1.26.20 - zipp==3.6.0 prefix: /opt/conda/envs/sphinx-gallery
[ "sphinx_gallery/tests/test_gen_rst.py::test_md5sums", "sphinx_gallery/tests/test_gen_rst.py::test_fail_example", "sphinx_gallery/tests/test_gen_rst.py::test_pattern_matching" ]
[]
[ "sphinx_gallery/tests/test_gen_rst.py::test_split_code_and_text_blocks", "sphinx_gallery/tests/test_gen_rst.py::test_bug_cases_of_notebook_syntax", "sphinx_gallery/tests/test_gen_rst.py::test_direct_comment_after_docstring", "sphinx_gallery/tests/test_gen_rst.py::test_codestr2rst", "sphinx_gallery/tests/test_gen_rst.py::test_extract_intro", "sphinx_gallery/tests/test_gen_rst.py::test_ipy_notebook", "sphinx_gallery/tests/test_gen_rst.py::test_thumbnail_number" ]
[]
BSD 3-Clause "New" or "Revised" License
431
scopely-devops__skew-92
a379c1344b8cb630a5f9e9e8ff5e7ab0f7423df0
2016-02-15 14:16:07
73848f12aeb8c630a93a3b125c7c0a716d972b57
diff --git a/skew/resources/resource.py b/skew/resources/resource.py index 93bbdf9..92514cd 100644 --- a/skew/resources/resource.py +++ b/skew/resources/resource.py @@ -17,6 +17,8 @@ import jmespath import skew.awsclient +from botocore.exceptions import ClientError + LOG = logging.getLogger(__name__) @@ -46,7 +48,13 @@ class Resource(object): if extra_args: kwargs.update(extra_args) LOG.debug('enum_op=%s' % enum_op) - data = client.call(enum_op, query=path, **kwargs) + try: + data = client.call(enum_op, query=path, **kwargs) + except ClientError as e: + data = {} + # if the error is because the resource was not found, be quiet + if 'NotFound' not in e.response['Error']['Code']: + raise LOG.debug(data) resources = [] if data:
Wildcard searches for ec2 instances result in exception In version 0.16.1, when running a `skew.scan()` for a wildcard account and or region the following error is encountered: ``` raise ClientError(parsed_response, operation_name) botocore.exceptions.ClientError: An error occurred (InvalidInstanceID.NotFound) when calling the DescribeInstances operation: The instance ID <SNIP INSTANCE ID>' does not exist ``` The call for this looks like: `skew.scan('arn:aws:ec2:*:*:instance/<SPECIFIC INSTANCE ID>')` If I call this with the specific region and instance id, then the query succeeds. The full stack trace after sniping out my code lines: ``` #!/usr/bin/env python (...) File "/usr/local/lib/python2.7/dist-packages/skew/arn/__init__.py", line 319, in __iter__ for scheme in self.scheme.enumerate(context, **self.kwargs): File "/usr/local/lib/python2.7/dist-packages/skew/arn/__init__.py", line 244, in enumerate context, **kwargs): File "/usr/local/lib/python2.7/dist-packages/skew/arn/__init__.py", line 229, in enumerate context, **kwargs): File "/usr/local/lib/python2.7/dist-packages/skew/arn/__init__.py", line 214, in enumerate context, **kwargs): File "/usr/local/lib/python2.7/dist-packages/skew/arn/__init__.py", line 195, in enumerate context, **kwargs): File "/usr/local/lib/python2.7/dist-packages/skew/arn/__init__.py", line 146, in enumerate context, **kwargs): File "/usr/local/lib/python2.7/dist-packages/skew/arn/__init__.py", line 128, in enumerate self._arn, region, account, resource_id, **kwargs)) File "/usr/local/lib/python2.7/dist-packages/skew/resources/resource.py", line 49, in enumerate data = client.call(enum_op, query=path, **kwargs) File "/usr/local/lib/python2.7/dist-packages/skew/awsclient.py", line 116, in call data = results.build_full_result() File "/usr/local/lib/python2.7/dist-packages/botocore/paginate.py", line 271, in build_full_result for response in self: File "/usr/local/lib/python2.7/dist-packages/botocore/paginate.py", line 85, in __iter__ response = self._make_request(current_kwargs) File "/usr/local/lib/python2.7/dist-packages/botocore/paginate.py", line 157, in _make_request return self._method(**current_kwargs) File "/usr/local/lib/python2.7/dist-packages/botocore/client.py", line 310, in _api_call return self._make_api_call(operation_name, kwargs) File "/usr/local/lib/python2.7/dist-packages/botocore/client.py", line 407, in _make_api_call raise ClientError(parsed_response, operation_name) botocore.exceptions.ClientError: An error occurred (InvalidInstanceID.NotFound) when calling the DescribeInstances operation: The instance ID ' <SNIP INSTANCE ID>' does not exist ```
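For context, this is roughly the call pattern that trips the error, and the behaviour the patch above aims for. A sketch only: it assumes configured AWS credentials, and the instance id is made up.

```
import skew

arn = skew.scan('arn:aws:ec2:*:*:instance/i-87654321')

# Before the patch, iterating raised botocore's ClientError
# (InvalidInstanceID.NotFound) as soon as one enumerated region did not know
# the id; with the patch, such regions contribute nothing and the scan yields
# only the resources that actually exist.
instances = list(arn)
print(len(instances))
```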
scopely-devops/skew
diff --git a/tests/unit/responses/instances_3/ec2.DescribeInstances_1.json b/tests/unit/responses/instances_3/ec2.DescribeInstances_1.json new file mode 100644 index 0000000..b8d59f6 --- /dev/null +++ b/tests/unit/responses/instances_3/ec2.DescribeInstances_1.json @@ -0,0 +1,13 @@ +{ + "status_code": 400, + "data": { + "ResponseMetadata": { + "HTTPStatusCode": 400, + "RequestId": "c54d7e0e-ccfc-4a93-a2e5-862de7716e5d" + }, + "Error": { + "Message": "The instance ID 'i-eedf6728' does not exist", + "Code": "InvalidInstanceID.NotFound" + } + } +} \ No newline at end of file diff --git a/tests/unit/test_arn.py b/tests/unit/test_arn.py index fcbd8b4..8522ffd 100644 --- a/tests/unit/test_arn.py +++ b/tests/unit/test_arn.py @@ -67,6 +67,17 @@ class TestARN(unittest.TestCase): r = l[0] self.assertEqual(r.filtered_data, 't2.small') + def test_ec2_instance_not_found(self): + placebo_cfg = { + 'placebo': placebo, + 'placebo_dir': self._get_response_path('instances_3'), + 'placebo_mode': 'playback'} + arn = scan('arn:aws:ec2:us-west-2:123456789012:instance/i-87654321', + **placebo_cfg) + # Fetch all Instance resources + l = list(arn) + self.assertEqual(len(l), 0) + def test_ec2_volumes(self): placebo_cfg = { 'placebo': placebo,
{ "commit_name": "head_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 0, "issue_text_score": 0, "test_score": 2 }, "num_modified_files": 1 }
0.16
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "nose", "mock", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
boto3==1.37.23 botocore==1.37.23 distlib==0.3.9 exceptiongroup==1.2.2 filelock==3.18.0 iniconfig==2.1.0 jmespath==1.0.1 mock==1.0.1 nose==1.3.4 packaging==24.2 placebo==0.4.3 platformdirs==4.3.7 pluggy==1.5.0 py==1.11.0 pytest==8.3.5 python-dateutil==2.9.0.post0 PyYAML==3.11 s3transfer==0.11.4 six==1.17.0 -e git+https://github.com/scopely-devops/skew.git@a379c1344b8cb630a5f9e9e8ff5e7ab0f7423df0#egg=skew tomli==2.2.1 tox==1.8.1 urllib3==1.26.20 virtualenv==20.29.3
name: skew channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - boto3==1.37.23 - botocore==1.37.23 - distlib==0.3.9 - exceptiongroup==1.2.2 - filelock==3.18.0 - iniconfig==2.1.0 - jmespath==1.0.1 - mock==1.0.1 - nose==1.3.4 - packaging==24.2 - placebo==0.4.3 - platformdirs==4.3.7 - pluggy==1.5.0 - py==1.11.0 - pytest==8.3.5 - python-dateutil==2.9.0.post0 - pyyaml==3.11 - s3transfer==0.11.4 - six==1.17.0 - tomli==2.2.1 - tox==1.8.1 - urllib3==1.26.20 - virtualenv==20.29.3 prefix: /opt/conda/envs/skew
[ "tests/unit/test_arn.py::TestARN::test_ec2_instance_not_found" ]
[]
[ "tests/unit/test_arn.py::TestARN::test_cloudformation_stacks", "tests/unit/test_arn.py::TestARN::test_ec2", "tests/unit/test_arn.py::TestARN::test_ec2_keypairs", "tests/unit/test_arn.py::TestARN::test_ec2_network_acls", "tests/unit/test_arn.py::TestARN::test_ec2_routetable", "tests/unit/test_arn.py::TestARN::test_ec2_securitygroup", "tests/unit/test_arn.py::TestARN::test_ec2_volumes", "tests/unit/test_arn.py::TestARN::test_ec2_vpcs", "tests/unit/test_arn.py::TestARN::test_elb_loadbalancer", "tests/unit/test_arn.py::TestARN::test_iam_groups", "tests/unit/test_arn.py::TestARN::test_iam_users", "tests/unit/test_arn.py::TestARN::test_s3_buckets" ]
[]
Apache License 2.0
432
docker__docker-py-942
c3a66cc5999a5435b81769ac758d411d34c995c4
2016-02-15 19:45:01
4c34be5d4ab8a5a017950712e9c96b56d78d1c58
dnephin: Looks good, just one consideration for backwards compatibility.
diff --git a/docker/api/container.py b/docker/api/container.py index ceac173f..8aa9aa2c 100644 --- a/docker/api/container.py +++ b/docker/api/container.py @@ -193,12 +193,14 @@ class ContainerApiMixin(object): @utils.check_resource def logs(self, container, stdout=True, stderr=True, stream=False, - timestamps=False, tail='all', since=None): + timestamps=False, tail='all', since=None, follow=None): if utils.compare_version('1.11', self._version) >= 0: + if follow is None: + follow = stream params = {'stderr': stderr and 1 or 0, 'stdout': stdout and 1 or 0, 'timestamps': timestamps and 1 or 0, - 'follow': stream and 1 or 0, + 'follow': follow and 1 or 0, } if utils.compare_version('1.13', self._version) >= 0: if tail != 'all' and (not isinstance(tail, int) or tail < 0): diff --git a/docs/api.md b/docs/api.md index 00ccabca..32952bf3 100644 --- a/docs/api.md +++ b/docs/api.md @@ -677,6 +677,7 @@ output as it happens. * timestamps (bool): Show timestamps * tail (str or int): Output specified number of lines at the end of logs: `"all"` or `number`. Default `"all"` * since (datetime or int): Show logs since a given datetime or integer epoch (in seconds) +* follow (bool): Follow log output **Returns** (generator or str):
logs() separate param for stream and follow From: https://github.com/docker/compose/pull/2720/files#r52222296 Currently the `follow` param is set based on `stream`. I think `follow=True` does imply `stream=True`, but `stream=True` doesn't imply `follow=True`: you may still want to stream without following.
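Once `logs()` accepts the separate `follow` argument that the patch above introduces, the two behaviours can be requested independently. A rough usage sketch, assuming a local daemon; the container id and socket path are placeholders:

```
import docker

client = docker.Client(base_url='unix://var/run/docker.sock')
container_id = 'abc123'  # placeholder

# Stream the log output that already exists, then stop at the end of the log.
for chunk in client.logs(container_id, stream=True, follow=False):
    print(chunk)

# Stream and keep following new output as the container produces it.
for chunk in client.logs(container_id, stream=True, follow=True):
    print(chunk)
```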
docker/docker-py
diff --git a/tests/integration/container_test.py b/tests/integration/container_test.py index 1714599b..142299d3 100644 --- a/tests/integration/container_test.py +++ b/tests/integration/container_test.py @@ -666,7 +666,7 @@ Line2''' logs = self.client.logs(id, tail=1) self.assertEqual(logs, 'Line2\n'.encode(encoding='ascii')) - def test_logs_streaming(self): + def test_logs_streaming_and_follow(self): snippet = 'Flowering Nights (Sakuya Iyazoi)' container = self.client.create_container( BUSYBOX, 'echo {0}'.format(snippet) @@ -675,7 +675,7 @@ Line2''' self.tmp_containers.append(id) self.client.start(id) logs = six.binary_type() - for chunk in self.client.logs(id, stream=True): + for chunk in self.client.logs(id, stream=True, follow=True): logs += chunk exitcode = self.client.wait(id) diff --git a/tests/unit/container_test.py b/tests/unit/container_test.py index c2b25734..d66eeede 100644 --- a/tests/unit/container_test.py +++ b/tests/unit/container_test.py @@ -1119,6 +1119,36 @@ class ContainerTest(DockerClientTest): ) def test_log_streaming(self): + with mock.patch('docker.Client.inspect_container', + fake_inspect_container): + self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=True, + follow=False) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/logs', + params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, + 'tail': 'all'}, + timeout=DEFAULT_TIMEOUT_SECONDS, + stream=True + ) + + def test_log_following(self): + with mock.patch('docker.Client.inspect_container', + fake_inspect_container): + self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False, + follow=True) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/logs', + params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1, + 'tail': 'all'}, + timeout=DEFAULT_TIMEOUT_SECONDS, + stream=False + ) + + def test_log_following_backwards(self): with mock.patch('docker.Client.inspect_container', fake_inspect_container): self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=True) @@ -1132,12 +1162,27 @@ class ContainerTest(DockerClientTest): stream=True ) + def test_log_streaming_and_following(self): + with mock.patch('docker.Client.inspect_container', + fake_inspect_container): + self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=True, + follow=True) + + fake_request.assert_called_with( + 'GET', + url_prefix + 'containers/3cc2351ab11b/logs', + params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1, + 'tail': 'all'}, + timeout=DEFAULT_TIMEOUT_SECONDS, + stream=True + ) + def test_log_tail(self): with mock.patch('docker.Client.inspect_container', fake_inspect_container): self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False, - tail=10) + follow=False, tail=10) fake_request.assert_called_with( 'GET', @@ -1153,7 +1198,7 @@ class ContainerTest(DockerClientTest): with mock.patch('docker.Client.inspect_container', fake_inspect_container): self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False, - since=ts) + follow=False, since=ts) fake_request.assert_called_with( 'GET', @@ -1170,7 +1215,7 @@ class ContainerTest(DockerClientTest): with mock.patch('docker.Client.inspect_container', fake_inspect_container): self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False, - since=time) + follow=False, since=time) fake_request.assert_called_with( 'GET', @@ -1188,7 +1233,7 @@ class ContainerTest(DockerClientTest): with mock.patch('docker.Client._stream_raw_result', m): self.client.logs(fake_api.FAKE_CONTAINER_ID, - stream=True) + follow=True, stream=True) 
self.assertTrue(m.called) fake_request.assert_called_with(
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_hyperlinks", "has_many_modified_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 2 }
1.7
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
-e git+https://github.com/docker/docker-py.git@c3a66cc5999a5435b81769ac758d411d34c995c4#egg=docker_py exceptiongroup==1.2.2 iniconfig==2.1.0 packaging==24.2 pluggy==1.5.0 pytest==8.3.5 requests==2.5.3 six==1.17.0 tomli==2.2.1 websocket_client==0.32.0
name: docker-py channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - exceptiongroup==1.2.2 - iniconfig==2.1.0 - packaging==24.2 - pluggy==1.5.0 - pytest==8.3.5 - requests==2.5.3 - six==1.17.0 - tomli==2.2.1 - websocket-client==0.32.0 prefix: /opt/conda/envs/docker-py
[ "tests/unit/container_test.py::ContainerTest::test_log_following", "tests/unit/container_test.py::ContainerTest::test_log_since", "tests/unit/container_test.py::ContainerTest::test_log_since_with_datetime", "tests/unit/container_test.py::ContainerTest::test_log_streaming", "tests/unit/container_test.py::ContainerTest::test_log_streaming_and_following", "tests/unit/container_test.py::ContainerTest::test_log_tail", "tests/unit/container_test.py::ContainerTest::test_log_tty" ]
[]
[ "tests/unit/container_test.py::StartContainerTest::test_start_container", "tests/unit/container_test.py::StartContainerTest::test_start_container_none", "tests/unit/container_test.py::StartContainerTest::test_start_container_privileged", "tests/unit/container_test.py::StartContainerTest::test_start_container_regression_573", "tests/unit/container_test.py::StartContainerTest::test_start_container_with_binds_ro", "tests/unit/container_test.py::StartContainerTest::test_start_container_with_binds_rw", "tests/unit/container_test.py::StartContainerTest::test_start_container_with_dict_instead_of_id", "tests/unit/container_test.py::StartContainerTest::test_start_container_with_links", "tests/unit/container_test.py::StartContainerTest::test_start_container_with_links_as_list_of_tuples", "tests/unit/container_test.py::StartContainerTest::test_start_container_with_lxc_conf", "tests/unit/container_test.py::StartContainerTest::test_start_container_with_lxc_conf_compat", "tests/unit/container_test.py::StartContainerTest::test_start_container_with_multiple_links", "tests/unit/container_test.py::StartContainerTest::test_start_container_with_port_binds", "tests/unit/container_test.py::CreateContainerTest::test_create_container", "tests/unit/container_test.py::CreateContainerTest::test_create_container_empty_volumes_from", "tests/unit/container_test.py::CreateContainerTest::test_create_container_privileged", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_added_capabilities", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_binds", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_binds_list", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_binds_mode", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_binds_mode_and_ro_error", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_binds_ro", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_binds_rw", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_cgroup_parent", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_cpu_shares", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_cpuset", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_devices", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_dropped_capabilities", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_entrypoint", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_labels_dict", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_labels_list", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_links", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_links_as_list_of_tuples", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_lxc_conf", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_lxc_conf_compat", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_mac_address", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_mem_limit_as_int", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_mem_limit_as_string", 
"tests/unit/container_test.py::CreateContainerTest::test_create_container_with_mem_limit_as_string_with_g_unit", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_mem_limit_as_string_with_k_unit", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_mem_limit_as_string_with_m_unit", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_mem_limit_as_string_with_wrong_value", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_multiple_links", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_named_volume", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_port_binds", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_ports", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_restart_policy", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_stdin_open", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_stop_signal", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_volume_string", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_volumes_from", "tests/unit/container_test.py::CreateContainerTest::test_create_container_with_working_dir", "tests/unit/container_test.py::CreateContainerTest::test_create_named_container", "tests/unit/container_test.py::ContainerTest::test_container_stats", "tests/unit/container_test.py::ContainerTest::test_container_top", "tests/unit/container_test.py::ContainerTest::test_container_top_with_psargs", "tests/unit/container_test.py::ContainerTest::test_diff", "tests/unit/container_test.py::ContainerTest::test_diff_with_dict_instead_of_id", "tests/unit/container_test.py::ContainerTest::test_export", "tests/unit/container_test.py::ContainerTest::test_export_with_dict_instead_of_id", "tests/unit/container_test.py::ContainerTest::test_inspect_container", "tests/unit/container_test.py::ContainerTest::test_inspect_container_undefined_id", "tests/unit/container_test.py::ContainerTest::test_kill_container", "tests/unit/container_test.py::ContainerTest::test_kill_container_with_dict_instead_of_id", "tests/unit/container_test.py::ContainerTest::test_kill_container_with_signal", "tests/unit/container_test.py::ContainerTest::test_list_containers", "tests/unit/container_test.py::ContainerTest::test_log_following_backwards", "tests/unit/container_test.py::ContainerTest::test_logs", "tests/unit/container_test.py::ContainerTest::test_logs_with_dict_instead_of_id", "tests/unit/container_test.py::ContainerTest::test_pause_container", "tests/unit/container_test.py::ContainerTest::test_port", "tests/unit/container_test.py::ContainerTest::test_remove_container", "tests/unit/container_test.py::ContainerTest::test_remove_container_with_dict_instead_of_id", "tests/unit/container_test.py::ContainerTest::test_rename_container", "tests/unit/container_test.py::ContainerTest::test_resize_container", "tests/unit/container_test.py::ContainerTest::test_restart_container", "tests/unit/container_test.py::ContainerTest::test_restart_container_with_dict_instead_of_id", "tests/unit/container_test.py::ContainerTest::test_stop_container", "tests/unit/container_test.py::ContainerTest::test_stop_container_with_dict_instead_of_id", "tests/unit/container_test.py::ContainerTest::test_unpause_container", "tests/unit/container_test.py::ContainerTest::test_wait", 
"tests/unit/container_test.py::ContainerTest::test_wait_with_dict_instead_of_id" ]
[]
Apache License 2.0
433
harlowja__fasteners-23
8b63aafd5a9cde3e506810b5df52174d016edd2d
2016-02-15 22:24:40
8b63aafd5a9cde3e506810b5df52174d016edd2d
diff --git a/fasteners/process_lock.py b/fasteners/process_lock.py index b5b7405..72e4f4d 100644 --- a/fasteners/process_lock.py +++ b/fasteners/process_lock.py @@ -214,30 +214,44 @@ class _InterProcessLock(object): return os.path.exists(self.path) def trylock(self): - raise NotImplementedError() + self._trylock(self.lockfile) def unlock(self): + self._unlock(self.lockfile) + + @staticmethod + def _trylock(): + raise NotImplementedError() + + @staticmethod + def _unlock(): raise NotImplementedError() class _WindowsLock(_InterProcessLock): """Interprocess lock implementation that works on windows systems.""" - def trylock(self): - msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_NBLCK, 1) + @staticmethod + def _trylock(lockfile): + fileno = lockfile.fileno() + msvcrt.locking(fileno, msvcrt.LK_NBLCK, 1) - def unlock(self): - msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_UNLCK, 1) + @staticmethod + def _unlock(lockfile): + fileno = lockfile.fileno() + msvcrt.locking(fileno, msvcrt.LK_UNLCK, 1) class _FcntlLock(_InterProcessLock): """Interprocess lock implementation that works on posix systems.""" - def trylock(self): - fcntl.lockf(self.lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB) + @staticmethod + def _trylock(lockfile): + fcntl.lockf(lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB) - def unlock(self): - fcntl.lockf(self.lockfile, fcntl.LOCK_UN) + @staticmethod + def _unlock(lockfile): + fcntl.lockf(lockfile, fcntl.LOCK_UN) if os.name == 'nt':
Process Lock tests assume POSIX The `test_process_lock` module assumes we're on a POSIX system. It *looks* like it can be fixed pretty easily, but I really don't know much about the details of locking on various platforms. Here we import `fcntl`: https://github.com/harlowja/fasteners/blob/master/fasteners/tests/test_process_lock.py#L19 And here we use it: https://github.com/harlowja/fasteners/blob/master/fasteners/tests/test_process_lock.py#L127 And here it looks like we could be using this instead: https://github.com/harlowja/fasteners/blob/master/fasteners/process_lock.py#L227
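The shape of the fix, sketched under the assumption that the patch above is applied: instead of importing `fcntl` in the test module, the test can go through the lock class's own platform-specific helpers, which dispatch to `fcntl` on POSIX and `msvcrt` on Windows. The lock file path here is arbitrary.

```
import os

from fasteners import process_lock as pl

path = os.path.join(os.getcwd(), 'example.lock')  # arbitrary example path
with open(path, 'w') as handle:
    try:
        # Non-blocking lock/unlock on an open handle, whatever the platform.
        pl.InterProcessLock._trylock(handle)
        pl.InterProcessLock._unlock(handle)
        print('locked and unlocked')
    except IOError:
        print('lock is held by another process')
```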
harlowja/fasteners
diff --git a/fasteners/tests/test_process_lock.py b/fasteners/tests/test_process_lock.py index 9e96589..d31632d 100644 --- a/fasteners/tests/test_process_lock.py +++ b/fasteners/tests/test_process_lock.py @@ -15,12 +15,12 @@ # License for the specific language governing permissions and limitations # under the License. +import contextlib import errno -import fcntl import multiprocessing import os import shutil -import signal +import sys import tempfile import threading import time @@ -28,6 +28,8 @@ import time from fasteners import process_lock as pl from fasteners import test +WIN32 = os.name == 'nt' + class BrokenLock(pl.InterProcessLock): def __init__(self, name, errno_code): @@ -43,6 +45,87 @@ class BrokenLock(pl.InterProcessLock): raise err [email protected] +def scoped_child_processes(children, timeout=0.1, exitcode=0): + for child in children: + child.daemon = True + child.start() + yield + start = time.time() + timed_out = 0 + + for child in children: + child.join(max(timeout - (time.time() - start), 0)) + if child.is_alive(): + timed_out += 1 + child.terminate() + + if timed_out: + msg = "{} child processes killed due to timeout\n".format(timed_out) + sys.stderr.write(msg) + + if exitcode is not None: + for child in children: + c_code = child.exitcode + msg = "Child exitcode {} != {}" + assert c_code == exitcode, msg.format(c_code, exitcode) + + +def try_lock(lock_file): + try: + my_lock = pl.InterProcessLock(lock_file) + my_lock.lockfile = open(lock_file, 'w') + my_lock.trylock() + my_lock.unlock() + os._exit(1) + except IOError: + os._exit(0) + + +def lock_files(lock_path, handles_dir, num_handles=50): + with pl.InterProcessLock(lock_path): + + # Open some files we can use for locking + handles = [] + for n in range(num_handles): + path = os.path.join(handles_dir, ('file-%s' % n)) + handles.append(open(path, 'w')) + + # Loop over all the handles and try locking the file + # without blocking, keep a count of how many files we + # were able to lock and then unlock. 
If the lock fails + # we get an IOError and bail out with bad exit code + count = 0 + for handle in handles: + try: + pl.InterProcessLock._trylock(handle) + count += 1 + pl.InterProcessLock._unlock(handle) + except IOError: + os._exit(2) + finally: + handle.close() + + # Check if we were able to open all files + if count != num_handles: + raise AssertionError("Unable to open all handles") + + +def inter_processlock_helper(lockname, lock_filename, pipe): + lock2 = pl.InterProcessLock(lockname) + lock2.lockfile = open(lock_filename, 'w') + have_lock = False + while not have_lock: + try: + lock2.trylock() + have_lock = True + except IOError: + pass + # Hold the lock and wait for the parent + pipe.send(None) + pipe.recv() + + class ProcessLockTest(test.TestCase): def setUp(self): super(ProcessLockTest, self).setUp() @@ -59,27 +142,13 @@ class ProcessLockTest(test.TestCase): lock_file = os.path.join(self.lock_dir, 'lock') lock = pl.InterProcessLock(lock_file) - def try_lock(): - try: - my_lock = pl.InterProcessLock(lock_file) - my_lock.lockfile = open(lock_file, 'w') - my_lock.trylock() - my_lock.unlock() - os._exit(1) - except IOError: - os._exit(0) - def attempt_acquire(count): - children = [] - for i in range(count): - child = multiprocessing.Process(target=try_lock) - child.start() - children.append(child) - exit_codes = [] - for child in children: - child.join() - exit_codes.append(child.exitcode) - return sum(exit_codes) + children = [ + multiprocessing.Process(target=try_lock, args=(lock_file,)) + for i in range(count)] + with scoped_child_processes(children, timeout=10, exitcode=None): + pass + return sum(c.exitcode for c in children) self.assertTrue(lock.acquire()) try: @@ -108,49 +177,17 @@ class ProcessLockTest(test.TestCase): def _do_test_lock_externally(self, lock_dir): lock_path = os.path.join(lock_dir, "lock") - def lock_files(handles_dir): - with pl.InterProcessLock(lock_path): - - # Open some files we can use for locking - handles = [] - for n in range(50): - path = os.path.join(handles_dir, ('file-%s' % n)) - handles.append(open(path, 'w')) - - # Loop over all the handles and try locking the file - # without blocking, keep a count of how many files we - # were able to lock and then unlock. 
If the lock fails - # we get an IOError and bail out with bad exit code - count = 0 - for handle in handles: - try: - fcntl.flock(handle, fcntl.LOCK_EX | fcntl.LOCK_NB) - count += 1 - fcntl.flock(handle, fcntl.LOCK_UN) - except IOError: - os._exit(2) - finally: - handle.close() - - # Check if we were able to open all files - self.assertEqual(50, count) - handles_dir = tempfile.mkdtemp() self.tmp_dirs.append(handles_dir) - children = [] - for n in range(50): - pid = os.fork() - if pid: - children.append(pid) - else: - try: - lock_files(handles_dir) - finally: - os._exit(0) - for child in children: - (pid, status) = os.waitpid(child, 0) - if pid: - self.assertEqual(0, status) + + num_handles = 50 + num_processes = 50 + args = [lock_path, handles_dir, num_handles] + children = [multiprocessing.Process(target=lock_files, args=args) + for _ in range(num_processes)] + + with scoped_child_processes(children, timeout=30, exitcode=0): + pass def test_lock_externally(self): self._do_test_lock_externally(self.lock_dir) @@ -180,16 +217,20 @@ class ProcessLockTest(test.TestCase): def test_interprocess_lock(self): lock_file = os.path.join(self.lock_dir, 'lock') + lock_name = 'foo' + + child_pipe, them = multiprocessing.Pipe() + child = multiprocessing.Process( + target=inter_processlock_helper, args=(lock_name, lock_file, them)) + + with scoped_child_processes((child,)): - pid = os.fork() - if pid: # Make sure the child grabs the lock first + if not child_pipe.poll(5): + self.fail('Timed out waiting for child to grab lock') + start = time.time() - while not os.path.exists(lock_file): - if time.time() - start > 5: - self.fail('Timed out waiting for child to grab lock') - time.sleep(0) - lock1 = pl.InterProcessLock('foo') + lock1 = pl.InterProcessLock(lock_name) lock1.lockfile = open(lock_file, 'w') # NOTE(bnemec): There is a brief window between when the lock file # is created and when it actually becomes locked. If we happen to @@ -206,26 +247,10 @@ class ProcessLockTest(test.TestCase): break else: self.fail('Never caught expected lock exception') - # We don't need to wait for the full sleep in the child here - os.kill(pid, signal.SIGKILL) - else: - try: - lock2 = pl.InterProcessLock('foo') - lock2.lockfile = open(lock_file, 'w') - have_lock = False - while not have_lock: - try: - lock2.trylock() - have_lock = True - except IOError: - pass - finally: - # NOTE(bnemec): This is racy, but I don't want to add any - # synchronization primitives that might mask a problem - # with the one we're trying to test here. - time.sleep(.5) - os._exit(0) + child_pipe.send(None) + + @test.testtools.skipIf(WIN32, "Windows cannot open file handles twice") def test_non_destructive(self): lock_file = os.path.join(self.lock_dir, 'not-destroyed') with open(lock_file, 'w') as f:
{ "commit_name": "head_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 1 }, "num_modified_files": 1 }
0.14
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "nose", "testtools", "pytest" ], "pre_install": null, "python": "3.5", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 -e git+https://github.com/harlowja/fasteners.git@8b63aafd5a9cde3e506810b5df52174d016edd2d#egg=fasteners fixtures==4.0.1 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work monotonic==1.6 more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work nose==1.3.7 packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pbr==6.1.1 pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work py @ file:///opt/conda/conda-bld/py_1644396412707/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 six==1.17.0 testtools==2.6.0 toml @ file:///tmp/build/80754af9/toml_1616166611790/work typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: fasteners channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - fixtures==4.0.1 - monotonic==1.6 - nose==1.3.7 - pbr==6.1.1 - six==1.17.0 - testtools==2.6.0 prefix: /opt/conda/envs/fasteners
[ "fasteners/tests/test_process_lock.py::ProcessLockTest::test_lock_externally", "fasteners/tests/test_process_lock.py::ProcessLockTest::test_lock_externally_lock_dir_not_exist" ]
[]
[ "fasteners/tests/test_process_lock.py::ProcessLockTest::test_bad_acquire", "fasteners/tests/test_process_lock.py::ProcessLockTest::test_bad_release", "fasteners/tests/test_process_lock.py::ProcessLockTest::test_interprocess_lock", "fasteners/tests/test_process_lock.py::ProcessLockTest::test_lock_acquire_release_file_lock", "fasteners/tests/test_process_lock.py::ProcessLockTest::test_lock_file_exists", "fasteners/tests/test_process_lock.py::ProcessLockTest::test_nested_synchronized_external_works", "fasteners/tests/test_process_lock.py::ProcessLockTest::test_non_destructive" ]
[]
Apache License 2.0
434
nose-devs__nose2-268
bbf5897eb1aa224100e86ba594042e4399fd2f5f
2016-02-16 09:53:50
bbf5897eb1aa224100e86ba594042e4399fd2f5f
little-dude: It's still failing when using the `uses` decorator. Added a test for this.
diff --git a/nose2/suite.py b/nose2/suite.py index b52e0cb..f107489 100644 --- a/nose2/suite.py +++ b/nose2/suite.py @@ -22,6 +22,7 @@ class LayerSuite(unittest.BaseTestSuite): self.wasSetup = False def run(self, result): + self.handle_previous_test_teardown(result) if not self._safeMethodCall(self.setUp, result): return try: @@ -37,6 +38,21 @@ class LayerSuite(unittest.BaseTestSuite): if self.wasSetup: self._safeMethodCall(self.tearDown, result) + def handle_previous_test_teardown(self, result): + try: + prev = result._previousTestClass + except AttributeError: + return + layer_attr = getattr(prev, 'layer', None) + if isinstance(layer_attr, LayerSuite): + return + try: + suite_obj = unittest.suite.TestSuite() + suite_obj._tearDownPreviousClass(None, result) + suite_obj._handleModuleTearDown(result) + finally: + delattr(result, '_previousTestClass') + def setUp(self): if self.layer is None: return
such and normal test cases: mixed up call order of it.has_setup and setUpClass As it is a little bit complicated to explain, I have created a working example as a gist: https://gist.github.com/jrast/109f70f9b4c52bab4252 I have several "normal" UnitTestCases and a few such tests. The problem is that the setUpClass calls are mixed up with the setup calls in such tests, as can be seen in the log file. The documentation states that I cannot use Layer and setUpClass functions in the same TestCase, but it does not state that I can't use them side by side, which is the case in my example. So the question is: is this a bug, or is there a missing hint in the documentation? Either way I think this is dangerous behaviour, because test fixtures made in a setup call can be modified from outside of the current test case, which can lead to very unpredictable behaviour and wrong test results. One example of wrong behaviour is shown with the UniqueResource class, which is locked in the setup functions and unlocked in the teardown functions. As soon as the calls get mixed up, an exception is thrown because the resource is already locked. I really hope this is not just a missing hint/warning in the docs; otherwise I have to rethink how to structure my tests...
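A minimal sketch of the reported scenario (illustrative only — the class and resource names here are hypothetical, not taken from the linked gist): a plain unittest.TestCase locks a shared resource in setUpClass while a neighbouring `such` scenario locks the same resource in its `has_setup`; if nose2 interleaves the class-level and layer-level fixtures, the second lock() raises.

```python
# Hypothetical reproduction of mixing setUpClass-based tests with a
# ``such`` scenario; both fixtures lock the same shared resource.
import unittest
from nose2.tools import such


class UniqueResource(object):
    """May only be locked by one fixture at a time."""
    used = False

    def lock(self):
        if UniqueResource.used:
            raise Exception("Resource already used")
        UniqueResource.used = True

    def unlock(self):
        UniqueResource.used = False


class NormalTest(unittest.TestCase):

    @classmethod
    def setUpClass(cls):
        cls.resource = UniqueResource()
        cls.resource.lock()   # raises if a layer setup still holds the lock

    @classmethod
    def tearDownClass(cls):
        cls.resource.unlock()

    def test(self):
        self.assertTrue(UniqueResource.used)


with such.A('system using the shared resource') as it:

    @it.has_setup
    def setup():
        it.resource = UniqueResource()
        it.resource.lock()

    @it.has_teardown
    def teardown():
        it.resource.unlock()

    @it.should('see the resource locked')
    def test(case):
        it.assertTrue(UniqueResource.used)

it.createTests(globals())
```

Run with ``nose2 --plugin=nose2.plugins.layers -v``; with the ordering bug, one of the two lock() calls fires while the other fixture still holds the resource.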
nose-devs/nose2
diff --git a/nose2/tests/functional/support/scenario/layers_and_non_layers/__init__.py b/nose2/tests/functional/support/scenario/layers_and_non_layers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/nose2/tests/functional/support/scenario/layers_and_non_layers/common.py b/nose2/tests/functional/support/scenario/layers_and_non_layers/common.py new file mode 100644 index 0000000..ae24633 --- /dev/null +++ b/nose2/tests/functional/support/scenario/layers_and_non_layers/common.py @@ -0,0 +1,60 @@ +import unittest +import logging +log = logging.getLogger(__name__) + + +class UniqueResource(object): + _instance = None + used = False + + def __new__(cls, *args, **kwargs): + if not cls._instance: + cls._instance = super(UniqueResource, cls).__new__( + cls, *args, **kwargs) + return cls._instance + + def lock(self): + if not self.used: + self.used = True + else: + raise Exception("Resource allready used") + + def unlock(self): + if self.used: + self.used = False + else: + raise Exception("Resource already unlocked") + + +class NormalTest(unittest.TestCase): + + @classmethod + def setUpClass(cls): + log.info("Called setUpClass in NormalTest") + cls.unique_resource = UniqueResource() + cls.unique_resource.lock() + + @classmethod + def tearDownClass(cls): + log.info("Called tearDownClass in NormalTest") + cls.unique_resource.unlock() + + def test(self): + self.assertTrue(self.unique_resource.used) + + +class NormalTestTwo(unittest.TestCase): + + @classmethod + def setUpClass(cls): + log.info("Called setUpClass in NormalTestTwo") + cls.unique_resource = UniqueResource() + cls.unique_resource.lock() + + @classmethod + def tearDownClass(cls): + log.info("Called tearDownClass in NormalTestTwo") + cls.unique_resource.unlock() + + def test(self): + self.assertTrue(self.unique_resource.used) diff --git a/nose2/tests/functional/support/scenario/layers_and_non_layers/test_layers.py b/nose2/tests/functional/support/scenario/layers_and_non_layers/test_layers.py new file mode 100644 index 0000000..4fb9c8c --- /dev/null +++ b/nose2/tests/functional/support/scenario/layers_and_non_layers/test_layers.py @@ -0,0 +1,67 @@ +import unittest +import logging +from .common import UniqueResource, NormalTest, NormalTestTwo +log = logging.getLogger(__name__) + + +class Layer1(object): + + @classmethod + def setUp(cls): + log.info("Called setup in layer 1") + cls.unique_resource = UniqueResource() + cls.unique_resource.lock() + + @classmethod + def tearDown(cls): + log.info("Called teardown in layer 2") + cls.unique_resource.unlock() + + +class Layer2(object): + + @classmethod + def setUp(cls): + log.info("Called setup in layer 2") + cls.unique_resource = UniqueResource() + cls.unique_resource.lock() + + @classmethod + def tearDown(cls): + log.info("Called teardown in layer 2") + cls.unique_resource.unlock() + + +class Layer3(Layer2): + + @classmethod + def setUp(cls): + log.info("Called setup in layer 3") + + @classmethod + def tearDown(cls): + log.info("Called teardown in layer 3") + + +class LayerTest1(unittest.TestCase): + + layer = Layer1 + + def test(self): + self.assertTrue(self.layer.unique_resource.used) + + +class LayerTest2(unittest.TestCase): + + layer = Layer2 + + def test(self): + self.assertTrue(self.layer.unique_resource.used) + + +class LayerTest3(unittest.TestCase): + + layer = Layer2 + + def test(self): + self.assertTrue(self.layer.unique_resource.used) diff --git a/nose2/tests/functional/support/scenario/layers_and_non_layers/test_such_with_has_setup.py 
b/nose2/tests/functional/support/scenario/layers_and_non_layers/test_such_with_has_setup.py new file mode 100644 index 0000000..14268fe --- /dev/null +++ b/nose2/tests/functional/support/scenario/layers_and_non_layers/test_such_with_has_setup.py @@ -0,0 +1,24 @@ +from nose2.tools import such +import logging +from .common import UniqueResource, NormalTest, NormalTestTwo +log = logging.getLogger(__name__) + + +with such.A('system with setup') as it: + + @it.has_setup + def setup(): + log.info("Called setup in such test") + it.unique_resource = UniqueResource() + it.unique_resource.lock() + + @it.has_teardown + def teardown(): + log.info("Called teardown in such test") + it.unique_resource.unlock() + + @it.should('do something') + def test(case): + it.assertTrue(it.unique_resource.used) + +it.createTests(globals()) diff --git a/nose2/tests/functional/support/scenario/layers_and_non_layers/test_such_with_uses_decorator.py b/nose2/tests/functional/support/scenario/layers_and_non_layers/test_such_with_uses_decorator.py new file mode 100644 index 0000000..ee15921 --- /dev/null +++ b/nose2/tests/functional/support/scenario/layers_and_non_layers/test_such_with_uses_decorator.py @@ -0,0 +1,51 @@ +from nose2.tools import such +import logging +from .common import UniqueResource, NormalTest, NormalTestTwo +log = logging.getLogger(__name__) + + +class Layer1(object): + + description = 'Layer1' + + @classmethod + def setUp(cls): + log.info("Called setup in layer 1") + it.unique_resource = UniqueResource() + it.unique_resource.lock() + + @classmethod + def tearDown(cls): + log.info("Called teardown in layer 2") + it.unique_resource.unlock() + + +class Layer2(object): + + description = 'Layer2' + + @classmethod + def setUp(cls): + log.info("Called setup in layer 2") + + @classmethod + def tearDown(cls): + log.info("Called teardown in layer 2") + +with such.A('system with setup') as it: + + it.uses(Layer1) + + @it.should('do something') + def test(case): + it.assertTrue(it.unique_resource.used) + + with it.having('another setup'): + + it.uses(Layer2) + + @it.should('do something else') + def test(case): + it.assertTrue(it.unique_resource.used) + +it.createTests(globals()) diff --git a/nose2/tests/functional/test_layers_plugin.py b/nose2/tests/functional/test_layers_plugin.py index e072854..9666dba 100644 --- a/nose2/tests/functional/test_layers_plugin.py +++ b/nose2/tests/functional/test_layers_plugin.py @@ -120,3 +120,14 @@ Base self.assertTestRunOutputMatches(proc, stderr='ERROR: LayerSuite') self.assertTestRunOutputMatches(proc, stderr='FAIL') self.assertTestRunOutputMatches(proc, stderr='Bad Error in Layer setUp!') + + def test_layers_and_non_layers(self): + proc = self.runIn( + 'scenario/', + 'layers_and_non_layers', + '-v', + '--plugin=nose2.plugins.layers', + ) + self.assertTestRunOutputMatches(proc, stderr='Ran 12 tests in') + self.assertTestRunOutputMatches(proc, stderr='OK') + self.assertEqual(proc.poll(), 0)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 1 }
0.5
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "nose2", "pytest" ], "pre_install": null, "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
cov-core==1.15.0 coverage==7.8.0 exceptiongroup==1.2.2 iniconfig==2.1.0 -e git+https://github.com/nose-devs/nose2.git@bbf5897eb1aa224100e86ba594042e4399fd2f5f#egg=nose2 packaging==24.2 pluggy==1.5.0 pytest==8.3.5 six==1.17.0 tomli==2.2.1
name: nose2 channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - cov-core==1.15.0 - coverage==7.8.0 - exceptiongroup==1.2.2 - iniconfig==2.1.0 - packaging==24.2 - pluggy==1.5.0 - pytest==8.3.5 - six==1.17.0 - tomli==2.2.1 prefix: /opt/conda/envs/nose2
[ "nose2/tests/functional/test_layers_plugin.py::TestLayers::test_layers_and_non_layers" ]
[ "nose2/tests/functional/support/scenario/layers_and_non_layers/test_layers.py::LayerTest1::test", "nose2/tests/functional/support/scenario/layers_and_non_layers/test_layers.py::LayerTest2::test", "nose2/tests/functional/support/scenario/layers_and_non_layers/test_layers.py::LayerTest3::test", "nose2/tests/functional/support/scenario/layers_and_non_layers/test_such_with_has_setup.py::A", "nose2/tests/functional/support/scenario/layers_and_non_layers/test_such_with_uses_decorator.py::Layer1", "nose2/tests/functional/support/scenario/layers_and_non_layers/test_such_with_uses_decorator.py::A", "nose2/tests/functional/support/scenario/layers_and_non_layers/test_such_with_uses_decorator.py::having" ]
[ "nose2/tests/functional/support/scenario/layers_and_non_layers/common.py::NormalTest::test", "nose2/tests/functional/support/scenario/layers_and_non_layers/common.py::NormalTestTwo::test", "nose2/tests/functional/support/scenario/layers_and_non_layers/test_layers.py::NormalTest::test", "nose2/tests/functional/support/scenario/layers_and_non_layers/test_layers.py::NormalTestTwo::test", "nose2/tests/functional/support/scenario/layers_and_non_layers/test_such_with_has_setup.py::NormalTest::test", "nose2/tests/functional/support/scenario/layers_and_non_layers/test_such_with_has_setup.py::NormalTestTwo::test", "nose2/tests/functional/support/scenario/layers_and_non_layers/test_such_with_uses_decorator.py::NormalTest::test", "nose2/tests/functional/support/scenario/layers_and_non_layers/test_such_with_uses_decorator.py::NormalTestTwo::test", "nose2/tests/functional/test_layers_plugin.py::TestLayers::test_layer_reporter_error_output", "nose2/tests/functional/test_layers_plugin.py::TestLayers::test_layer_reporter_output", "nose2/tests/functional/test_layers_plugin.py::TestLayers::test_layers_and_attributes", "nose2/tests/functional/test_layers_plugin.py::TestLayers::test_methods_run_once_per_class", "nose2/tests/functional/test_layers_plugin.py::TestLayers::test_runs_layer_fixtures", "nose2/tests/functional/test_layers_plugin.py::TestLayers::test_scenario_fails_without_plugin", "nose2/tests/functional/test_layers_plugin.py::TestLayers::test_setup_fail", "nose2/tests/functional/test_layers_plugin.py::TestLayers::test_teardown_fail" ]
[]
BSD
435
nose-devs__nose2-269
bbf5897eb1aa224100e86ba594042e4399fd2f5f
2016-02-16 12:45:43
bbf5897eb1aa224100e86ba594042e4399fd2f5f
little-dude: I need to add a test for this before merging. @slemesle let me know if you want to be the author of this commit, since you proposed the fix.
diff --git a/nose2/plugins/junitxml.py b/nose2/plugins/junitxml.py index c90450e..e61a08b 100644 --- a/nose2/plugins/junitxml.py +++ b/nose2/plugins/junitxml.py @@ -4,15 +4,71 @@ Output test reports in junit-xml format. This plugin implements :func:`startTest`, :func:`testOutcome` and :func:`stopTestRun` to compile and then output a test report in junit-xml format. By default, the report is written to a file called -``nose2-junit.xml`` in the current working directory. +``nose2-junit.xml`` in the current working directory. You can configure the output filename by setting ``path`` in a ``[junit-xml]`` section in a config file. Unicode characters which are invalid in XML 1.0 -are replaced with the ``U+FFFD`` replacement character. In the case that your -software throws an error with an invalid byte string. +are replaced with the ``U+FFFD`` replacement character. In the case that your +software throws an error with an invalid byte string. -By default, the ranges of discouraged characters are replaced as well. This can be -changed by setting the ``keep_restricted`` configuration variable to ``True``. +By default, the ranges of discouraged characters are replaced as well. This can +be changed by setting the ``keep_restricted`` configuration variable to +``True``. + +By default, the arguments of parametrized and generated tests are not printed. +For instance, the following code: + +.. code-block:: python + + # a.py + + from nose2 import tools + + def test_gen(): + def check(a, b): + assert a == b, '{}!={}'.format(a,b) + + yield check, 99, 99 + yield check, -1, -1 + + @tools.params('foo', 'bar') + def test_params(arg): + assert arg in ['foo', 'bar', 'baz'] + +Produces this XML by default: + +.. code-block:: xml + + <testcase classname="a" name="test_gen:1" time="0.000171"> + <system-out /> + </testcase> + <testcase classname="a" name="test_gen:2" time="0.000202"> + <system-out /> + </testcase> + <testcase classname="a" name="test_params:1" time="0.000159"> + <system-out /> + </testcase> + <testcase classname="a" name="test_params:2" time="0.000163"> + <system-out /> + </testcase> + +But if ``test_fullname`` is ``True``, then the following XML is +produced: + +.. 
code-block:: xml + + <testcase classname="a" name="test_gen:1 (99, 99)" time="0.000213"> + <system-out /> + </testcase> + <testcase classname="a" name="test_gen:2 (-1, -1)" time="0.000194"> + <system-out /> + </testcase> + <testcase classname="a" name="test_params:1 ('foo')" time="0.000178"> + <system-out /> + </testcase> + <testcase classname="a" name="test_params:2 ('bar')" time="0.000187"> + <system-out /> + </testcase> """ # Based on unittest2/plugins/junitxml.py, @@ -39,10 +95,12 @@ class JUnitXmlReporter(events.Plugin): def __init__(self): self.path = os.path.realpath( self.config.as_str('path', default='nose2-junit.xml')) - self.keep_restricted = self.config.as_bool('keep_restricted', - default=False) - self.test_properties = self.config.as_str('test_properties', - default=None) + self.keep_restricted = self.config.as_bool( + 'keep_restricted', default=False) + self.test_properties = self.config.as_str( + 'test_properties', default=None) + self.test_fullname = self.config.as_bool( + 'test_fullname', default=False) if self.test_properties is not None: self.test_properties_path = os.path.realpath(self.test_properties) self.errors = 0 @@ -60,11 +118,15 @@ class JUnitXmlReporter(events.Plugin): def testOutcome(self, event): """Add test outcome to xml tree""" test = event.test - testid = test.id().split('\n')[0] - # split into module, class, method parts... somehow + testid_lines = test.id().split('\n') + testid = testid_lines[0] parts = testid.split('.') classname = '.'.join(parts[:-1]) method = parts[-1] + # for generated test cases + if len(testid_lines) > 1 and self.test_fullname: + test_args = ':'.join(testid_lines[1:]) + method = '%s (%s)' % (method, test_args) testcase = ET.SubElement(self.tree, 'testcase') testcase.set('time', "%.6f" % self._time())
junit-xml does not output generated test parameters I use generated tests with meaningful parameters, and I wish I could see them exported in the junit-xml report. The patch looks pretty simple (junitxml.py): ```python def testOutcome(self, event): """Add test outcome to xml tree""" test = event.test testid_lines = test.id().split('\n') testid = testid_lines[0] # split into module, class, method parts... somehow parts = testid.split('.') classname = '.'.join(parts[:-1]) method = parts[-1] if len(testid_lines) > 1: method = '%s:' % method + ':'.join(testid_lines[1:]) ``` With this change we can have all parameters as requested.
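For context, this is the kind of generated test the report is about — a hypothetical example (the check function and its arguments are made up for illustration), written in the generator style nose2 supports:

```python
# Hypothetical generated test; each yielded tuple is (callable, *args).
# Today the junit-xml report names these only "test_hosts:1", "test_hosts:2";
# the request is to also surface the yielded arguments in the testcase name.
def test_hosts():
    def check(hostname, port):
        assert port > 0, '{0}:{1}'.format(hostname, port)

    yield check, 'db1.example.com', 5432
    yield check, 'db2.example.com', 5433
```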
nose-devs/nose2
diff --git a/nose2/tests/unit/test_junitxml.py b/nose2/tests/unit/test_junitxml.py index 6ffef43..706daa2 100644 --- a/nose2/tests/unit/test_junitxml.py +++ b/nose2/tests/unit/test_junitxml.py @@ -186,6 +186,20 @@ class TestJunitXmlPlugin(TestCase): self.assertEqual(xml[0].get('name'), 'test_gen:1') self.assertEqual(xml[1].get('name'), 'test_gen:2') + def test_generator_test_full_name_correct(self): + gen = generators.Generators(session=self.session) + gen.register() + self.plugin.test_fullname = True + event = events.LoadFromTestCaseEvent(self.loader, self.case) + self.session.hooks.loadTestsFromTestCase(event) + cases = event.extraTests + for case in cases: + case(self.result) + xml = self.plugin.tree.findall('testcase') + self.assertEqual(len(xml), 2) + self.assertEqual(xml[0].get('name'), 'test_gen:1 (1, 1)') + self.assertEqual(xml[1].get('name'), 'test_gen:2 (1, 2)') + def test_params_test_name_correct(self): # param test loading is a bit more complex than generator # loading. XXX -- can these be reconciled so they both @@ -213,6 +227,31 @@ class TestJunitXmlPlugin(TestCase): self.assertEqual(params[1].get('name'), 'test_params:2') self.assertEqual(params[2].get('name'), 'test_params:3') + def test_params_test_full_name_correct(self): + plug1 = parameters.Parameters(session=self.session) + plug1.register() + plug2 = testcases.TestCaseLoader(session=self.session) + plug2.register() + # need module to fire top-level event + + class Mod(object): + pass + + m = Mod() + m.Test = self.case + event = events.LoadFromModuleEvent(self.loader, m) + self.plugin.test_fullname = True + self.session.hooks.loadTestsFromModule(event) + for case in event.extraTests: + case(self.result) + xml = self.plugin.tree.findall('testcase') + self.assertEqual(len(xml), 12) + params = [x for x in xml if x.get('name').startswith('test_params')] + self.assertEqual(len(params), 3) + self.assertEqual(params[0].get('name'), 'test_params:1 (1)') + self.assertEqual(params[1].get('name'), 'test_params:2 (2)') + self.assertEqual(params[2].get('name'), 'test_params:3 (3)') + def test_writes_xml_file_at_end(self): test = self.case('test') test(self.result)
{ "commit_name": "head_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 1 }
0.5
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "nose2", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.7", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi @ file:///croot/certifi_1671487769961/work/certifi cov-core==1.15.0 coverage==7.2.7 exceptiongroup==1.2.2 importlib-metadata==6.7.0 iniconfig==2.0.0 -e git+https://github.com/nose-devs/nose2.git@bbf5897eb1aa224100e86ba594042e4399fd2f5f#egg=nose2 packaging==24.0 pluggy==1.2.0 pytest==7.4.4 six==1.17.0 tomli==2.0.1 typing_extensions==4.7.1 zipp==3.15.0
name: nose2 channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2022.12.7=py37h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=22.3.1=py37h06a4308_0 - python=3.7.16=h7a1cb2a_0 - readline=8.2=h5eee18b_0 - setuptools=65.6.3=py37h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.38.4=py37h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - cov-core==1.15.0 - coverage==7.2.7 - exceptiongroup==1.2.2 - importlib-metadata==6.7.0 - iniconfig==2.0.0 - packaging==24.0 - pluggy==1.2.0 - pytest==7.4.4 - six==1.17.0 - tomli==2.0.1 - typing-extensions==4.7.1 - zipp==3.15.0 prefix: /opt/conda/envs/nose2
[ "nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_generator_test_full_name_correct", "nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_params_test_full_name_correct" ]
[]
[ "nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_error_bad_xml", "nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_error_bad_xml_b", "nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_error_bad_xml_b_keep", "nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_error_bad_xml_keep", "nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_error_includes_traceback", "nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_failure_includes_traceback", "nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_generator_test_name_correct", "nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_params_test_name_correct", "nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_skip_includes_skipped", "nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_success_added_to_xml", "nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_writes_xml_file_at_end", "nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_xml_contains_empty_system_err_without_logcapture", "nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_xml_contains_log_message_in_system_err_with_logcapture", "nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_xml_file_path_is_not_affected_by_chdir_in_test" ]
[]
BSD
436
sympy__sympy-10605
6b07c2a991f698bbe4df5fdaf0bccdad34dd4f73
2016-02-16 16:26:24
0241b35cae5f2adc04dc37b25f7dc9c5f00bd746
asmeurer: Just the one comment. Looks good otherwise. AnishShah: @asmeurer I have updated the PR. Can you cancel the old Travis CI for this PR?
diff --git a/sympy/assumptions/refine.py b/sympy/assumptions/refine.py index fd9d5e9108..bdb82f5d85 100644 --- a/sympy/assumptions/refine.py +++ b/sympy/assumptions/refine.py @@ -16,11 +16,12 @@ def refine(expr, assumptions=True): Examples ======== - >>> from sympy import refine, sqrt, Q - >>> from sympy.abc import x - >>> refine(sqrt(x**2), Q.real(x)) + >>> from sympy import Symbol, refine, sqrt, Q + >>> x = Symbol('x', real=True) + >>> refine(sqrt(x**2)) Abs(x) - >>> refine(sqrt(x**2), Q.positive(x)) + >>> x = Symbol('x', positive=True) + >>> refine(sqrt(x**2)) x """ @@ -46,32 +47,6 @@ def refine(expr, assumptions=True): return refine(new_expr, assumptions) -def refine_abs(expr, assumptions): - """ - Handler for the absolute value. - - Examples - ======== - - >>> from sympy import Symbol, Q, refine, Abs - >>> from sympy.assumptions.refine import refine_abs - >>> from sympy.abc import x - >>> refine_abs(Abs(x), Q.real(x)) - >>> refine_abs(Abs(x), Q.positive(x)) - x - >>> refine_abs(Abs(x), Q.negative(x)) - -x - - """ - arg = expr.args[0] - if ask(Q.real(arg), assumptions) and \ - fuzzy_not(ask(Q.negative(arg), assumptions)): - # if it's nonnegative - return arg - if ask(Q.negative(arg), assumptions): - return -arg - - def refine_Pow(expr, assumptions): """ Handler for instances of Pow. @@ -230,7 +205,6 @@ def refine_Relational(expr, assumptions): handlers_dict = { - 'Abs': refine_abs, 'Pow': refine_Pow, 'atan2': refine_atan2, 'Equality': refine_Relational, diff --git a/sympy/core/basic.py b/sympy/core/basic.py index 502a3adb8e..9699c354db 100644 --- a/sympy/core/basic.py +++ b/sympy/core/basic.py @@ -1635,6 +1635,7 @@ def class_key(cls): @cacheit def sort_key(self, order=None): + from sympy.core import S return self.class_key(), (1, (str(self),)), S.One.sort_key(), S.One def _eval_simplify(self, ratio, measure): diff --git a/sympy/core/compatibility.py b/sympy/core/compatibility.py index ffcf6c5e01..702a02cfc6 100644 --- a/sympy/core/compatibility.py +++ b/sympy/core/compatibility.py @@ -522,10 +522,9 @@ def default_sort_key(item, order=None): """ - from .singleton import S - from .basic import Basic - from .sympify import sympify, SympifyError - from .compatibility import iterable + from sympy.core import S, Basic + from sympy.core.sympify import sympify, SympifyError + from sympy.core.compatibility import iterable if isinstance(item, Basic): return item.sort_key(order=order) diff --git a/sympy/core/expr.py b/sympy/core/expr.py index 50f1532a28..32954b2e7a 100644 --- a/sympy/core/expr.py +++ b/sympy/core/expr.py @@ -909,9 +909,8 @@ def as_ordered_terms(self, order=None, data=False): def as_terms(self): """Transform an expression to a list of terms. 
""" - from .add import Add - from .mul import Mul - from .exprtools import decompose_power + from sympy.core import Add, Mul, S + from sympy.core.exprtools import decompose_power gens, terms = set([]), [] diff --git a/sympy/core/power.py b/sympy/core/power.py index a3d5fa68da..0398d44098 100644 --- a/sympy/core/power.py +++ b/sympy/core/power.py @@ -229,7 +229,7 @@ def _eval_refine(self): return -Pow(-b, e) def _eval_power(self, other): - from sympy import Abs, arg, exp, floor, im, log, re, sign + from sympy import Abs, arg, exp, floor, im, log, re, sign, refine b, e = self.as_base_exp() if b is S.NaN: return (b**e)**other # let __new__ handle it @@ -273,9 +273,9 @@ def _n2(e): return Pow(b.conjugate()/Abs(b)**2, other) elif e.is_even: if b.is_real: - b = abs(b) + b = refine(abs(b)) if b.is_imaginary: - b = abs(im(b))*S.ImaginaryUnit + b = refine(abs(im(b)))*S.ImaginaryUnit if (abs(e) < 1) == True or (e == 1) == True: s = 1 # floor = 0 @@ -1027,6 +1027,8 @@ def _eval_is_algebraic(self): if self.base.is_zero or (self.base - 1).is_zero: return True elif self.exp.is_rational: + if self.base.is_algebraic is False: + return self.exp.is_nonzero return self.base.is_algebraic elif self.base.is_algebraic and self.exp.is_algebraic: if ((fuzzy_not(self.base.is_zero) diff --git a/sympy/functions/elementary/complexes.py b/sympy/functions/elementary/complexes.py index bc4190bd41..6afd10154a 100644 --- a/sympy/functions/elementary/complexes.py +++ b/sympy/functions/elementary/complexes.py @@ -427,6 +427,29 @@ def fdiff(self, argindex=1): else: raise ArgumentIndexError(self, argindex) + def _eval_refine(self): + arg = self.args[0] + if arg.is_zero: + return S.Zero + if arg.is_nonnegative: + return arg + if arg.is_nonpositive: + return -arg + if arg.is_Add: + expr_list = [] + for _arg in Add.make_args(arg): + if _arg.is_negative or _arg.is_negative is None: + return None + if _arg.is_zero: + expr_list.append(S.Zero) + elif _arg.is_nonnegative: + expr_list.append(_arg) + elif _arg.is_nonpositive: + expr_list.append(-_arg) + if expr_list: + return Add(*expr_list) + return arg + @classmethod def eval(cls, arg): from sympy.simplify.simplify import signsimp @@ -444,7 +467,7 @@ def eval(cls, arg): if arg.is_Mul: known = [] unk = [] - for t in arg.args: + for t in Mul.make_args(arg): tnew = cls(t) if tnew.func is cls: unk.append(tnew.args[0]) diff --git a/sympy/matrices/dense.py b/sympy/matrices/dense.py index ba24f4d013..baee02a542 100644 --- a/sympy/matrices/dense.py +++ b/sympy/matrices/dense.py @@ -554,18 +554,10 @@ def __rsub__(self, other): def __mul__(self, other): return super(DenseMatrix, self).__mul__(_force_mutable(other)) - @call_highest_priority('__rmul__') - def __matmul__(self, other): - return super(DenseMatrix, self).__mul__(_force_mutable(other)) - @call_highest_priority('__mul__') def __rmul__(self, other): return super(DenseMatrix, self).__rmul__(_force_mutable(other)) - @call_highest_priority('__mul__') - def __rmatmul__(self, other): - return super(DenseMatrix, self).__rmul__(_force_mutable(other)) - @call_highest_priority('__div__') def __div__(self, other): return super(DenseMatrix, self).__div__(_force_mutable(other)) diff --git a/sympy/matrices/expressions/matexpr.py b/sympy/matrices/expressions/matexpr.py index 706c578d74..de13236cc2 100644 --- a/sympy/matrices/expressions/matexpr.py +++ b/sympy/matrices/expressions/matexpr.py @@ -105,21 +105,11 @@ def __rsub__(self, other): def __mul__(self, other): return MatMul(self, other).doit() - @_sympifyit('other', NotImplemented) - 
@call_highest_priority('__rmul__') - def __matmul__(self, other): - return MatMul(self, other).doit() - @_sympifyit('other', NotImplemented) @call_highest_priority('__mul__') def __rmul__(self, other): return MatMul(other, self).doit() - @_sympifyit('other', NotImplemented) - @call_highest_priority('__mul__') - def __rmatmul__(self, other): - return MatMul(other, self).doit() - @_sympifyit('other', NotImplemented) @call_highest_priority('__rpow__') def __pow__(self, other): diff --git a/sympy/matrices/immutable.py b/sympy/matrices/immutable.py index 9a34473005..502e7980f0 100644 --- a/sympy/matrices/immutable.py +++ b/sympy/matrices/immutable.py @@ -106,9 +106,7 @@ def _eval_Eq(self, other): __add__ = MatrixBase.__add__ __radd__ = MatrixBase.__radd__ __mul__ = MatrixBase.__mul__ - __matmul__ = MatrixBase.__matmul__ __rmul__ = MatrixBase.__rmul__ - __rmatmul__ = MatrixBase.__rmatmul__ __pow__ = MatrixBase.__pow__ __sub__ = MatrixBase.__sub__ __rsub__ = MatrixBase.__rsub__ diff --git a/sympy/matrices/matrices.py b/sympy/matrices/matrices.py index 8b1d5def3a..0a61971a3e 100644 --- a/sympy/matrices/matrices.py +++ b/sympy/matrices/matrices.py @@ -513,15 +513,11 @@ def __mul__(self, other): return self._new(self.rows, self.cols, [i*other for i in self._mat]) - __matmul__ = __mul__ - def __rmul__(self, a): if getattr(a, 'is_Matrix', False): return self._new(a)*self return self._new(self.rows, self.cols, [a*i for i in self._mat]) - __rmatmul__ = __rmul__ - def __pow__(self, num): from sympy.matrices import eye, diag, MutableMatrix from sympy import binomial diff --git a/sympy/matrices/sparse.py b/sympy/matrices/sparse.py index 4528f1cd34..e14f5b47e0 100644 --- a/sympy/matrices/sparse.py +++ b/sympy/matrices/sparse.py @@ -395,8 +395,6 @@ def __mul__(self, other): return other._new(self*self._new(other)) return self.scalar_multiply(other) - __matmul__ = __mul__ - def __rmul__(self, other): """Return product the same type as other (if a Matrix). @@ -419,8 +417,6 @@ def __rmul__(self, other): return other*other._new(self) return self.scalar_multiply(other) - __rmatmul__ = __rmul__ - def __add__(self, other): """Add other to self, efficiently if possible.
If t is transcendental, t**n is determined (wrongly) to be non-integer Check this: ```python >>> t = Symbol('t', real=True, transcendental=True) >>> n = Symbol('n', integer=True) # may be 0 >>> (t**n).is_algebraic # should be None, as n may be 0 False >>> (t**n).is_integer # should be None, as n may be 0 False ``` etc.
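A small illustrative check (not part of the original report) of why `False` is the wrong answer here: the integer exponent may be zero, and `t**0` evaluates to `1`, which is both algebraic and an integer, so neither query can be definitively `False`.

```python
# Since n may be 0, t**n can evaluate to 1, which is algebraic and an
# integer -- so is_algebraic / is_integer must stay undetermined (None).
from sympy import Symbol

t = Symbol('t', real=True, transcendental=True)
n = Symbol('n', integer=True)

expr = t**n
print(expr.subs(n, 0))               # 1
print(expr.subs(n, 0).is_algebraic)  # True
print(expr.subs(n, 0).is_integer)    # True
```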
sympy/sympy
diff --git a/sympy/assumptions/tests/test_refine.py b/sympy/assumptions/tests/test_refine.py index ce94837680..831401071a 100644 --- a/sympy/assumptions/tests/test_refine.py +++ b/sympy/assumptions/tests/test_refine.py @@ -6,40 +6,57 @@ def test_Abs(): - assert refine(Abs(x), Q.positive(x)) == x - assert refine(1 + Abs(x), Q.positive(x)) == 1 + x - assert refine(Abs(x), Q.negative(x)) == -x - assert refine(1 + Abs(x), Q.negative(x)) == 1 - x - + x = Symbol('x', positive=True) + assert Abs(x) == x + assert 1 + Abs(x) == 1 + x + x = Symbol('x', negative=True) + assert Abs(x) == -x + assert 1 + Abs(x) == 1 - x + x = Symbol('x') assert refine(Abs(x**2)) != x**2 - assert refine(Abs(x**2), Q.real(x)) == x**2 + x = Symbol('x', real=True) + assert refine(Abs(x**2)) == x**2 def test_pow(): - assert refine((-1)**x, Q.even(x)) == 1 - assert refine((-1)**x, Q.odd(x)) == -1 - assert refine((-2)**x, Q.even(x)) == 2**x + x = Symbol('x', even=True) + assert refine((-1)**x) == 1 + x = Symbol('x', odd=True) + assert refine((-1)**x) == -1 + x = Symbol('x', even=True) + assert refine((-2)**x) == 2**x # nested powers + x = Symbol('x') + assert refine(sqrt(x**2)) != Abs(x) + x = Symbol('x', complex=True) assert refine(sqrt(x**2)) != Abs(x) - assert refine(sqrt(x**2), Q.complex(x)) != Abs(x) - assert refine(sqrt(x**2), Q.real(x)) == Abs(x) - assert refine(sqrt(x**2), Q.positive(x)) == x + x = Symbol('x', real=True) + assert refine(sqrt(x**2)) == Abs(x) + p = Symbol('p', positive=True) + assert refine(sqrt(p**2)) == p + x = Symbol('x') assert refine((x**3)**(S(1)/3)) != x - - assert refine((x**3)**(S(1)/3), Q.real(x)) != x - assert refine((x**3)**(S(1)/3), Q.positive(x)) == x - - assert refine(sqrt(1/x), Q.real(x)) != 1/sqrt(x) - assert refine(sqrt(1/x), Q.positive(x)) == 1/sqrt(x) + x = Symbol('x', real=True) + assert refine((x**3)**(S(1)/3)) != x + x = Symbol('x', positive=True) + assert refine((x**3)**(S(1)/3)) == x + x = Symbol('x', real=True) + assert refine(sqrt(1/x)) != 1/sqrt(x) + x = Symbol('x', positive=True) + assert refine(sqrt(1/x)) == 1/sqrt(x) # powers of (-1) + x = Symbol('x', even=True) assert refine((-1)**(x + y), Q.even(x)) == (-1)**y + x = Symbol('x', odd=True) + z = Symbol('z', odd=True) assert refine((-1)**(x + y + z), Q.odd(x) & Q.odd(z)) == (-1)**y assert refine((-1)**(x + y + 1), Q.odd(x)) == (-1)**y assert refine((-1)**(x + y + 2), Q.odd(x)) == (-1)**(y + 1) + x = Symbol('x') assert refine((-1)**(x + 3)) == (-1)**(x + 1) - + x = Symbol('x', integer=True) assert refine((-1)**((-1)**x/2 - S.Half), Q.integer(x)) == (-1)**x assert refine((-1)**((-1)**x/2 + S.Half), Q.integer(x)) == (-1)**(x + 1) assert refine((-1)**((-1)**x/2 + 5*S.Half), Q.integer(x)) == (-1)**(x + 1) @@ -47,8 +64,10 @@ def test_pow(): assert refine((-1)**((-1)**x/2 - 9*S.Half), Q.integer(x)) == (-1)**x # powers of Abs + x = Symbol('x', real=True) assert refine(Abs(x)**2, Q.real(x)) == x**2 assert refine(Abs(x)**3, Q.real(x)) == Abs(x)**3 + x = Symbol('x') assert refine(Abs(x)**2) == Abs(x)**2 diff --git a/sympy/core/tests/test_assumptions.py b/sympy/core/tests/test_assumptions.py index b419d0d443..e1e3131638 100644 --- a/sympy/core/tests/test_assumptions.py +++ b/sympy/core/tests/test_assumptions.py @@ -742,7 +742,7 @@ def test_Pow_is_algebraic(): x = Symbol('x') assert (a**r).is_algebraic assert (a**x).is_algebraic is None - assert (na**r).is_algebraic is False + assert (na**r).is_algebraic is None assert (ia**r).is_algebraic assert (ia**ib).is_algebraic is False @@ -753,6 +753,13 @@ def test_Pow_is_algebraic(): assert 
Pow(S.GoldenRatio, sqrt(3), evaluate=False).is_algebraic is False + # issue 8649 + t = Symbol('t', real=True, transcendental=True) + n = Symbol('n', integer=True) + assert (t**n).is_algebraic is None + assert (t**n).is_integer is None + + def test_Mul_is_prime(): from sympy import Mul x = Symbol('x', positive=True, integer=True) diff --git a/sympy/functions/elementary/tests/test_complexes.py b/sympy/functions/elementary/tests/test_complexes.py index d74e2e317e..2351f6570a 100644 --- a/sympy/functions/elementary/tests/test_complexes.py +++ b/sympy/functions/elementary/tests/test_complexes.py @@ -214,7 +214,7 @@ def test_sign(): assert sign(x).is_integer is True assert sign(x).is_real is True assert sign(x).is_zero is False - assert sign(x).doit() == x / Abs(x) + assert sign(x).doit() == x/Abs(x) assert sign(Abs(x)) == 1 assert Abs(sign(x)) == 1 @@ -289,7 +289,7 @@ def test_as_real_imag(): assert sqrt(a**2).as_real_imag() == (sqrt(a**2), 0) i = symbols('i', imaginary=True) - assert sqrt(i**2).as_real_imag() == (0, abs(i)) + assert sqrt(i**2).as_real_imag() == (0, Abs(-i)) @XFAIL @@ -317,10 +317,10 @@ def test_Abs(): assert Abs(I) == 1 assert Abs(-I) == 1 assert Abs(nan) == nan - assert Abs(I * pi) == pi - assert Abs(-I * pi) == pi - assert Abs(I * x) == Abs(x) - assert Abs(-I * x) == Abs(x) + assert Abs(I*pi) == pi + assert Abs(-I*pi) == pi + assert Abs(I*x) == Abs(x) + assert Abs(-I*x) == Abs(x) assert Abs(-2*x) == 2*Abs(x) assert Abs(-2.0*x) == 2.0*Abs(x) assert Abs(2*pi*x*y) == 2*pi*Abs(x*y) diff --git a/sympy/functions/elementary/tests/test_exponential.py b/sympy/functions/elementary/tests/test_exponential.py index ec8ce348d0..40955f557e 100644 --- a/sympy/functions/elementary/tests/test_exponential.py +++ b/sympy/functions/elementary/tests/test_exponential.py @@ -311,7 +311,7 @@ def test_log_sign(): def test_log_expand_complex(): assert log(1 + I).expand(complex=True) == log(2)/2 + I*pi/4 - assert log(1 - sqrt(2)).expand(complex=True) == log(sqrt(2) - 1) + I*pi + assert log(1 - sqrt(2)).expand(complex=True) == log(-1 + sqrt(2)) + I*pi def test_log_apply_evalf(): diff --git a/sympy/functions/elementary/tests/test_piecewise.py b/sympy/functions/elementary/tests/test_piecewise.py index dc0617e20a..5a57c013d2 100644 --- a/sympy/functions/elementary/tests/test_piecewise.py +++ b/sympy/functions/elementary/tests/test_piecewise.py @@ -2,7 +2,7 @@ adjoint, And, Basic, conjugate, diff, expand, Eq, Function, I, Integral, integrate, Interval, lambdify, log, Max, Min, oo, Or, pi, Piecewise, piecewise_fold, Rational, solve, symbols, transpose, - cos, exp, Abs, Not, Symbol, S + cos, exp, Abs, Not, Symbol, S, refine ) from sympy.printing import srepr from sympy.utilities.pytest import XFAIL, raises @@ -337,7 +337,7 @@ def test_piecewise_fold_piecewise_in_cond(): p3 = piecewise_fold(p2) assert(p2.subs(x, -pi/2) == 0.0) assert(p2.subs(x, 1) == 0.0) - assert(p2.subs(x, -pi/4) == 1.0) + assert(refine(p2.subs(x, -pi/4)) == 1.0) p4 = Piecewise((0, Eq(p1, 0)), (1,True)) assert(piecewise_fold(p4) == Piecewise( (0, Or(And(Eq(cos(x), 0), x < 0), Not(x < 0))), (1, True))) diff --git a/sympy/functions/elementary/tests/test_trigonometric.py b/sympy/functions/elementary/tests/test_trigonometric.py index 7fd817001d..7d507a25d9 100644 --- a/sympy/functions/elementary/tests/test_trigonometric.py +++ b/sympy/functions/elementary/tests/test_trigonometric.py @@ -1,5 +1,5 @@ from sympy import (symbols, Symbol, nan, oo, zoo, I, sinh, sin, pi, atan, - acos, Rational, sqrt, asin, acot, coth, E, S, tan, tanh, cos, + acos, 
Rational, sqrt, asin, acot, coth, E, S, tan, tanh, cos, refine, cosh, atan2, exp, log, asinh, acoth, atanh, O, cancel, Matrix, re, im, Float, Pow, gcd, sec, csc, cot, diff, simplify, Heaviside, arg, conjugate, series, FiniteSet, asec, acsc, Mul, sinc, jn, Product, @@ -890,7 +890,7 @@ def test_atan2(): rewrite = e.rewrite(arg) reps = {i: I, r: -2} assert rewrite == -I*log(abs(I*i + r)/sqrt(abs(i**2 + r**2))) + arg((I*i + r)/sqrt(i**2 + r**2)) - assert (e - rewrite).subs(reps).equals(0) + assert refine((e - rewrite).subs(reps)).equals(0) assert conjugate(atan2(x, y)) == atan2(conjugate(x), conjugate(y)) diff --git a/sympy/integrals/tests/test_meijerint.py b/sympy/integrals/tests/test_meijerint.py index 55da116728..9cb4620920 100644 --- a/sympy/integrals/tests/test_meijerint.py +++ b/sympy/integrals/tests/test_meijerint.py @@ -99,7 +99,7 @@ def t(a, b, arg, n): def test_recursive(): - from sympy import symbols + from sympy import symbols, refine a, b, c = symbols('a b c', positive=True) r = exp(-(x - a)**2)*exp(-(x - b)**2) e = integrate(r, (x, 0, oo), meijerg=True) @@ -112,7 +112,7 @@ def test_recursive(): + (2*a + 2*b + c)**2/8)/4) assert simplify(integrate(exp(-(x - a - b - c)**2), (x, 0, oo), meijerg=True)) == \ sqrt(pi)/2*(1 + erf(a + b + c)) - assert simplify(integrate(exp(-(x + a + b + c)**2), (x, 0, oo), meijerg=True)) == \ + assert simplify(refine(integrate(exp(-(x + a + b + c)**2), (x, 0, oo), meijerg=True))) == \ sqrt(pi)/2*(1 - erf(a + b + c)) diff --git a/sympy/integrals/tests/test_transforms.py b/sympy/integrals/tests/test_transforms.py index 1fd77d7db5..71fbfb3e98 100644 --- a/sympy/integrals/tests/test_transforms.py +++ b/sympy/integrals/tests/test_transforms.py @@ -773,14 +773,14 @@ def test_issue_7173(): def test_issue_8514(): - from sympy import simplify + from sympy import simplify, refine a, b, c, = symbols('a b c', positive=True) t = symbols('t', positive=True) ft = simplify(inverse_laplace_transform(1/(a*s**2+b*s+c),s, t)) assert ft == ((exp(t*(exp(I*atan2(0, -4*a*c + b**2)/2) - exp(-I*atan2(0, -4*a*c + b**2)/2))* - sqrt(Abs(4*a*c - b**2))/(4*a))*exp(t*cos(atan2(0, -4*a*c + b**2)/2) - *sqrt(Abs(4*a*c - b**2))/a) + I*sin(t*sin(atan2(0, -4*a*c + b**2)/2) - *sqrt(Abs(4*a*c - b**2))/(2*a)) - cos(t*sin(atan2(0, -4*a*c + b**2)/2) - *sqrt(Abs(4*a*c - b**2))/(2*a)))*exp(-t*(b + cos(atan2(0, -4*a*c + b**2)/2) - *sqrt(Abs(4*a*c - b**2)))/(2*a))/sqrt(-4*a*c + b**2)) + sqrt(refine(Abs(4*a*c - b**2)))/(4*a))*exp(t*cos(atan2(0, -4*a*c + b**2)/2) + *sqrt(refine(Abs(4*a*c - b**2)))/a) + I*sin(t*sin(atan2(0, -4*a*c + b**2)/2) + *sqrt(refine(Abs(4*a*c - b**2)))/(2*a)) - cos(t*sin(atan2(0, -4*a*c + b**2)/2) + *sqrt(refine(Abs(4*a*c - b**2)))/(2*a)))*exp(-t*(b + cos(atan2(0, -4*a*c + b**2)/2) + *sqrt(refine(Abs(4*a*c - b**2))))/(2*a))/sqrt(-4*a*c + b**2)) diff --git a/sympy/matrices/tests/test_matrices.py b/sympy/matrices/tests/test_matrices.py index b9237ca780..2a9b90a0ee 100644 --- a/sympy/matrices/tests/test_matrices.py +++ b/sympy/matrices/tests/test_matrices.py @@ -128,18 +128,6 @@ def test_multiplication(): assert c[2, 0] == 18 assert c[2, 1] == 0 - try: - eval('c = a @ b') - except SyntaxError: - pass - else: - assert c[0, 0] == 7 - assert c[0, 1] == 2 - assert c[1, 0] == 6 - assert c[1, 1] == 6 - assert c[2, 0] == 18 - assert c[2, 1] == 0 - h = matrix_multiply_elementwise(a, c) assert h == a.multiply_elementwise(c) assert h[0, 0] == 7 @@ -167,17 +155,6 @@ def test_multiplication(): assert c[1, 0] == 3*5 assert c[1, 1] == 0 - try: - eval('c = 5 @ b') - except SyntaxError: - pass - 
else: - assert isinstance(c, Matrix) - assert c[0, 0] == 5 - assert c[0, 1] == 2*5 - assert c[1, 0] == 3*5 - assert c[1, 1] == 0 - def test_power(): raises(NonSquareMatrixError, lambda: Matrix((1, 2))**2) diff --git a/sympy/matrices/tests/test_sparse.py b/sympy/matrices/tests/test_sparse.py index 2952878fe0..3695e21ddb 100644 --- a/sympy/matrices/tests/test_sparse.py +++ b/sympy/matrices/tests/test_sparse.py @@ -65,18 +65,6 @@ def sparse_zeros(n): assert c[2, 0] == 18 assert c[2, 1] == 0 - try: - eval('c = a @ b') - except SyntaxError: - pass - else: - assert c[0, 0] == 7 - assert c[0, 1] == 2 - assert c[1, 0] == 6 - assert c[1, 1] == 6 - assert c[2, 0] == 18 - assert c[2, 1] == 0 - x = Symbol("x") c = b * Symbol("x")
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 0, "test_score": 1 }, "num_modified_files": 11 }
0.7
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y libgmp-dev" ], "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work mpmath==1.3.0 packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work py @ file:///opt/conda/conda-bld/py_1644396412707/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 -e git+https://github.com/sympy/sympy.git@6b07c2a991f698bbe4df5fdaf0bccdad34dd4f73#egg=sympy toml @ file:///tmp/build/80754af9/toml_1616166611790/work typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: sympy channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - mpmath==1.3.0 prefix: /opt/conda/envs/sympy
[ "sympy/core/tests/test_assumptions.py::test_Pow_is_algebraic" ]
[ "sympy/functions/elementary/tests/test_trigonometric.py::test_sincos_rewrite_sqrt" ]
[ "sympy/assumptions/tests/test_refine.py::test_Abs", "sympy/assumptions/tests/test_refine.py::test_pow", "sympy/assumptions/tests/test_refine.py::test_exp", "sympy/assumptions/tests/test_refine.py::test_Relational", "sympy/assumptions/tests/test_refine.py::test_Piecewise", "sympy/assumptions/tests/test_refine.py::test_atan2", "sympy/assumptions/tests/test_refine.py::test_func_args", "sympy/assumptions/tests/test_refine.py::test_eval_refine", "sympy/core/tests/test_assumptions.py::test_symbol_unset", "sympy/core/tests/test_assumptions.py::test_zero", "sympy/core/tests/test_assumptions.py::test_one", "sympy/core/tests/test_assumptions.py::test_negativeone", "sympy/core/tests/test_assumptions.py::test_infinity", "sympy/core/tests/test_assumptions.py::test_neg_infinity", "sympy/core/tests/test_assumptions.py::test_nan", "sympy/core/tests/test_assumptions.py::test_pos_rational", "sympy/core/tests/test_assumptions.py::test_neg_rational", "sympy/core/tests/test_assumptions.py::test_pi", "sympy/core/tests/test_assumptions.py::test_E", "sympy/core/tests/test_assumptions.py::test_I", "sympy/core/tests/test_assumptions.py::test_symbol_real", "sympy/core/tests/test_assumptions.py::test_symbol_imaginary", "sympy/core/tests/test_assumptions.py::test_symbol_zero", "sympy/core/tests/test_assumptions.py::test_symbol_positive", "sympy/core/tests/test_assumptions.py::test_neg_symbol_positive", "sympy/core/tests/test_assumptions.py::test_symbol_nonpositive", "sympy/core/tests/test_assumptions.py::test_neg_symbol_nonpositive", "sympy/core/tests/test_assumptions.py::test_symbol_falsepositive", "sympy/core/tests/test_assumptions.py::test_symbol_falsepositive_mul", "sympy/core/tests/test_assumptions.py::test_neg_symbol_falsepositive", "sympy/core/tests/test_assumptions.py::test_neg_symbol_falsenegative", "sympy/core/tests/test_assumptions.py::test_symbol_falsepositive_real", "sympy/core/tests/test_assumptions.py::test_neg_symbol_falsepositive_real", "sympy/core/tests/test_assumptions.py::test_symbol_falsenonnegative", "sympy/core/tests/test_assumptions.py::test_symbol_falsenonnegative_real", "sympy/core/tests/test_assumptions.py::test_neg_symbol_falsenonnegative_real", "sympy/core/tests/test_assumptions.py::test_prime", "sympy/core/tests/test_assumptions.py::test_composite", "sympy/core/tests/test_assumptions.py::test_prime_symbol", "sympy/core/tests/test_assumptions.py::test_symbol_noncommutative", "sympy/core/tests/test_assumptions.py::test_other_symbol", "sympy/core/tests/test_assumptions.py::test_issue_3825", "sympy/core/tests/test_assumptions.py::test_issue_4822", "sympy/core/tests/test_assumptions.py::test_hash_vs_typeinfo", "sympy/core/tests/test_assumptions.py::test_hash_vs_typeinfo_2", "sympy/core/tests/test_assumptions.py::test_hash_vs_eq", "sympy/core/tests/test_assumptions.py::test_Add_is_pos_neg", "sympy/core/tests/test_assumptions.py::test_Add_is_imaginary", "sympy/core/tests/test_assumptions.py::test_Add_is_algebraic", "sympy/core/tests/test_assumptions.py::test_Mul_is_algebraic", "sympy/core/tests/test_assumptions.py::test_Mul_is_prime", "sympy/core/tests/test_assumptions.py::test_Pow_is_prime", "sympy/core/tests/test_assumptions.py::test_Mul_is_infinite", "sympy/core/tests/test_assumptions.py::test_special_is_rational", "sympy/core/tests/test_assumptions.py::test_sanitize_assumptions", "sympy/core/tests/test_assumptions.py::test_special_assumptions", "sympy/core/tests/test_assumptions.py::test_inconsistent", "sympy/core/tests/test_assumptions.py::test_issue_6631", 
"sympy/core/tests/test_assumptions.py::test_issue_2730", "sympy/core/tests/test_assumptions.py::test_issue_4149", "sympy/core/tests/test_assumptions.py::test_issue_2920", "sympy/core/tests/test_assumptions.py::test_issue_7899", "sympy/core/tests/test_assumptions.py::test_issue_8075", "sympy/core/tests/test_assumptions.py::test_issue_8642", "sympy/core/tests/test_assumptions.py::test_issues_8632_8633_8638_8675_8992", "sympy/core/tests/test_assumptions.py::test_issue_9115", "sympy/core/tests/test_assumptions.py::test_issue_9165", "sympy/core/tests/test_assumptions.py::test_issue_10024", "sympy/core/tests/test_assumptions.py::test_issue_10302", "sympy/core/tests/test_assumptions.py::test_complex_reciprocal_imaginary", "sympy/functions/elementary/tests/test_complexes.py::test_re", "sympy/functions/elementary/tests/test_complexes.py::test_im", "sympy/functions/elementary/tests/test_complexes.py::test_sign", "sympy/functions/elementary/tests/test_complexes.py::test_as_real_imag", "sympy/functions/elementary/tests/test_complexes.py::test_Abs", "sympy/functions/elementary/tests/test_complexes.py::test_Abs_rewrite", "sympy/functions/elementary/tests/test_complexes.py::test_Abs_real", "sympy/functions/elementary/tests/test_complexes.py::test_Abs_properties", "sympy/functions/elementary/tests/test_complexes.py::test_abs", "sympy/functions/elementary/tests/test_complexes.py::test_arg", "sympy/functions/elementary/tests/test_complexes.py::test_arg_rewrite", "sympy/functions/elementary/tests/test_complexes.py::test_adjoint", "sympy/functions/elementary/tests/test_complexes.py::test_conjugate", "sympy/functions/elementary/tests/test_complexes.py::test_conjugate_transpose", "sympy/functions/elementary/tests/test_complexes.py::test_transpose", "sympy/functions/elementary/tests/test_complexes.py::test_polarify", "sympy/functions/elementary/tests/test_complexes.py::test_unpolarify", "sympy/functions/elementary/tests/test_complexes.py::test_issue_4035", "sympy/functions/elementary/tests/test_complexes.py::test_issue_3206", "sympy/functions/elementary/tests/test_complexes.py::test_issue_4754_derivative_conjugate", "sympy/functions/elementary/tests/test_complexes.py::test_derivatives_issue_4757", "sympy/functions/elementary/tests/test_complexes.py::test_periodic_argument", "sympy/functions/elementary/tests/test_complexes.py::test_principal_branch", "sympy/functions/elementary/tests/test_exponential.py::test_exp_values", "sympy/functions/elementary/tests/test_exponential.py::test_exp_log", "sympy/functions/elementary/tests/test_exponential.py::test_exp_expand", "sympy/functions/elementary/tests/test_exponential.py::test_exp__as_base_exp", "sympy/functions/elementary/tests/test_exponential.py::test_exp_infinity", "sympy/functions/elementary/tests/test_exponential.py::test_exp_subs", "sympy/functions/elementary/tests/test_exponential.py::test_exp_conjugate", "sympy/functions/elementary/tests/test_exponential.py::test_exp_rewrite", "sympy/functions/elementary/tests/test_exponential.py::test_exp_leading_term", "sympy/functions/elementary/tests/test_exponential.py::test_exp_taylor_term", "sympy/functions/elementary/tests/test_exponential.py::test_log_values", "sympy/functions/elementary/tests/test_exponential.py::test_log_base", "sympy/functions/elementary/tests/test_exponential.py::test_log_symbolic", "sympy/functions/elementary/tests/test_exponential.py::test_exp_assumptions", "sympy/functions/elementary/tests/test_exponential.py::test_exp_AccumBounds", 
"sympy/functions/elementary/tests/test_exponential.py::test_log_assumptions", "sympy/functions/elementary/tests/test_exponential.py::test_log_hashing", "sympy/functions/elementary/tests/test_exponential.py::test_log_sign", "sympy/functions/elementary/tests/test_exponential.py::test_log_expand_complex", "sympy/functions/elementary/tests/test_exponential.py::test_log_apply_evalf", "sympy/functions/elementary/tests/test_exponential.py::test_log_expand", "sympy/functions/elementary/tests/test_exponential.py::test_log_simplify", "sympy/functions/elementary/tests/test_exponential.py::test_log_AccumBounds", "sympy/functions/elementary/tests/test_exponential.py::test_lambertw", "sympy/functions/elementary/tests/test_exponential.py::test_issue_5673", "sympy/functions/elementary/tests/test_exponential.py::test_exp_expand_NC", "sympy/functions/elementary/tests/test_exponential.py::test_as_numer_denom", "sympy/functions/elementary/tests/test_exponential.py::test_polar", "sympy/functions/elementary/tests/test_exponential.py::test_log_product", "sympy/functions/elementary/tests/test_exponential.py::test_issue_8866", "sympy/functions/elementary/tests/test_exponential.py::test_issue_9116", "sympy/functions/elementary/tests/test_piecewise.py::test_piecewise", "sympy/functions/elementary/tests/test_piecewise.py::test_piecewise_free_symbols", "sympy/functions/elementary/tests/test_piecewise.py::test_piecewise_integrate", "sympy/functions/elementary/tests/test_piecewise.py::test_piecewise_integrate_inequality_conditions", "sympy/functions/elementary/tests/test_piecewise.py::test_piecewise_integrate_symbolic_conditions", "sympy/functions/elementary/tests/test_piecewise.py::test_piecewise_integrate_independent_conditions", "sympy/functions/elementary/tests/test_piecewise.py::test_piecewise_simplify", "sympy/functions/elementary/tests/test_piecewise.py::test_piecewise_solve", "sympy/functions/elementary/tests/test_piecewise.py::test_piecewise_fold", "sympy/functions/elementary/tests/test_piecewise.py::test_piecewise_fold_piecewise_in_cond", "sympy/functions/elementary/tests/test_piecewise.py::test_piecewise_fold_expand", "sympy/functions/elementary/tests/test_piecewise.py::test_piecewise_duplicate", "sympy/functions/elementary/tests/test_piecewise.py::test_doit", "sympy/functions/elementary/tests/test_piecewise.py::test_piecewise_interval", "sympy/functions/elementary/tests/test_piecewise.py::test_piecewise_collapse", "sympy/functions/elementary/tests/test_piecewise.py::test_piecewise_lambdify", "sympy/functions/elementary/tests/test_piecewise.py::test_piecewise_series", "sympy/functions/elementary/tests/test_piecewise.py::test_piecewise_as_leading_term", "sympy/functions/elementary/tests/test_piecewise.py::test_piecewise_complex", "sympy/functions/elementary/tests/test_piecewise.py::test_conjugate_transpose", "sympy/functions/elementary/tests/test_piecewise.py::test_piecewise_evaluate", "sympy/functions/elementary/tests/test_piecewise.py::test_as_expr_set_pairs", "sympy/functions/elementary/tests/test_piecewise.py::test_S_srepr_is_identity", "sympy/functions/elementary/tests/test_trigonometric.py::test_sin", "sympy/functions/elementary/tests/test_trigonometric.py::test_sin_cos", "sympy/functions/elementary/tests/test_trigonometric.py::test_sin_series", "sympy/functions/elementary/tests/test_trigonometric.py::test_sin_rewrite", "sympy/functions/elementary/tests/test_trigonometric.py::test_sin_expansion", "sympy/functions/elementary/tests/test_trigonometric.py::test_sin_AccumBounds", 
"sympy/functions/elementary/tests/test_trigonometric.py::test_trig_symmetry", "sympy/functions/elementary/tests/test_trigonometric.py::test_cos", "sympy/functions/elementary/tests/test_trigonometric.py::test_issue_6190", "sympy/functions/elementary/tests/test_trigonometric.py::test_cos_series", "sympy/functions/elementary/tests/test_trigonometric.py::test_cos_rewrite", "sympy/functions/elementary/tests/test_trigonometric.py::test_cos_expansion", "sympy/functions/elementary/tests/test_trigonometric.py::test_cos_AccumBounds", "sympy/functions/elementary/tests/test_trigonometric.py::test_tan", "sympy/functions/elementary/tests/test_trigonometric.py::test_tan_series", "sympy/functions/elementary/tests/test_trigonometric.py::test_tan_rewrite", "sympy/functions/elementary/tests/test_trigonometric.py::test_tan_subs", "sympy/functions/elementary/tests/test_trigonometric.py::test_tan_expansion", "sympy/functions/elementary/tests/test_trigonometric.py::test_tan_AccumBounds", "sympy/functions/elementary/tests/test_trigonometric.py::test_cot", "sympy/functions/elementary/tests/test_trigonometric.py::test_cot_series", "sympy/functions/elementary/tests/test_trigonometric.py::test_cot_rewrite", "sympy/functions/elementary/tests/test_trigonometric.py::test_cot_subs", "sympy/functions/elementary/tests/test_trigonometric.py::test_cot_expansion", "sympy/functions/elementary/tests/test_trigonometric.py::test_cot_AccumBounds", "sympy/functions/elementary/tests/test_trigonometric.py::test_sinc", "sympy/functions/elementary/tests/test_trigonometric.py::test_asin", "sympy/functions/elementary/tests/test_trigonometric.py::test_asin_series", "sympy/functions/elementary/tests/test_trigonometric.py::test_asin_rewrite", "sympy/functions/elementary/tests/test_trigonometric.py::test_acos", "sympy/functions/elementary/tests/test_trigonometric.py::test_acos_series", "sympy/functions/elementary/tests/test_trigonometric.py::test_acos_rewrite", "sympy/functions/elementary/tests/test_trigonometric.py::test_atan", "sympy/functions/elementary/tests/test_trigonometric.py::test_atan_rewrite", "sympy/functions/elementary/tests/test_trigonometric.py::test_atan2", "sympy/functions/elementary/tests/test_trigonometric.py::test_acot", "sympy/functions/elementary/tests/test_trigonometric.py::test_acot_rewrite", "sympy/functions/elementary/tests/test_trigonometric.py::test_attributes", "sympy/functions/elementary/tests/test_trigonometric.py::test_sincos_rewrite", "sympy/functions/elementary/tests/test_trigonometric.py::test_evenodd_rewrite", "sympy/functions/elementary/tests/test_trigonometric.py::test_issue_4547", "sympy/functions/elementary/tests/test_trigonometric.py::test_as_leading_term_issue_5272", "sympy/functions/elementary/tests/test_trigonometric.py::test_leading_terms", "sympy/functions/elementary/tests/test_trigonometric.py::test_atan2_expansion", "sympy/functions/elementary/tests/test_trigonometric.py::test_aseries", "sympy/functions/elementary/tests/test_trigonometric.py::test_issue_4420", "sympy/functions/elementary/tests/test_trigonometric.py::test_inverses", "sympy/functions/elementary/tests/test_trigonometric.py::test_real_imag", "sympy/functions/elementary/tests/test_trigonometric.py::test_tancot_rewrite_sqrt", "sympy/functions/elementary/tests/test_trigonometric.py::test_sec", "sympy/functions/elementary/tests/test_trigonometric.py::test_csc", "sympy/functions/elementary/tests/test_trigonometric.py::test_asec", "sympy/functions/elementary/tests/test_trigonometric.py::test_asec_is_real", 
"sympy/functions/elementary/tests/test_trigonometric.py::test_acsc", "sympy/functions/elementary/tests/test_trigonometric.py::test_issue_8653", "sympy/functions/elementary/tests/test_trigonometric.py::test_issue_9157", "sympy/integrals/tests/test_meijerint.py::test_rewrite_single", "sympy/integrals/tests/test_meijerint.py::test_rewrite1", "sympy/integrals/tests/test_meijerint.py::test_meijerint_indefinite_numerically", "sympy/integrals/tests/test_meijerint.py::test_meijerint_definite", "sympy/integrals/tests/test_meijerint.py::test_inflate", "sympy/integrals/tests/test_meijerint.py::test_recursive", "sympy/integrals/tests/test_meijerint.py::test_meijerint", "sympy/integrals/tests/test_meijerint.py::test_bessel", "sympy/integrals/tests/test_meijerint.py::test_inversion", "sympy/integrals/tests/test_meijerint.py::test_lookup_table", "sympy/integrals/tests/test_meijerint.py::test_branch_bug", "sympy/integrals/tests/test_meijerint.py::test_linear_subs", "sympy/integrals/tests/test_meijerint.py::test_probability", "sympy/integrals/tests/test_meijerint.py::test_expint", "sympy/integrals/tests/test_meijerint.py::test_messy", "sympy/integrals/tests/test_meijerint.py::test_issue_6122", "sympy/integrals/tests/test_meijerint.py::test_issue_6252", "sympy/integrals/tests/test_meijerint.py::test_issue_6348", "sympy/integrals/tests/test_meijerint.py::test_fresnel", "sympy/integrals/tests/test_meijerint.py::test_issue_6860", "sympy/integrals/tests/test_meijerint.py::test_issue_8368", "sympy/integrals/tests/test_transforms.py::test_undefined_function", "sympy/integrals/tests/test_transforms.py::test_free_symbols", "sympy/integrals/tests/test_transforms.py::test_as_integral", "sympy/integrals/tests/test_transforms.py::test_mellin_transform", "sympy/integrals/tests/test_transforms.py::test_mellin_transform_bessel", "sympy/integrals/tests/test_transforms.py::test_expint", "sympy/integrals/tests/test_transforms.py::test_inverse_mellin_transform", "sympy/integrals/tests/test_transforms.py::test_laplace_transform", "sympy/integrals/tests/test_transforms.py::test_issue_8368_7173", "sympy/integrals/tests/test_transforms.py::test_inverse_laplace_transform", "sympy/integrals/tests/test_transforms.py::test_fourier_transform", "sympy/integrals/tests/test_transforms.py::test_sine_transform", "sympy/integrals/tests/test_transforms.py::test_cosine_transform", "sympy/integrals/tests/test_transforms.py::test_hankel_transform", "sympy/integrals/tests/test_transforms.py::test_issue_7181", "sympy/integrals/tests/test_transforms.py::test_issue_8882", "sympy/integrals/tests/test_transforms.py::test_issue_7173", "sympy/integrals/tests/test_transforms.py::test_issue_8514", "sympy/matrices/tests/test_matrices.py::test_args", "sympy/matrices/tests/test_matrices.py::test_division", "sympy/matrices/tests/test_matrices.py::test_sum", "sympy/matrices/tests/test_matrices.py::test_addition", "sympy/matrices/tests/test_matrices.py::test_fancy_index_matrix", "sympy/matrices/tests/test_matrices.py::test_multiplication", "sympy/matrices/tests/test_matrices.py::test_power", "sympy/matrices/tests/test_matrices.py::test_creation", "sympy/matrices/tests/test_matrices.py::test_tolist", "sympy/matrices/tests/test_matrices.py::test_as_mutable", "sympy/matrices/tests/test_matrices.py::test_determinant", "sympy/matrices/tests/test_matrices.py::test_det_LU_decomposition", "sympy/matrices/tests/test_matrices.py::test_berkowitz_minors", "sympy/matrices/tests/test_matrices.py::test_slicing", 
"sympy/matrices/tests/test_matrices.py::test_submatrix_assignment", "sympy/matrices/tests/test_matrices.py::test_extract", "sympy/matrices/tests/test_matrices.py::test_reshape", "sympy/matrices/tests/test_matrices.py::test_applyfunc", "sympy/matrices/tests/test_matrices.py::test_expand", "sympy/matrices/tests/test_matrices.py::test_random", "sympy/matrices/tests/test_matrices.py::test_LUdecomp", "sympy/matrices/tests/test_matrices.py::test_LUsolve", "sympy/matrices/tests/test_matrices.py::test_QRsolve", "sympy/matrices/tests/test_matrices.py::test_inverse", "sympy/matrices/tests/test_matrices.py::test_matrix_inverse_mod", "sympy/matrices/tests/test_matrices.py::test_util", "sympy/matrices/tests/test_matrices.py::test_jacobian_hessian", "sympy/matrices/tests/test_matrices.py::test_QR", "sympy/matrices/tests/test_matrices.py::test_QR_non_square", "sympy/matrices/tests/test_matrices.py::test_nullspace", "sympy/matrices/tests/test_matrices.py::test_columnspace", "sympy/matrices/tests/test_matrices.py::test_wronskian", "sympy/matrices/tests/test_matrices.py::test_eigen", "sympy/matrices/tests/test_matrices.py::test_subs", "sympy/matrices/tests/test_matrices.py::test_xreplace", "sympy/matrices/tests/test_matrices.py::test_simplify", "sympy/matrices/tests/test_matrices.py::test_transpose", "sympy/matrices/tests/test_matrices.py::test_conjugate", "sympy/matrices/tests/test_matrices.py::test_conj_dirac", "sympy/matrices/tests/test_matrices.py::test_trace", "sympy/matrices/tests/test_matrices.py::test_shape", "sympy/matrices/tests/test_matrices.py::test_col_row_op", "sympy/matrices/tests/test_matrices.py::test_zip_row_op", "sympy/matrices/tests/test_matrices.py::test_issue_3950", "sympy/matrices/tests/test_matrices.py::test_issue_3981", "sympy/matrices/tests/test_matrices.py::test_evalf", "sympy/matrices/tests/test_matrices.py::test_is_symbolic", "sympy/matrices/tests/test_matrices.py::test_is_upper", "sympy/matrices/tests/test_matrices.py::test_is_lower", "sympy/matrices/tests/test_matrices.py::test_is_nilpotent", "sympy/matrices/tests/test_matrices.py::test_zeros_ones_fill", "sympy/matrices/tests/test_matrices.py::test_empty_zeros", "sympy/matrices/tests/test_matrices.py::test_issue_3749", "sympy/matrices/tests/test_matrices.py::test_inv_iszerofunc", "sympy/matrices/tests/test_matrices.py::test_jacobian_metrics", "sympy/matrices/tests/test_matrices.py::test_jacobian2", "sympy/matrices/tests/test_matrices.py::test_issue_4564", "sympy/matrices/tests/test_matrices.py::test_nonvectorJacobian", "sympy/matrices/tests/test_matrices.py::test_vec", "sympy/matrices/tests/test_matrices.py::test_vech", "sympy/matrices/tests/test_matrices.py::test_vech_errors", "sympy/matrices/tests/test_matrices.py::test_diag", "sympy/matrices/tests/test_matrices.py::test_get_diag_blocks1", "sympy/matrices/tests/test_matrices.py::test_get_diag_blocks2", "sympy/matrices/tests/test_matrices.py::test_inv_block", "sympy/matrices/tests/test_matrices.py::test_creation_args", "sympy/matrices/tests/test_matrices.py::test_diagonal_symmetrical", "sympy/matrices/tests/test_matrices.py::test_diagonalization", "sympy/matrices/tests/test_matrices.py::test_jordan_form", "sympy/matrices/tests/test_matrices.py::test_jordan_form_complex_issue_9274", "sympy/matrices/tests/test_matrices.py::test_issue_10220", "sympy/matrices/tests/test_matrices.py::test_Matrix_berkowitz_charpoly", "sympy/matrices/tests/test_matrices.py::test_exp", "sympy/matrices/tests/test_matrices.py::test_has", "sympy/matrices/tests/test_matrices.py::test_errors", 
"sympy/matrices/tests/test_matrices.py::test_len", "sympy/matrices/tests/test_matrices.py::test_integrate", "sympy/matrices/tests/test_matrices.py::test_limit", "sympy/matrices/tests/test_matrices.py::test_diff", "sympy/matrices/tests/test_matrices.py::test_getattr", "sympy/matrices/tests/test_matrices.py::test_hessenberg", "sympy/matrices/tests/test_matrices.py::test_cholesky", "sympy/matrices/tests/test_matrices.py::test_LDLdecomposition", "sympy/matrices/tests/test_matrices.py::test_cholesky_solve", "sympy/matrices/tests/test_matrices.py::test_LDLsolve", "sympy/matrices/tests/test_matrices.py::test_lower_triangular_solve", "sympy/matrices/tests/test_matrices.py::test_upper_triangular_solve", "sympy/matrices/tests/test_matrices.py::test_diagonal_solve", "sympy/matrices/tests/test_matrices.py::test_matrix_norm", "sympy/matrices/tests/test_matrices.py::test_singular_values", "sympy/matrices/tests/test_matrices.py::test_condition_number", "sympy/matrices/tests/test_matrices.py::test_equality", "sympy/matrices/tests/test_matrices.py::test_col_join", "sympy/matrices/tests/test_matrices.py::test_row_insert", "sympy/matrices/tests/test_matrices.py::test_col_insert", "sympy/matrices/tests/test_matrices.py::test_normalized", "sympy/matrices/tests/test_matrices.py::test_print_nonzero", "sympy/matrices/tests/test_matrices.py::test_zeros_eye", "sympy/matrices/tests/test_matrices.py::test_is_zero", "sympy/matrices/tests/test_matrices.py::test_rotation_matrices", "sympy/matrices/tests/test_matrices.py::test_DeferredVector", "sympy/matrices/tests/test_matrices.py::test_DeferredVector_not_iterable", "sympy/matrices/tests/test_matrices.py::test_DeferredVector_Matrix", "sympy/matrices/tests/test_matrices.py::test_GramSchmidt", "sympy/matrices/tests/test_matrices.py::test_casoratian", "sympy/matrices/tests/test_matrices.py::test_zero_dimension_multiply", "sympy/matrices/tests/test_matrices.py::test_slice_issue_2884", "sympy/matrices/tests/test_matrices.py::test_slice_issue_3401", "sympy/matrices/tests/test_matrices.py::test_copyin", "sympy/matrices/tests/test_matrices.py::test_invertible_check", "sympy/matrices/tests/test_matrices.py::test_issue_5964", "sympy/matrices/tests/test_matrices.py::test_issue_7604", "sympy/matrices/tests/test_matrices.py::test_is_Identity", "sympy/matrices/tests/test_matrices.py::test_dot", "sympy/matrices/tests/test_matrices.py::test_dual", "sympy/matrices/tests/test_matrices.py::test_anti_symmetric", "sympy/matrices/tests/test_matrices.py::test_normalize_sort_diogonalization", "sympy/matrices/tests/test_matrices.py::test_issue_5321", "sympy/matrices/tests/test_matrices.py::test_issue_5320", "sympy/matrices/tests/test_matrices.py::test_cross", "sympy/matrices/tests/test_matrices.py::test_hash", "sympy/matrices/tests/test_matrices.py::test_adjoint", "sympy/matrices/tests/test_matrices.py::test_simplify_immutable", "sympy/matrices/tests/test_matrices.py::test_rank", "sympy/matrices/tests/test_matrices.py::test_replace", "sympy/matrices/tests/test_matrices.py::test_replace_map", "sympy/matrices/tests/test_matrices.py::test_atoms", "sympy/matrices/tests/test_matrices.py::test_pinv", "sympy/matrices/tests/test_matrices.py::test_pinv_solve", "sympy/matrices/tests/test_matrices.py::test_gauss_jordan_solve", "sympy/matrices/tests/test_matrices.py::test_issue_7201", "sympy/matrices/tests/test_matrices.py::test_free_symbols", "sympy/matrices/tests/test_matrices.py::test_hermitian", "sympy/matrices/tests/test_matrices.py::test_doit", 
"sympy/matrices/tests/test_matrices.py::test_issue_9457_9467_9876", "sympy/matrices/tests/test_matrices.py::test_issue_9422", "sympy/matrices/tests/test_sparse.py::test_sparse_matrix", "sympy/matrices/tests/test_sparse.py::test_transpose", "sympy/matrices/tests/test_sparse.py::test_trace", "sympy/matrices/tests/test_sparse.py::test_CL_RL", "sympy/matrices/tests/test_sparse.py::test_add", "sympy/matrices/tests/test_sparse.py::test_errors", "sympy/matrices/tests/test_sparse.py::test_len", "sympy/matrices/tests/test_sparse.py::test_sparse_zeros_sparse_eye", "sympy/matrices/tests/test_sparse.py::test_copyin", "sympy/matrices/tests/test_sparse.py::test_sparse_solve", "sympy/matrices/tests/test_sparse.py::test_hermitian" ]
[]
BSD
437
scrapy__scrapy-1786
41588397c04356f2b0c393b61ed68271a08d6ccd
2016-02-17 15:59:16
a975a50558cd78a1573bee2e957afcb419fd1bd6
diff --git a/scrapy/responsetypes.py b/scrapy/responsetypes.py
index 4880cc7b9..c667b141d 100644
--- a/scrapy/responsetypes.py
+++ b/scrapy/responsetypes.py
@@ -59,7 +59,8 @@ class ResponseTypes(object):
 
     def from_content_disposition(self, content_disposition):
         try:
-            filename = to_native_str(content_disposition).split(';')[1].split('=')[1]
+            filename = to_native_str(content_disposition,
+                                     encoding='latin-1', errors='replace').split(';')[1].split('=')[1]
             filename = filename.strip('"\'')
             return self.from_filename(filename)
         except IndexError:
PY3: error decoding Content-Disposition header

This request

```
scrapy shell 'http://npe.com.cn/plus/save_to_doc.php?id=1666'
```

raises this error:

```
Traceback (most recent call last):
  File "/Users/kmike/envs/dl/bin/scrapy", line 9, in <module>
    load_entry_point('Scrapy', 'console_scripts', 'scrapy')()
  File "/Users/kmike/svn/scrapy/scrapy/cmdline.py", line 142, in execute
    _run_print_help(parser, _run_command, cmd, args, opts)
  File "/Users/kmike/svn/scrapy/scrapy/cmdline.py", line 88, in _run_print_help
    func(*a, **kw)
  File "/Users/kmike/svn/scrapy/scrapy/cmdline.py", line 149, in _run_command
    cmd.run(args, opts)
  File "/Users/kmike/svn/scrapy/scrapy/commands/shell.py", line 71, in run
    shell.start(url=url)
  File "/Users/kmike/svn/scrapy/scrapy/shell.py", line 47, in start
    self.fetch(url, spider)
  File "/Users/kmike/svn/scrapy/scrapy/shell.py", line 112, in fetch
    reactor, self._schedule, request, spider)
  File "/Users/kmike/envs/dl/lib/python3.5/site-packages/Twisted-15.5.0-py3.5.egg/twisted/internet/threads.py", line 122, in blockingCallFromThread
    result.raiseException()
  File "/Users/kmike/envs/dl/lib/python3.5/site-packages/Twisted-15.5.0-py3.5.egg/twisted/python/failure.py", line 368, in raiseException
    raise self.value.with_traceback(self.tb)
UnicodeDecodeError: 'utf-8' codec can't decode byte 0xb8 in position 25: invalid start byte
```

The error points to a wrong location (similar to #1760); the real traceback is

```
Traceback (most recent call last):
  File "/Users/kmike/envs/dl/lib/python3.5/site-packages/Twisted-15.5.0-py3.5.egg/twisted/internet/defer.py", line 1126, in _inlineCallbacks
    result = result.throwExceptionIntoGenerator(g)
  File "/Users/kmike/envs/dl/lib/python3.5/site-packages/Twisted-15.5.0-py3.5.egg/twisted/python/failure.py", line 389, in throwExceptionIntoGenerator
    return g.throw(self.type, self.value, self.tb)
  File "/Users/kmike/svn/scrapy/scrapy/core/downloader/middleware.py", line 43, in process_request
    defer.returnValue((yield download_func(request=request,spider=spider)))
  File "/Users/kmike/envs/dl/lib/python3.5/site-packages/Twisted-15.5.0-py3.5.egg/twisted/internet/defer.py", line 588, in _runCallbacks
    current.result = callback(current.result, *args, **kw)
  File "/Users/kmike/svn/scrapy/scrapy/core/downloader/handlers/http11.py", line 272, in _cb_bodydone
    respcls = responsetypes.from_args(headers=headers, url=url)
  File "/Users/kmike/svn/scrapy/scrapy/responsetypes.py", line 110, in from_args
    cls = self.from_headers(headers)
  File "/Users/kmike/svn/scrapy/scrapy/responsetypes.py", line 78, in from_headers
    cls = self.from_content_disposition(headers[b'Content-Disposition'])
  File "/Users/kmike/svn/scrapy/scrapy/responsetypes.py", line 62, in from_content_disposition
    filename = to_native_str(content_disposition).split(';')[1].split('=')[1]
  File "/Users/kmike/svn/scrapy/scrapy/utils/python.py", line 129, in to_native_str
    return to_unicode(text, encoding, errors)
  File "/Users/kmike/svn/scrapy/scrapy/utils/python.py", line 107, in to_unicode
    return text.decode(encoding, errors)
UnicodeDecodeError: 'utf-8' codec can't decode byte 0xb8 in position 25: invalid start byte
```

It looks like Content-Disposition is decoded using utf-8, but the encoding was not UTF-8.
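For illustration, here is a minimal standalone sketch (not Scrapy's own code) of why decoding the raw header bytes as latin-1, as the patch above does, avoids this error; the header value below is a made-up non-UTF-8 byte string:

```python
# Hypothetical Content-Disposition header bytes containing a non-UTF-8 byte (0xb8).
content_disposition = b'attachment;filename=data\xb8.doc'

# Decoding as UTF-8 raises, which is what the traceback above shows.
try:
    content_disposition.decode('utf-8')
except UnicodeDecodeError as exc:
    print('utf-8 decode failed:', exc)

# latin-1 maps every byte to a character, so it never raises; the file
# extension is still recoverable even if accented characters come out mangled.
filename = content_disposition.decode('latin-1').split(';')[1].split('=')[1]
print(filename)  # -> 'data¸.doc'
```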
scrapy/scrapy
diff --git a/tests/test_responsetypes.py b/tests/test_responsetypes.py
index 2374d518f..118136ac4 100644
--- a/tests/test_responsetypes.py
+++ b/tests/test_responsetypes.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 import unittest
 
 from scrapy.responsetypes import responsetypes
@@ -20,8 +21,14 @@ class ResponseTypesTest(unittest.TestCase):
 
     def test_from_content_disposition(self):
         mappings = [
-            ('attachment; filename="data.xml"', XmlResponse),
-            ('attachment; filename=data.xml', XmlResponse),
+            (b'attachment; filename="data.xml"', XmlResponse),
+            (b'attachment; filename=data.xml', XmlResponse),
+            (u'attachment;filename=data£.tar.gz'.encode('utf-8'), Response),
+            (u'attachment;filename=dataµ.tar.gz'.encode('latin-1'), Response),
+            (u'attachment;filename=data高.doc'.encode('gbk'), Response),
+            (u'attachment;filename=دورهdata.html'.encode('cp720'), HtmlResponse),
+            (u'attachment;filename=日本語版Wikipedia.xml'.encode('iso2022_jp'), XmlResponse),
+
         ]
         for source, cls in mappings:
             retcls = responsetypes.from_content_disposition(source)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks", "has_issue_reference" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 1 }
1.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==25.3.0 Automat==24.8.1 cffi==1.17.1 constantly==23.10.4 cryptography==44.0.2 cssselect==1.3.0 exceptiongroup==1.2.2 hyperlink==21.0.0 idna==3.10 incremental==24.7.2 iniconfig==2.1.0 jmespath==1.0.1 lxml==5.3.1 packaging==24.2 parsel==1.10.0 pluggy==1.5.0 pyasn1==0.6.1 pyasn1_modules==0.4.2 pycparser==2.22 PyDispatcher==2.0.7 pyOpenSSL==25.0.0 pytest==8.3.5 queuelib==1.7.0 -e git+https://github.com/scrapy/scrapy.git@41588397c04356f2b0c393b61ed68271a08d6ccd#egg=Scrapy service-identity==24.2.0 six==1.17.0 tomli==2.2.1 Twisted==24.11.0 typing_extensions==4.13.0 w3lib==2.3.1 zope.interface==7.2
name: scrapy channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==25.3.0 - automat==24.8.1 - cffi==1.17.1 - constantly==23.10.4 - cryptography==44.0.2 - cssselect==1.3.0 - exceptiongroup==1.2.2 - hyperlink==21.0.0 - idna==3.10 - incremental==24.7.2 - iniconfig==2.1.0 - jmespath==1.0.1 - lxml==5.3.1 - packaging==24.2 - parsel==1.10.0 - pluggy==1.5.0 - pyasn1==0.6.1 - pyasn1-modules==0.4.2 - pycparser==2.22 - pydispatcher==2.0.7 - pyopenssl==25.0.0 - pytest==8.3.5 - queuelib==1.7.0 - service-identity==24.2.0 - six==1.17.0 - tomli==2.2.1 - twisted==24.11.0 - typing-extensions==4.13.0 - w3lib==2.3.1 - zope-interface==7.2 prefix: /opt/conda/envs/scrapy
[ "tests/test_responsetypes.py::ResponseTypesTest::test_from_content_disposition" ]
[]
[ "tests/test_responsetypes.py::ResponseTypesTest::test_custom_mime_types_loaded", "tests/test_responsetypes.py::ResponseTypesTest::test_from_args", "tests/test_responsetypes.py::ResponseTypesTest::test_from_body", "tests/test_responsetypes.py::ResponseTypesTest::test_from_content_type", "tests/test_responsetypes.py::ResponseTypesTest::test_from_filename", "tests/test_responsetypes.py::ResponseTypesTest::test_from_headers" ]
[]
BSD 3-Clause "New" or "Revised" License
438
zalando-stups__senza-178
6ef4b11c9246de1010b1c62176a79c61178f0a9b
2016-02-18 12:36:42
35b73f49b8cb58e7892908413bdf2a61cfe3058e
diff --git a/senza/components/auto_scaling_group.py b/senza/components/auto_scaling_group.py
index 39fb634..c3c58c2 100644
--- a/senza/components/auto_scaling_group.py
+++ b/senza/components/auto_scaling_group.py
@@ -145,13 +145,14 @@ def component_auto_scaling_group(definition, configuration, args, info, force, a
         asg_properties["MinSize"] = as_conf["Minimum"]
         asg_properties["DesiredCapacity"] = max(int(as_conf["Minimum"]), int(as_conf.get('DesiredCapacity', 1)))
 
+        scaling_adjustment = int(as_conf.get("ScalingAdjustment", 1))
         # ScaleUp policy
         definition["Resources"][asg_name + "ScaleUp"] = {
             "Type": "AWS::AutoScaling::ScalingPolicy",
             "Properties": {
                 "AdjustmentType": "ChangeInCapacity",
-                "ScalingAdjustment": "1",
-                "Cooldown": "60",
+                "ScalingAdjustment": str(scaling_adjustment),
+                "Cooldown": str(as_conf.get("Cooldown", "60")),
                 "AutoScalingGroupName": {
                     "Ref": asg_name
                 }
@@ -163,8 +164,8 @@
             "Type": "AWS::AutoScaling::ScalingPolicy",
             "Properties": {
                 "AdjustmentType": "ChangeInCapacity",
-                "ScalingAdjustment": "-1",
-                "Cooldown": "60",
+                "ScalingAdjustment": str((-1) * scaling_adjustment),
+                "Cooldown": str(as_conf.get("Cooldown", "60")),
                 "AutoScalingGroupName": {
                     "Ref": asg_name
                 }
@@ -295,15 +296,18 @@ def metric_network(asg_name, definition, configuration, args, info, force):
 
 
 def metric_cpu(asg_name, definition, configuration, args, info, force):
+    period = int(configuration.get("Period", 300))
+    evaluation_periods = int(configuration.get("EvaluationPeriods", 2))
+    statistic = configuration.get("Statistic", "Average")
     if "ScaleUpThreshold" in configuration:
         definition["Resources"][asg_name + "CPUAlarmHigh"] = {
             "Type": "AWS::CloudWatch::Alarm",
             "Properties": {
                 "MetricName": "CPUUtilization",
                 "Namespace": "AWS/EC2",
-                "Period": "300",
-                "EvaluationPeriods": "2",
-                "Statistic": "Average",
+                "Period": str(period),
+                "EvaluationPeriods": str(evaluation_periods),
+                "Statistic": statistic,
                 "Threshold": configuration["ScaleUpThreshold"],
                 "ComparisonOperator": "GreaterThanThreshold",
                 "Dimensions": [
@@ -312,7 +316,11 @@ def metric_cpu(asg_name, definition, configuration, args, info, force):
                         "Value": {"Ref": asg_name}
                     }
                 ],
-                "AlarmDescription": "Scale-up if CPU > {0}% for 10 minutes".format(configuration["ScaleUpThreshold"]),
+                "AlarmDescription": "Scale-up if CPU > {}% for {} minutes ({})".format(
+                    configuration["ScaleUpThreshold"],
+                    (period / 60) * evaluation_periods,
+                    statistic
+                ),
                 "AlarmActions": [
                     {"Ref": asg_name + "ScaleUp"}
                 ]
@@ -325,9 +333,9 @@
             "Properties": {
                 "MetricName": "CPUUtilization",
                 "Namespace": "AWS/EC2",
-                "Period": "300",
-                "EvaluationPeriods": "2",
-                "Statistic": "Average",
+                "Period": str(period),
+                "EvaluationPeriods": str(evaluation_periods),
+                "Statistic": statistic,
                 "Threshold": configuration["ScaleDownThreshold"],
                 "ComparisonOperator": "LessThanThreshold",
                 "Dimensions": [
@@ -336,8 +344,11 @@ def metric_cpu(asg_name, definition, configuration, args, info, force):
                         "Value": {"Ref": asg_name}
                     }
                 ],
-                "AlarmDescription": "Scale-down if CPU < {0}% for 10 minutes".format(
-                    configuration["ScaleDownThreshold"]),
+                "AlarmDescription": "Scale-down if CPU < {}% for {} minutes ({})".format(
+                    configuration["ScaleDownThreshold"],
+                    (period / 60) * evaluation_periods,
+                    statistic
+                ),
                 "AlarmActions": [
                     {"Ref": asg_name + "ScaleDown"}
                 ]
Make some scaling properties configurable

* Period
* Cooldown
* Scaling adjustment
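As a rough sketch of the requested behaviour (mirroring the patch above, not a definitive implementation), the optional keys could be read from the parsed AutoScaling section with the previous hard-coded values as defaults; the `as_conf` dict below is a hypothetical stand-in for that section:

```python
# Hypothetical parsed "AutoScaling" section of a senza definition.
as_conf = {'Minimum': 2, 'Maximum': 10, 'Cooldown': 30, 'ScalingAdjustment': 2}

scaling_adjustment = int(as_conf.get('ScalingAdjustment', 1))  # old hard-coded value: 1
cooldown = str(as_conf.get('Cooldown', '60'))                  # old hard-coded value: 60

# Scale-up and scale-down policies share the cooldown; the adjustment is negated for scale-down.
scale_up = {'ScalingAdjustment': str(scaling_adjustment), 'Cooldown': cooldown}
scale_down = {'ScalingAdjustment': str(-scaling_adjustment), 'Cooldown': cooldown}
print(scale_up)
print(scale_down)
```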
zalando-stups/senza
diff --git a/tests/test_components.py b/tests/test_components.py
index 1e14885..beb6f5d 100644
--- a/tests/test_components.py
+++ b/tests/test_components.py
@@ -396,6 +396,59 @@ def test_component_taupage_auto_scaling_group_user_data_with_lists_and_empty_dic
 
     assert expected_user_data == generate_user_data(configuration)
 
+def test_component_auto_scaling_group_configurable_properties():
+    definition = {"Resources": {}}
+    configuration = {
+        'Name': 'Foo',
+        'InstanceType': 't2.micro',
+        'Image': 'foo',
+        'AutoScaling': {
+            'Minimum': 2,
+            'Maximum': 10,
+            'MetricType': 'CPU',
+            'Period': 60,
+            'ScaleUpThreshold': 50,
+            'ScaleDownThreshold': 20,
+            'EvaluationPeriods': 1,
+            'Cooldown': 30,
+            'Statistic': 'Maximum'
+        }
+    }
+
+    args = MagicMock()
+    args.region = "foo"
+
+    info = {
+        'StackName': 'FooStack',
+        'StackVersion': 'FooVersion'
+    }
+
+    result = component_auto_scaling_group(definition, configuration, args, info, False, MagicMock())
+
+    assert result["Resources"]["FooScaleUp"] is not None
+    assert result["Resources"]["FooScaleUp"]["Properties"] is not None
+    assert result["Resources"]["FooScaleUp"]["Properties"]["ScalingAdjustment"] == "1"
+    assert result["Resources"]["FooScaleUp"]["Properties"]["Cooldown"] == "30"
+
+    assert result["Resources"]["FooScaleDown"] is not None
+    assert result["Resources"]["FooScaleDown"]["Properties"] is not None
+    assert result["Resources"]["FooScaleDown"]["Properties"]["Cooldown"] == "30"
+    assert result["Resources"]["FooScaleDown"]["Properties"]["ScalingAdjustment"] == "-1"
+
+    assert result["Resources"]["Foo"] is not None
+    assert result["Resources"]["Foo"]["Properties"] is not None
+    assert result["Resources"]["Foo"]["Properties"]["HealthCheckType"] == "EC2"
+    assert result["Resources"]["Foo"]["Properties"]["MinSize"] == 2
+    assert result["Resources"]["Foo"]["Properties"]["DesiredCapacity"] == 2
+    assert result["Resources"]["Foo"]["Properties"]["MaxSize"] == 10
+
+    expected_desc = "Scale-down if CPU < 20% for 1.0 minutes (Maximum)"
+    assert result["Resources"]["FooCPUAlarmHigh"]["Properties"]["Statistic"] == "Maximum"
+    assert result["Resources"]["FooCPUAlarmLow"]["Properties"]["Period"] == "60"
+    assert result["Resources"]["FooCPUAlarmHigh"]["Properties"]["EvaluationPeriods"] == "1"
+    assert result["Resources"]["FooCPUAlarmLow"]["Properties"]["AlarmDescription"] == expected_desc
+
+
 def test_component_auto_scaling_group_metric_type():
     definition = {"Resources": {}}
     configuration = {
@@ -410,7 +463,7 @@ def test_component_auto_scaling_group_metric_type():
             'EvaluationPeriods': 10,
             'ScaleUpThreshold': '50 TB',
             'ScaleDownThreshold': '10',
-            'Statistic': 'Maximum',
+            'Statistic': 'Maximum'
         }
     }
 
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 1 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
boto3==1.37.23 botocore==1.37.23 certifi==2025.1.31 charset-normalizer==3.4.1 click==8.1.8 clickclick==20.10.2 coverage==7.8.0 dnspython==1.15.0 dnspython3==1.15.0 exceptiongroup==1.2.2 idna==3.10 importlib_metadata==8.6.1 iniconfig==2.1.0 jmespath==1.0.1 packaging==24.2 pluggy==1.5.0 pystache==0.6.8 pytest==8.3.5 pytest-cov==6.0.0 python-dateutil==2.9.0.post0 PyYAML==6.0.2 requests==2.32.3 s3transfer==0.11.4 six==1.17.0 stups-cli-support==1.1.22 stups-pierone==1.1.56 -e git+https://github.com/zalando-stups/senza.git@6ef4b11c9246de1010b1c62176a79c61178f0a9b#egg=stups_senza stups-tokens==1.1.19 stups-zign==1.2 tomli==2.2.1 urllib3==1.26.20 zipp==3.21.0
name: senza channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - boto3==1.37.23 - botocore==1.37.23 - certifi==2025.1.31 - charset-normalizer==3.4.1 - click==8.1.8 - clickclick==20.10.2 - coverage==7.8.0 - dnspython==1.15.0 - dnspython3==1.15.0 - exceptiongroup==1.2.2 - idna==3.10 - importlib-metadata==8.6.1 - iniconfig==2.1.0 - jmespath==1.0.1 - packaging==24.2 - pluggy==1.5.0 - pystache==0.6.8 - pytest==8.3.5 - pytest-cov==6.0.0 - python-dateutil==2.9.0.post0 - pyyaml==6.0.2 - requests==2.32.3 - s3transfer==0.11.4 - six==1.17.0 - stups-cli-support==1.1.22 - stups-pierone==1.1.56 - stups-tokens==1.1.19 - stups-zign==1.2 - tomli==2.2.1 - urllib3==1.26.20 - zipp==3.21.0 prefix: /opt/conda/envs/senza
[ "tests/test_components.py::test_component_auto_scaling_group_configurable_properties" ]
[]
[ "tests/test_components.py::test_invalid_component", "tests/test_components.py::test_component_iam_role", "tests/test_components.py::test_get_merged_policies", "tests/test_components.py::test_component_load_balancer_healthcheck", "tests/test_components.py::test_component_load_balancer_idletimeout", "tests/test_components.py::test_component_load_balancer_namelength", "tests/test_components.py::test_component_stups_auto_configuration", "tests/test_components.py::test_component_redis_node", "tests/test_components.py::test_component_redis_cluster", "tests/test_components.py::test_weighted_dns_load_balancer", "tests/test_components.py::test_weighted_dns_load_balancer_with_different_domains", "tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_without_ref", "tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_with_ref", "tests/test_components.py::test_component_taupage_auto_scaling_group_user_data_with_lists_and_empty_dict", "tests/test_components.py::test_component_auto_scaling_group_metric_type", "tests/test_components.py::test_normalize_network_threshold" ]
[]
Apache License 2.0
439
scieloorg__xylose-91
6c03b8cee93a3ef4a1f57906f1b7e28b350edab4
2016-02-18 15:56:42
6c03b8cee93a3ef4a1f57906f1b7e28b350edab4
diff --git a/xylose/scielodocument.py b/xylose/scielodocument.py
index becd530..15d73da 100644
--- a/xylose/scielodocument.py
+++ b/xylose/scielodocument.py
@@ -69,8 +69,17 @@ class Journal(object):
         """
         This method creates an object level attributes (print_issn and/or
         electronic issn), according to the given metadata.
 
-        This method deal with the legacy datamodel fields (935, 400, 35) where:
+        This method deal with the legacy datamodel fields (935, 400, 35, 435) where:
         """
+
+        if 'v435' in self.data:
+            for item in self.data['v435']:
+                if 't' in item and item['t'] == 'PRINT':
+                    self.print_issn = item['_']
+                if 't' in item and item['t'] == 'ONLIN':
+                    self.electronic_issn = item['_']
+            return None
+
         if not 'v35' in self.data:
             return None
Consider field v435 when obtaining the ISSN types

Include field v435 in the analysis used to obtain the ISSN in the Journal class.

E.g.: Biota Neotropica

35 "ONLIN"
400 "1676-0603"
**435 "1678-6424^tPRINT"
435 "1676-0611^tONLIN"**
935 "1676-0611"
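For illustration only (not the library's own code), a minimal sketch of how the print and electronic ISSNs can be picked out of the legacy v435 field, whose occurrences carry the ISSN type in subfield "t"; the input list below is a made-up record shaped like the Biota Neotropica example above:

```python
# Hypothetical v435 occurrences: "_" holds the ISSN, "t" holds its type.
v435 = [
    {'_': '1678-6424', 't': 'PRINT'},
    {'_': '1676-0611', 't': 'ONLIN'},
]

print_issn = electronic_issn = None
for item in v435:
    if item.get('t') == 'PRINT':
        print_issn = item['_']
    elif item.get('t') == 'ONLIN':
        electronic_issn = item['_']

print(print_issn)       # -> 1678-6424
print(electronic_issn)  # -> 1676-0611
```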
scieloorg/xylose
diff --git a/tests/test_document.py b/tests/test_document.py
index 8b47b82..fa8d4c2 100644
--- a/tests/test_document.py
+++ b/tests/test_document.py
@@ -284,6 +284,19 @@ class JournalTests(unittest.TestCase):
 
         self.assertEqual(journal.update_date, '2012-08-24')
 
+    def test_load_issn_with_v435(self):
+        self.fulldoc['title']['v35'] = [{u'_': u'PRINT'}]
+        self.fulldoc['title']['v400'] = [{u'_': u'2222-2222'}]
+        self.fulldoc['title']['v435'] = [
+            {u'_': u'0000-0000', 't': 'ONLIN'},
+            {u'_': u'9999-9999', 't': 'PRINT'}
+        ]
+
+        journal = Journal(self.fulldoc['title'])
+
+        self.assertEqual(journal.print_issn, u'9999-9999')
+        self.assertEqual(journal.electronic_issn, u'0000-0000')
+
     def test_load_issn_with_v935_without_v35(self):
         del(self.fulldoc['title']['v35'])
         self.fulldoc['title']['v400'] = [{u'_': u'2222-2222'}]
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 1 }
0.44
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "nose", "coverage" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
coverage==7.8.0 exceptiongroup==1.2.2 iniconfig==2.1.0 nose==1.3.7 packaging==24.2 pluggy==1.5.0 pytest==8.3.5 tomli==2.2.1 -e git+https://github.com/scieloorg/xylose.git@6c03b8cee93a3ef4a1f57906f1b7e28b350edab4#egg=xylose
name: xylose channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - coverage==7.8.0 - exceptiongroup==1.2.2 - iniconfig==2.1.0 - nose==1.3.7 - packaging==24.2 - pluggy==1.5.0 - pytest==8.3.5 - tomli==2.2.1 prefix: /opt/conda/envs/xylose
[ "tests/test_document.py::JournalTests::test_load_issn_with_v435" ]
[]
[ "tests/test_document.py::ToolsTests::test_get_date_wrong_day", "tests/test_document.py::ToolsTests::test_get_date_wrong_day_month", "tests/test_document.py::ToolsTests::test_get_date_wrong_day_month_not_int", "tests/test_document.py::ToolsTests::test_get_date_wrong_day_not_int", "tests/test_document.py::ToolsTests::test_get_date_wrong_month_not_int", "tests/test_document.py::ToolsTests::test_get_date_year", "tests/test_document.py::ToolsTests::test_get_date_year_day", "tests/test_document.py::ToolsTests::test_get_date_year_month", "tests/test_document.py::ToolsTests::test_get_date_year_month_day", "tests/test_document.py::ToolsTests::test_get_date_year_month_day_31", "tests/test_document.py::ToolsTests::test_get_language_iso639_1_defined", "tests/test_document.py::ToolsTests::test_get_language_iso639_1_undefined", "tests/test_document.py::ToolsTests::test_get_language_iso639_2_defined", "tests/test_document.py::ToolsTests::test_get_language_iso639_2_undefined", "tests/test_document.py::ToolsTests::test_get_language_without_iso_format", "tests/test_document.py::JournalTests::test_any_issn_priority_electronic", "tests/test_document.py::JournalTests::test_any_issn_priority_electronic_without_electronic", "tests/test_document.py::JournalTests::test_any_issn_priority_print", "tests/test_document.py::JournalTests::test_any_issn_priority_print_without_print", "tests/test_document.py::JournalTests::test_collection_acronym", "tests/test_document.py::JournalTests::test_creation_date", "tests/test_document.py::JournalTests::test_current_status", "tests/test_document.py::JournalTests::test_current_status_lots_of_changes_study_case_1", "tests/test_document.py::JournalTests::test_current_status_some_changes", "tests/test_document.py::JournalTests::test_current_without_v51", "tests/test_document.py::JournalTests::test_journal", "tests/test_document.py::JournalTests::test_journal_abbreviated_title", "tests/test_document.py::JournalTests::test_journal_acronym", "tests/test_document.py::JournalTests::test_journal_fulltitle", "tests/test_document.py::JournalTests::test_journal_fulltitle_without_subtitle", "tests/test_document.py::JournalTests::test_journal_fulltitle_without_title", "tests/test_document.py::JournalTests::test_journal_subtitle", "tests/test_document.py::JournalTests::test_journal_title", "tests/test_document.py::JournalTests::test_journal_title_nlm", "tests/test_document.py::JournalTests::test_journal_url", "tests/test_document.py::JournalTests::test_journal_without_subtitle", "tests/test_document.py::JournalTests::test_languages", "tests/test_document.py::JournalTests::test_languages_without_v350", "tests/test_document.py::JournalTests::test_load_issn_with_v935_and_v35_ONLINE", "tests/test_document.py::JournalTests::test_load_issn_with_v935_and_v35_PRINT", "tests/test_document.py::JournalTests::test_load_issn_with_v935_equal_v400_and_v35_ONLINE", "tests/test_document.py::JournalTests::test_load_issn_with_v935_equal_v400_and_v35_PRINT", "tests/test_document.py::JournalTests::test_load_issn_with_v935_without_v35", "tests/test_document.py::JournalTests::test_load_issn_without_v935_and_v35_ONLINE", "tests/test_document.py::JournalTests::test_load_issn_without_v935_and_v35_PRINT", "tests/test_document.py::JournalTests::test_load_issn_without_v935_without_v35", "tests/test_document.py::JournalTests::test_periodicity", "tests/test_document.py::JournalTests::test_periodicity_in_months", "tests/test_document.py::JournalTests::test_periodicity_in_months_out_of_choices", 
"tests/test_document.py::JournalTests::test_periodicity_out_of_choices", "tests/test_document.py::JournalTests::test_permission_id", "tests/test_document.py::JournalTests::test_permission_t0", "tests/test_document.py::JournalTests::test_permission_t1", "tests/test_document.py::JournalTests::test_permission_t2", "tests/test_document.py::JournalTests::test_permission_t3", "tests/test_document.py::JournalTests::test_permission_t4", "tests/test_document.py::JournalTests::test_permission_text", "tests/test_document.py::JournalTests::test_permission_url", "tests/test_document.py::JournalTests::test_permission_without_v540", "tests/test_document.py::JournalTests::test_permission_without_v540_t", "tests/test_document.py::JournalTests::test_publisher_loc", "tests/test_document.py::JournalTests::test_publisher_name", "tests/test_document.py::JournalTests::test_scielo_issn", "tests/test_document.py::JournalTests::test_status", "tests/test_document.py::JournalTests::test_status_lots_of_changes", "tests/test_document.py::JournalTests::test_status_lots_of_changes_study_case_1", "tests/test_document.py::JournalTests::test_status_lots_of_changes_with_reason", "tests/test_document.py::JournalTests::test_status_some_changes", "tests/test_document.py::JournalTests::test_status_without_v51", "tests/test_document.py::JournalTests::test_subject_areas", "tests/test_document.py::JournalTests::test_update_date", "tests/test_document.py::JournalTests::test_without_journal_abbreviated_title", "tests/test_document.py::JournalTests::test_without_journal_acronym", "tests/test_document.py::JournalTests::test_without_journal_title", "tests/test_document.py::JournalTests::test_without_journal_title_nlm", "tests/test_document.py::JournalTests::test_without_journal_url", "tests/test_document.py::JournalTests::test_without_periodicity", "tests/test_document.py::JournalTests::test_without_periodicity_in_months", "tests/test_document.py::JournalTests::test_without_publisher_loc", "tests/test_document.py::JournalTests::test_without_publisher_name", "tests/test_document.py::JournalTests::test_without_scielo_domain", "tests/test_document.py::JournalTests::test_without_scielo_domain_title_v690", "tests/test_document.py::JournalTests::test_without_subject_areas", "tests/test_document.py::JournalTests::test_without_wos_citation_indexes", "tests/test_document.py::JournalTests::test_without_wos_subject_areas", "tests/test_document.py::JournalTests::test_wos_citation_indexes", "tests/test_document.py::JournalTests::test_wos_subject_areas", "tests/test_document.py::ArticleTests::test_acceptance_date", "tests/test_document.py::ArticleTests::test_affiliation_just_with_affiliation_name", "tests/test_document.py::ArticleTests::test_affiliation_without_affiliation_name", "tests/test_document.py::ArticleTests::test_affiliations", "tests/test_document.py::ArticleTests::test_ahead_publication_date", "tests/test_document.py::ArticleTests::test_article", "tests/test_document.py::ArticleTests::test_author_with_two_affiliations", "tests/test_document.py::ArticleTests::test_author_with_two_role", "tests/test_document.py::ArticleTests::test_author_without_affiliations", "tests/test_document.py::ArticleTests::test_author_without_surname_and_given_names", "tests/test_document.py::ArticleTests::test_authors", "tests/test_document.py::ArticleTests::test_collection_acronym", "tests/test_document.py::ArticleTests::test_collection_acronym_priorizing_collection", "tests/test_document.py::ArticleTests::test_collection_acronym_retrieving_v992", 
"tests/test_document.py::ArticleTests::test_collection_name_brazil", "tests/test_document.py::ArticleTests::test_collection_name_undefined", "tests/test_document.py::ArticleTests::test_corporative_authors", "tests/test_document.py::ArticleTests::test_creation_date", "tests/test_document.py::ArticleTests::test_creation_date_1", "tests/test_document.py::ArticleTests::test_creation_date_2", "tests/test_document.py::ArticleTests::test_document_type", "tests/test_document.py::ArticleTests::test_doi", "tests/test_document.py::ArticleTests::test_doi_clean_1", "tests/test_document.py::ArticleTests::test_doi_clean_2", "tests/test_document.py::ArticleTests::test_doi_v237", "tests/test_document.py::ArticleTests::test_e_location", "tests/test_document.py::ArticleTests::test_end_page_loaded_crazy_legacy_way_1", "tests/test_document.py::ArticleTests::test_end_page_loaded_crazy_legacy_way_2", "tests/test_document.py::ArticleTests::test_end_page_loaded_through_xml", "tests/test_document.py::ArticleTests::test_file_code", "tests/test_document.py::ArticleTests::test_file_code_crazy_slashs_1", "tests/test_document.py::ArticleTests::test_file_code_crazy_slashs_2", "tests/test_document.py::ArticleTests::test_first_author", "tests/test_document.py::ArticleTests::test_first_author_without_author", "tests/test_document.py::ArticleTests::test_fulltexts_field_fulltexts", "tests/test_document.py::ArticleTests::test_fulltexts_without_field_fulltexts", "tests/test_document.py::ArticleTests::test_html_url", "tests/test_document.py::ArticleTests::test_invalid_document_type", "tests/test_document.py::ArticleTests::test_is_ahead", "tests/test_document.py::ArticleTests::test_issue", "tests/test_document.py::ArticleTests::test_issue_label_field_v4", "tests/test_document.py::ArticleTests::test_issue_label_without_field_v4", "tests/test_document.py::ArticleTests::test_issue_url", "tests/test_document.py::ArticleTests::test_journal_abbreviated_title", "tests/test_document.py::ArticleTests::test_journal_acronym", "tests/test_document.py::ArticleTests::test_journal_title", "tests/test_document.py::ArticleTests::test_keywords", "tests/test_document.py::ArticleTests::test_keywords_iso639_2", "tests/test_document.py::ArticleTests::test_keywords_with_undefined_language", "tests/test_document.py::ArticleTests::test_keywords_without_subfield_k", "tests/test_document.py::ArticleTests::test_keywords_without_subfield_l", "tests/test_document.py::ArticleTests::test_languages_field_fulltexts", "tests/test_document.py::ArticleTests::test_languages_field_v40", "tests/test_document.py::ArticleTests::test_last_page", "tests/test_document.py::ArticleTests::test_mixed_affiliations_1", "tests/test_document.py::ArticleTests::test_normalized_affiliations", "tests/test_document.py::ArticleTests::test_normalized_affiliations_undefined_ISO_3166_CODE", "tests/test_document.py::ArticleTests::test_normalized_affiliations_without_p", "tests/test_document.py::ArticleTests::test_order", "tests/test_document.py::ArticleTests::test_original_abstract_with_just_one_language_defined", "tests/test_document.py::ArticleTests::test_original_abstract_with_language_defined", "tests/test_document.py::ArticleTests::test_original_abstract_with_language_defined_but_different_of_the_article_original_language", "tests/test_document.py::ArticleTests::test_original_abstract_without_language_defined", "tests/test_document.py::ArticleTests::test_original_html_field_body", "tests/test_document.py::ArticleTests::test_original_language_invalid_iso639_2", 
"tests/test_document.py::ArticleTests::test_original_language_iso639_2", "tests/test_document.py::ArticleTests::test_original_language_original", "tests/test_document.py::ArticleTests::test_original_section_field_v49", "tests/test_document.py::ArticleTests::test_original_title_subfield_t", "tests/test_document.py::ArticleTests::test_original_title_with_just_one_language_defined", "tests/test_document.py::ArticleTests::test_original_title_with_language_defined", "tests/test_document.py::ArticleTests::test_original_title_with_language_defined_but_different_of_the_article_original_language", "tests/test_document.py::ArticleTests::test_original_title_without_language_defined", "tests/test_document.py::ArticleTests::test_pdf_url", "tests/test_document.py::ArticleTests::test_processing_date", "tests/test_document.py::ArticleTests::test_processing_date_1", "tests/test_document.py::ArticleTests::test_project_name", "tests/test_document.py::ArticleTests::test_project_sponsors", "tests/test_document.py::ArticleTests::test_publication_contract", "tests/test_document.py::ArticleTests::test_publication_date", "tests/test_document.py::ArticleTests::test_publisher_id", "tests/test_document.py::ArticleTests::test_publisher_loc", "tests/test_document.py::ArticleTests::test_publisher_name", "tests/test_document.py::ArticleTests::test_receive_date", "tests/test_document.py::ArticleTests::test_review_date", "tests/test_document.py::ArticleTests::test_secion_code_field_v49", "tests/test_document.py::ArticleTests::test_section_code_nd_field_v49", "tests/test_document.py::ArticleTests::test_section_code_without_field_v49", "tests/test_document.py::ArticleTests::test_section_field_v49", "tests/test_document.py::ArticleTests::test_section_nd_field_v49", "tests/test_document.py::ArticleTests::test_section_without_field_v49", "tests/test_document.py::ArticleTests::test_start_page", "tests/test_document.py::ArticleTests::test_start_page_loaded_crazy_legacy_way_1", "tests/test_document.py::ArticleTests::test_start_page_loaded_crazy_legacy_way_2", "tests/test_document.py::ArticleTests::test_start_page_loaded_through_xml", "tests/test_document.py::ArticleTests::test_subject_areas", "tests/test_document.py::ArticleTests::test_supplement_issue", "tests/test_document.py::ArticleTests::test_supplement_volume", "tests/test_document.py::ArticleTests::test_thesis_degree", "tests/test_document.py::ArticleTests::test_thesis_organization", "tests/test_document.py::ArticleTests::test_thesis_organization_and_division", "tests/test_document.py::ArticleTests::test_thesis_organization_without_name", "tests/test_document.py::ArticleTests::test_translated_abstracts", "tests/test_document.py::ArticleTests::test_translated_abstracts_without_v83", "tests/test_document.py::ArticleTests::test_translated_abtracts_iso639_2", "tests/test_document.py::ArticleTests::test_translated_htmls_field_body", "tests/test_document.py::ArticleTests::test_translated_section_field_v49", "tests/test_document.py::ArticleTests::test_translated_titles", "tests/test_document.py::ArticleTests::test_translated_titles_iso639_2", "tests/test_document.py::ArticleTests::test_translated_titles_without_v12", "tests/test_document.py::ArticleTests::test_update_date", "tests/test_document.py::ArticleTests::test_update_date_1", "tests/test_document.py::ArticleTests::test_update_date_2", "tests/test_document.py::ArticleTests::test_update_date_3", "tests/test_document.py::ArticleTests::test_volume", "tests/test_document.py::ArticleTests::test_whitwout_acceptance_date", 
"tests/test_document.py::ArticleTests::test_whitwout_ahead_publication_date", "tests/test_document.py::ArticleTests::test_whitwout_receive_date", "tests/test_document.py::ArticleTests::test_whitwout_review_date", "tests/test_document.py::ArticleTests::test_without_affiliations", "tests/test_document.py::ArticleTests::test_without_authors", "tests/test_document.py::ArticleTests::test_without_citations", "tests/test_document.py::ArticleTests::test_without_collection_acronym", "tests/test_document.py::ArticleTests::test_without_corporative_authors", "tests/test_document.py::ArticleTests::test_without_document_type", "tests/test_document.py::ArticleTests::test_without_doi", "tests/test_document.py::ArticleTests::test_without_e_location", "tests/test_document.py::ArticleTests::test_without_html_url", "tests/test_document.py::ArticleTests::test_without_issue", "tests/test_document.py::ArticleTests::test_without_issue_url", "tests/test_document.py::ArticleTests::test_without_journal_abbreviated_title", "tests/test_document.py::ArticleTests::test_without_journal_acronym", "tests/test_document.py::ArticleTests::test_without_journal_title", "tests/test_document.py::ArticleTests::test_without_keywords", "tests/test_document.py::ArticleTests::test_without_last_page", "tests/test_document.py::ArticleTests::test_without_normalized_affiliations", "tests/test_document.py::ArticleTests::test_without_order", "tests/test_document.py::ArticleTests::test_without_original_abstract", "tests/test_document.py::ArticleTests::test_without_original_title", "tests/test_document.py::ArticleTests::test_without_pages", "tests/test_document.py::ArticleTests::test_without_pdf_url", "tests/test_document.py::ArticleTests::test_without_processing_date", "tests/test_document.py::ArticleTests::test_without_project_name", "tests/test_document.py::ArticleTests::test_without_project_sponsor", "tests/test_document.py::ArticleTests::test_without_publication_contract", "tests/test_document.py::ArticleTests::test_without_publication_date", "tests/test_document.py::ArticleTests::test_without_publisher_id", "tests/test_document.py::ArticleTests::test_without_publisher_loc", "tests/test_document.py::ArticleTests::test_without_publisher_name", "tests/test_document.py::ArticleTests::test_without_scielo_domain", "tests/test_document.py::ArticleTests::test_without_scielo_domain_article_v69", "tests/test_document.py::ArticleTests::test_without_scielo_domain_article_v69_and_with_title_v690", "tests/test_document.py::ArticleTests::test_without_scielo_domain_title_v690", "tests/test_document.py::ArticleTests::test_without_start_page", "tests/test_document.py::ArticleTests::test_without_subject_areas", "tests/test_document.py::ArticleTests::test_without_suplement_issue", "tests/test_document.py::ArticleTests::test_without_supplement_volume", "tests/test_document.py::ArticleTests::test_without_thesis_degree", "tests/test_document.py::ArticleTests::test_without_thesis_organization", "tests/test_document.py::ArticleTests::test_without_volume", "tests/test_document.py::ArticleTests::test_without_wos_citation_indexes", "tests/test_document.py::ArticleTests::test_without_wos_subject_areas", "tests/test_document.py::ArticleTests::test_wos_citation_indexes", "tests/test_document.py::ArticleTests::test_wos_subject_areas", "tests/test_document.py::CitationTest::test_a_link_access_date", "tests/test_document.py::CitationTest::test_analytic_institution_for_a_article_citation", 
"tests/test_document.py::CitationTest::test_analytic_institution_for_a_book_citation", "tests/test_document.py::CitationTest::test_article_title", "tests/test_document.py::CitationTest::test_article_without_title", "tests/test_document.py::CitationTest::test_authors_article", "tests/test_document.py::CitationTest::test_authors_book", "tests/test_document.py::CitationTest::test_authors_link", "tests/test_document.py::CitationTest::test_authors_thesis", "tests/test_document.py::CitationTest::test_book_chapter_title", "tests/test_document.py::CitationTest::test_book_edition", "tests/test_document.py::CitationTest::test_book_volume", "tests/test_document.py::CitationTest::test_book_without_chapter_title", "tests/test_document.py::CitationTest::test_citation_sample_congress", "tests/test_document.py::CitationTest::test_citation_sample_link", "tests/test_document.py::CitationTest::test_citation_sample_link_without_comment", "tests/test_document.py::CitationTest::test_conference_edition", "tests/test_document.py::CitationTest::test_conference_name", "tests/test_document.py::CitationTest::test_conference_sponsor", "tests/test_document.py::CitationTest::test_conference_without_name", "tests/test_document.py::CitationTest::test_conference_without_sponsor", "tests/test_document.py::CitationTest::test_date", "tests/test_document.py::CitationTest::test_doi", "tests/test_document.py::CitationTest::test_editor", "tests/test_document.py::CitationTest::test_elocation_14", "tests/test_document.py::CitationTest::test_elocation_514", "tests/test_document.py::CitationTest::test_end_page_14", "tests/test_document.py::CitationTest::test_end_page_514", "tests/test_document.py::CitationTest::test_end_page_withdout_data", "tests/test_document.py::CitationTest::test_first_author_article", "tests/test_document.py::CitationTest::test_first_author_book", "tests/test_document.py::CitationTest::test_first_author_link", "tests/test_document.py::CitationTest::test_first_author_thesis", "tests/test_document.py::CitationTest::test_first_author_without_monographic_authors", "tests/test_document.py::CitationTest::test_first_author_without_monographic_authors_but_not_a_book_citation", "tests/test_document.py::CitationTest::test_index_number", "tests/test_document.py::CitationTest::test_institutions_all_fields", "tests/test_document.py::CitationTest::test_institutions_v11", "tests/test_document.py::CitationTest::test_institutions_v17", "tests/test_document.py::CitationTest::test_institutions_v29", "tests/test_document.py::CitationTest::test_institutions_v50", "tests/test_document.py::CitationTest::test_institutions_v58", "tests/test_document.py::CitationTest::test_invalid_edition", "tests/test_document.py::CitationTest::test_isbn", "tests/test_document.py::CitationTest::test_isbn_but_not_a_book", "tests/test_document.py::CitationTest::test_issn", "tests/test_document.py::CitationTest::test_issn_but_not_an_article", "tests/test_document.py::CitationTest::test_issue_part", "tests/test_document.py::CitationTest::test_issue_title", "tests/test_document.py::CitationTest::test_journal_issue", "tests/test_document.py::CitationTest::test_journal_volume", "tests/test_document.py::CitationTest::test_link", "tests/test_document.py::CitationTest::test_link_title", "tests/test_document.py::CitationTest::test_link_without_title", "tests/test_document.py::CitationTest::test_monographic_authors", "tests/test_document.py::CitationTest::test_monographic_first_author", "tests/test_document.py::CitationTest::test_pages_14", 
"tests/test_document.py::CitationTest::test_pages_514", "tests/test_document.py::CitationTest::test_pages_withdout_data", "tests/test_document.py::CitationTest::test_publication_type_article", "tests/test_document.py::CitationTest::test_publication_type_book", "tests/test_document.py::CitationTest::test_publication_type_conference", "tests/test_document.py::CitationTest::test_publication_type_link", "tests/test_document.py::CitationTest::test_publication_type_thesis", "tests/test_document.py::CitationTest::test_publication_type_undefined", "tests/test_document.py::CitationTest::test_publisher", "tests/test_document.py::CitationTest::test_publisher_address", "tests/test_document.py::CitationTest::test_publisher_address_without_e", "tests/test_document.py::CitationTest::test_series_book", "tests/test_document.py::CitationTest::test_series_but_neither_journal_book_or_conference_citation", "tests/test_document.py::CitationTest::test_series_conference", "tests/test_document.py::CitationTest::test_series_journal", "tests/test_document.py::CitationTest::test_source_book_title", "tests/test_document.py::CitationTest::test_source_journal", "tests/test_document.py::CitationTest::test_source_journal_without_journal_title", "tests/test_document.py::CitationTest::test_sponsor", "tests/test_document.py::CitationTest::test_start_page_14", "tests/test_document.py::CitationTest::test_start_page_514", "tests/test_document.py::CitationTest::test_start_page_withdout_data", "tests/test_document.py::CitationTest::test_thesis_institution", "tests/test_document.py::CitationTest::test_thesis_title", "tests/test_document.py::CitationTest::test_thesis_without_title", "tests/test_document.py::CitationTest::test_title_when_article_citation", "tests/test_document.py::CitationTest::test_title_when_conference_citation", "tests/test_document.py::CitationTest::test_title_when_link_citation", "tests/test_document.py::CitationTest::test_title_when_thesis_citation", "tests/test_document.py::CitationTest::test_with_volume_but_not_a_journal_article_neither_a_book", "tests/test_document.py::CitationTest::test_without_analytic_institution", "tests/test_document.py::CitationTest::test_without_authors", "tests/test_document.py::CitationTest::test_without_date", "tests/test_document.py::CitationTest::test_without_doi", "tests/test_document.py::CitationTest::test_without_edition", "tests/test_document.py::CitationTest::test_without_editor", "tests/test_document.py::CitationTest::test_without_first_author", "tests/test_document.py::CitationTest::test_without_index_number", "tests/test_document.py::CitationTest::test_without_institutions", "tests/test_document.py::CitationTest::test_without_issue", "tests/test_document.py::CitationTest::test_without_issue_part", "tests/test_document.py::CitationTest::test_without_issue_title", "tests/test_document.py::CitationTest::test_without_link", "tests/test_document.py::CitationTest::test_without_monographic_authors", "tests/test_document.py::CitationTest::test_without_monographic_authors_but_not_a_book_citation", "tests/test_document.py::CitationTest::test_without_publisher", "tests/test_document.py::CitationTest::test_without_publisher_address", "tests/test_document.py::CitationTest::test_without_series", "tests/test_document.py::CitationTest::test_without_sponsor", "tests/test_document.py::CitationTest::test_without_thesis_institution", "tests/test_document.py::CitationTest::test_without_volume" ]
[]
BSD 2-Clause "Simplified" License
440
scieloorg__xylose-93
b0fdfc8085316f491e0ad49d1abcba33641549b6
2016-02-18 18:46:17
b0fdfc8085316f491e0ad49d1abcba33641549b6
diff --git a/setup.py b/setup.py index d1e1950..03b10eb 100755 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ except ImportError: setup( name="xylose", - version='0.43', + version='0.46', description="A SciELO library to abstract a JSON data structure that is a product of the ISIS2JSON conversion using the ISIS2JSON type 3 data model.", author="SciELO", author_email="[email protected]", diff --git a/xylose/scielodocument.py b/xylose/scielodocument.py index 15d73da..a446175 100644 --- a/xylose/scielodocument.py +++ b/xylose/scielodocument.py @@ -802,9 +802,9 @@ class Article(object): def data_model_version(self, fullpath=False): """ This method retrieves the document version - This method deals with the legacy fields (601). + This method deals with the legacy fields (120). """ - if 'v601' in self.data['article']: + if 'xml' in self.data['article'].get('v120', [{'_': ''}])[0]['_'].lower(): return 'xml' return 'html'
Adjust the method that indicates whether the data model is HTML or XML.
scieloorg/xylose
diff --git a/tests/test_document.py b/tests/test_document.py index fa8d4c2..f9c49bf 100644 --- a/tests/test_document.py +++ b/tests/test_document.py @@ -985,6 +985,27 @@ class ArticleTests(unittest.TestCase): self.assertEqual(article.file_code(), '0034-8910-rsp-47-04-0675') + def test_data_model_version_html(self): + del(self.fulldoc['article']['v120']) + + article = Article(self.fulldoc) + + self.assertEqual(article.data_model_version, u'html') + + def test_data_model_version_html_1(self): + self.fulldoc['article']['v120'] = [{'_': '4.0'}] + + article = Article(self.fulldoc) + + self.assertEqual(article.data_model_version, u'html') + + def test_data_model_version_xml(self): + self.fulldoc['article']['v120'] = [{'_': 'XML_1.0'}] + + article = Article(self.fulldoc) + + self.assertEqual(article.data_model_version, u'xml') + def test_wos_subject_areas(self): self.fulldoc['title']['v854'] = [{u'_': u'MARINE & FRESHWATER BIOLOGY'}, {u'_': u'OCEANOGRAPHY'}]
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 3, "test_score": 3 }, "num_modified_files": 2 }
0.45
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "nose", "coverage", "mocker", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
coverage==7.8.0 exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work mocker==1.1.1 nose==1.3.7 packaging @ file:///croot/packaging_1734472117206/work pluggy @ file:///croot/pluggy_1733169602837/work pytest @ file:///croot/pytest_1738938843180/work tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work -e git+https://github.com/scieloorg/xylose.git@b0fdfc8085316f491e0ad49d1abcba33641549b6#egg=xylose
name: xylose channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - exceptiongroup=1.2.0=py39h06a4308_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - packaging=24.2=py39h06a4308_0 - pip=25.0=py39h06a4308_0 - pluggy=1.5.0=py39h06a4308_0 - pytest=8.3.4=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tomli=2.0.1=py39h06a4308_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - coverage==7.8.0 - mocker==1.1.1 - nose==1.3.7 prefix: /opt/conda/envs/xylose
[ "tests/test_document.py::ArticleTests::test_data_model_version_xml" ]
[]
[ "tests/test_document.py::ToolsTests::test_get_date_wrong_day", "tests/test_document.py::ToolsTests::test_get_date_wrong_day_month", "tests/test_document.py::ToolsTests::test_get_date_wrong_day_month_not_int", "tests/test_document.py::ToolsTests::test_get_date_wrong_day_not_int", "tests/test_document.py::ToolsTests::test_get_date_wrong_month_not_int", "tests/test_document.py::ToolsTests::test_get_date_year", "tests/test_document.py::ToolsTests::test_get_date_year_day", "tests/test_document.py::ToolsTests::test_get_date_year_month", "tests/test_document.py::ToolsTests::test_get_date_year_month_day", "tests/test_document.py::ToolsTests::test_get_date_year_month_day_31", "tests/test_document.py::ToolsTests::test_get_language_iso639_1_defined", "tests/test_document.py::ToolsTests::test_get_language_iso639_1_undefined", "tests/test_document.py::ToolsTests::test_get_language_iso639_2_defined", "tests/test_document.py::ToolsTests::test_get_language_iso639_2_undefined", "tests/test_document.py::ToolsTests::test_get_language_without_iso_format", "tests/test_document.py::JournalTests::test_any_issn_priority_electronic", "tests/test_document.py::JournalTests::test_any_issn_priority_electronic_without_electronic", "tests/test_document.py::JournalTests::test_any_issn_priority_print", "tests/test_document.py::JournalTests::test_any_issn_priority_print_without_print", "tests/test_document.py::JournalTests::test_collection_acronym", "tests/test_document.py::JournalTests::test_creation_date", "tests/test_document.py::JournalTests::test_current_status", "tests/test_document.py::JournalTests::test_current_status_lots_of_changes_study_case_1", "tests/test_document.py::JournalTests::test_current_status_some_changes", "tests/test_document.py::JournalTests::test_current_without_v51", "tests/test_document.py::JournalTests::test_journal", "tests/test_document.py::JournalTests::test_journal_abbreviated_title", "tests/test_document.py::JournalTests::test_journal_acronym", "tests/test_document.py::JournalTests::test_journal_fulltitle", "tests/test_document.py::JournalTests::test_journal_fulltitle_without_subtitle", "tests/test_document.py::JournalTests::test_journal_fulltitle_without_title", "tests/test_document.py::JournalTests::test_journal_subtitle", "tests/test_document.py::JournalTests::test_journal_title", "tests/test_document.py::JournalTests::test_journal_title_nlm", "tests/test_document.py::JournalTests::test_journal_url", "tests/test_document.py::JournalTests::test_journal_without_subtitle", "tests/test_document.py::JournalTests::test_languages", "tests/test_document.py::JournalTests::test_languages_without_v350", "tests/test_document.py::JournalTests::test_load_issn_with_v435", "tests/test_document.py::JournalTests::test_load_issn_with_v935_and_v35_ONLINE", "tests/test_document.py::JournalTests::test_load_issn_with_v935_and_v35_PRINT", "tests/test_document.py::JournalTests::test_load_issn_with_v935_equal_v400_and_v35_ONLINE", "tests/test_document.py::JournalTests::test_load_issn_with_v935_equal_v400_and_v35_PRINT", "tests/test_document.py::JournalTests::test_load_issn_with_v935_without_v35", "tests/test_document.py::JournalTests::test_load_issn_without_v935_and_v35_ONLINE", "tests/test_document.py::JournalTests::test_load_issn_without_v935_and_v35_PRINT", "tests/test_document.py::JournalTests::test_load_issn_without_v935_without_v35", "tests/test_document.py::JournalTests::test_periodicity", "tests/test_document.py::JournalTests::test_periodicity_in_months", 
"tests/test_document.py::JournalTests::test_periodicity_in_months_out_of_choices", "tests/test_document.py::JournalTests::test_periodicity_out_of_choices", "tests/test_document.py::JournalTests::test_permission_id", "tests/test_document.py::JournalTests::test_permission_t0", "tests/test_document.py::JournalTests::test_permission_t1", "tests/test_document.py::JournalTests::test_permission_t2", "tests/test_document.py::JournalTests::test_permission_t3", "tests/test_document.py::JournalTests::test_permission_t4", "tests/test_document.py::JournalTests::test_permission_text", "tests/test_document.py::JournalTests::test_permission_url", "tests/test_document.py::JournalTests::test_permission_without_v540", "tests/test_document.py::JournalTests::test_permission_without_v540_t", "tests/test_document.py::JournalTests::test_publisher_loc", "tests/test_document.py::JournalTests::test_publisher_name", "tests/test_document.py::JournalTests::test_scielo_issn", "tests/test_document.py::JournalTests::test_status", "tests/test_document.py::JournalTests::test_status_lots_of_changes", "tests/test_document.py::JournalTests::test_status_lots_of_changes_study_case_1", "tests/test_document.py::JournalTests::test_status_lots_of_changes_with_reason", "tests/test_document.py::JournalTests::test_status_some_changes", "tests/test_document.py::JournalTests::test_status_without_v51", "tests/test_document.py::JournalTests::test_subject_areas", "tests/test_document.py::JournalTests::test_update_date", "tests/test_document.py::JournalTests::test_without_journal_abbreviated_title", "tests/test_document.py::JournalTests::test_without_journal_acronym", "tests/test_document.py::JournalTests::test_without_journal_title", "tests/test_document.py::JournalTests::test_without_journal_title_nlm", "tests/test_document.py::JournalTests::test_without_journal_url", "tests/test_document.py::JournalTests::test_without_periodicity", "tests/test_document.py::JournalTests::test_without_periodicity_in_months", "tests/test_document.py::JournalTests::test_without_publisher_loc", "tests/test_document.py::JournalTests::test_without_publisher_name", "tests/test_document.py::JournalTests::test_without_scielo_domain", "tests/test_document.py::JournalTests::test_without_scielo_domain_title_v690", "tests/test_document.py::JournalTests::test_without_subject_areas", "tests/test_document.py::JournalTests::test_without_wos_citation_indexes", "tests/test_document.py::JournalTests::test_without_wos_subject_areas", "tests/test_document.py::JournalTests::test_wos_citation_indexes", "tests/test_document.py::JournalTests::test_wos_subject_areas", "tests/test_document.py::ArticleTests::test_acceptance_date", "tests/test_document.py::ArticleTests::test_affiliation_just_with_affiliation_name", "tests/test_document.py::ArticleTests::test_affiliation_without_affiliation_name", "tests/test_document.py::ArticleTests::test_affiliations", "tests/test_document.py::ArticleTests::test_ahead_publication_date", "tests/test_document.py::ArticleTests::test_article", "tests/test_document.py::ArticleTests::test_author_with_two_affiliations", "tests/test_document.py::ArticleTests::test_author_with_two_role", "tests/test_document.py::ArticleTests::test_author_without_affiliations", "tests/test_document.py::ArticleTests::test_author_without_surname_and_given_names", "tests/test_document.py::ArticleTests::test_authors", "tests/test_document.py::ArticleTests::test_collection_acronym", "tests/test_document.py::ArticleTests::test_collection_acronym_priorizing_collection", 
"tests/test_document.py::ArticleTests::test_collection_acronym_retrieving_v992", "tests/test_document.py::ArticleTests::test_collection_name_brazil", "tests/test_document.py::ArticleTests::test_collection_name_undefined", "tests/test_document.py::ArticleTests::test_corporative_authors", "tests/test_document.py::ArticleTests::test_creation_date", "tests/test_document.py::ArticleTests::test_creation_date_1", "tests/test_document.py::ArticleTests::test_creation_date_2", "tests/test_document.py::ArticleTests::test_data_model_version_html", "tests/test_document.py::ArticleTests::test_data_model_version_html_1", "tests/test_document.py::ArticleTests::test_document_type", "tests/test_document.py::ArticleTests::test_doi", "tests/test_document.py::ArticleTests::test_doi_clean_1", "tests/test_document.py::ArticleTests::test_doi_clean_2", "tests/test_document.py::ArticleTests::test_doi_v237", "tests/test_document.py::ArticleTests::test_e_location", "tests/test_document.py::ArticleTests::test_end_page_loaded_crazy_legacy_way_1", "tests/test_document.py::ArticleTests::test_end_page_loaded_crazy_legacy_way_2", "tests/test_document.py::ArticleTests::test_end_page_loaded_through_xml", "tests/test_document.py::ArticleTests::test_file_code", "tests/test_document.py::ArticleTests::test_file_code_crazy_slashs_1", "tests/test_document.py::ArticleTests::test_file_code_crazy_slashs_2", "tests/test_document.py::ArticleTests::test_first_author", "tests/test_document.py::ArticleTests::test_first_author_without_author", "tests/test_document.py::ArticleTests::test_fulltexts_field_fulltexts", "tests/test_document.py::ArticleTests::test_fulltexts_without_field_fulltexts", "tests/test_document.py::ArticleTests::test_html_url", "tests/test_document.py::ArticleTests::test_invalid_document_type", "tests/test_document.py::ArticleTests::test_is_ahead", "tests/test_document.py::ArticleTests::test_issue", "tests/test_document.py::ArticleTests::test_issue_label_field_v4", "tests/test_document.py::ArticleTests::test_issue_label_without_field_v4", "tests/test_document.py::ArticleTests::test_issue_url", "tests/test_document.py::ArticleTests::test_journal_abbreviated_title", "tests/test_document.py::ArticleTests::test_journal_acronym", "tests/test_document.py::ArticleTests::test_journal_title", "tests/test_document.py::ArticleTests::test_keywords", "tests/test_document.py::ArticleTests::test_keywords_iso639_2", "tests/test_document.py::ArticleTests::test_keywords_with_undefined_language", "tests/test_document.py::ArticleTests::test_keywords_without_subfield_k", "tests/test_document.py::ArticleTests::test_keywords_without_subfield_l", "tests/test_document.py::ArticleTests::test_languages_field_fulltexts", "tests/test_document.py::ArticleTests::test_languages_field_v40", "tests/test_document.py::ArticleTests::test_last_page", "tests/test_document.py::ArticleTests::test_mixed_affiliations_1", "tests/test_document.py::ArticleTests::test_normalized_affiliations", "tests/test_document.py::ArticleTests::test_normalized_affiliations_undefined_ISO_3166_CODE", "tests/test_document.py::ArticleTests::test_normalized_affiliations_without_p", "tests/test_document.py::ArticleTests::test_order", "tests/test_document.py::ArticleTests::test_original_abstract_with_just_one_language_defined", "tests/test_document.py::ArticleTests::test_original_abstract_with_language_defined", "tests/test_document.py::ArticleTests::test_original_abstract_with_language_defined_but_different_of_the_article_original_language", 
"tests/test_document.py::ArticleTests::test_original_abstract_without_language_defined", "tests/test_document.py::ArticleTests::test_original_html_field_body", "tests/test_document.py::ArticleTests::test_original_language_invalid_iso639_2", "tests/test_document.py::ArticleTests::test_original_language_iso639_2", "tests/test_document.py::ArticleTests::test_original_language_original", "tests/test_document.py::ArticleTests::test_original_section_field_v49", "tests/test_document.py::ArticleTests::test_original_title_subfield_t", "tests/test_document.py::ArticleTests::test_original_title_with_just_one_language_defined", "tests/test_document.py::ArticleTests::test_original_title_with_language_defined", "tests/test_document.py::ArticleTests::test_original_title_with_language_defined_but_different_of_the_article_original_language", "tests/test_document.py::ArticleTests::test_original_title_without_language_defined", "tests/test_document.py::ArticleTests::test_pdf_url", "tests/test_document.py::ArticleTests::test_processing_date", "tests/test_document.py::ArticleTests::test_processing_date_1", "tests/test_document.py::ArticleTests::test_project_name", "tests/test_document.py::ArticleTests::test_project_sponsors", "tests/test_document.py::ArticleTests::test_publication_contract", "tests/test_document.py::ArticleTests::test_publication_date", "tests/test_document.py::ArticleTests::test_publisher_id", "tests/test_document.py::ArticleTests::test_publisher_loc", "tests/test_document.py::ArticleTests::test_publisher_name", "tests/test_document.py::ArticleTests::test_receive_date", "tests/test_document.py::ArticleTests::test_review_date", "tests/test_document.py::ArticleTests::test_secion_code_field_v49", "tests/test_document.py::ArticleTests::test_section_code_nd_field_v49", "tests/test_document.py::ArticleTests::test_section_code_without_field_v49", "tests/test_document.py::ArticleTests::test_section_field_v49", "tests/test_document.py::ArticleTests::test_section_nd_field_v49", "tests/test_document.py::ArticleTests::test_section_without_field_v49", "tests/test_document.py::ArticleTests::test_start_page", "tests/test_document.py::ArticleTests::test_start_page_loaded_crazy_legacy_way_1", "tests/test_document.py::ArticleTests::test_start_page_loaded_crazy_legacy_way_2", "tests/test_document.py::ArticleTests::test_start_page_loaded_through_xml", "tests/test_document.py::ArticleTests::test_subject_areas", "tests/test_document.py::ArticleTests::test_supplement_issue", "tests/test_document.py::ArticleTests::test_supplement_volume", "tests/test_document.py::ArticleTests::test_thesis_degree", "tests/test_document.py::ArticleTests::test_thesis_organization", "tests/test_document.py::ArticleTests::test_thesis_organization_and_division", "tests/test_document.py::ArticleTests::test_thesis_organization_without_name", "tests/test_document.py::ArticleTests::test_translated_abstracts", "tests/test_document.py::ArticleTests::test_translated_abstracts_without_v83", "tests/test_document.py::ArticleTests::test_translated_abtracts_iso639_2", "tests/test_document.py::ArticleTests::test_translated_htmls_field_body", "tests/test_document.py::ArticleTests::test_translated_section_field_v49", "tests/test_document.py::ArticleTests::test_translated_titles", "tests/test_document.py::ArticleTests::test_translated_titles_iso639_2", "tests/test_document.py::ArticleTests::test_translated_titles_without_v12", "tests/test_document.py::ArticleTests::test_update_date", "tests/test_document.py::ArticleTests::test_update_date_1", 
"tests/test_document.py::ArticleTests::test_update_date_2", "tests/test_document.py::ArticleTests::test_update_date_3", "tests/test_document.py::ArticleTests::test_volume", "tests/test_document.py::ArticleTests::test_whitwout_acceptance_date", "tests/test_document.py::ArticleTests::test_whitwout_ahead_publication_date", "tests/test_document.py::ArticleTests::test_whitwout_receive_date", "tests/test_document.py::ArticleTests::test_whitwout_review_date", "tests/test_document.py::ArticleTests::test_without_affiliations", "tests/test_document.py::ArticleTests::test_without_authors", "tests/test_document.py::ArticleTests::test_without_citations", "tests/test_document.py::ArticleTests::test_without_collection_acronym", "tests/test_document.py::ArticleTests::test_without_corporative_authors", "tests/test_document.py::ArticleTests::test_without_document_type", "tests/test_document.py::ArticleTests::test_without_doi", "tests/test_document.py::ArticleTests::test_without_e_location", "tests/test_document.py::ArticleTests::test_without_html_url", "tests/test_document.py::ArticleTests::test_without_issue", "tests/test_document.py::ArticleTests::test_without_issue_url", "tests/test_document.py::ArticleTests::test_without_journal_abbreviated_title", "tests/test_document.py::ArticleTests::test_without_journal_acronym", "tests/test_document.py::ArticleTests::test_without_journal_title", "tests/test_document.py::ArticleTests::test_without_keywords", "tests/test_document.py::ArticleTests::test_without_last_page", "tests/test_document.py::ArticleTests::test_without_normalized_affiliations", "tests/test_document.py::ArticleTests::test_without_order", "tests/test_document.py::ArticleTests::test_without_original_abstract", "tests/test_document.py::ArticleTests::test_without_original_title", "tests/test_document.py::ArticleTests::test_without_pages", "tests/test_document.py::ArticleTests::test_without_pdf_url", "tests/test_document.py::ArticleTests::test_without_processing_date", "tests/test_document.py::ArticleTests::test_without_project_name", "tests/test_document.py::ArticleTests::test_without_project_sponsor", "tests/test_document.py::ArticleTests::test_without_publication_contract", "tests/test_document.py::ArticleTests::test_without_publication_date", "tests/test_document.py::ArticleTests::test_without_publisher_id", "tests/test_document.py::ArticleTests::test_without_publisher_loc", "tests/test_document.py::ArticleTests::test_without_publisher_name", "tests/test_document.py::ArticleTests::test_without_scielo_domain", "tests/test_document.py::ArticleTests::test_without_scielo_domain_article_v69", "tests/test_document.py::ArticleTests::test_without_scielo_domain_article_v69_and_with_title_v690", "tests/test_document.py::ArticleTests::test_without_scielo_domain_title_v690", "tests/test_document.py::ArticleTests::test_without_start_page", "tests/test_document.py::ArticleTests::test_without_subject_areas", "tests/test_document.py::ArticleTests::test_without_suplement_issue", "tests/test_document.py::ArticleTests::test_without_supplement_volume", "tests/test_document.py::ArticleTests::test_without_thesis_degree", "tests/test_document.py::ArticleTests::test_without_thesis_organization", "tests/test_document.py::ArticleTests::test_without_volume", "tests/test_document.py::ArticleTests::test_without_wos_citation_indexes", "tests/test_document.py::ArticleTests::test_without_wos_subject_areas", "tests/test_document.py::ArticleTests::test_wos_citation_indexes", 
"tests/test_document.py::ArticleTests::test_wos_subject_areas", "tests/test_document.py::CitationTest::test_a_link_access_date", "tests/test_document.py::CitationTest::test_analytic_institution_for_a_article_citation", "tests/test_document.py::CitationTest::test_analytic_institution_for_a_book_citation", "tests/test_document.py::CitationTest::test_article_title", "tests/test_document.py::CitationTest::test_article_without_title", "tests/test_document.py::CitationTest::test_authors_article", "tests/test_document.py::CitationTest::test_authors_book", "tests/test_document.py::CitationTest::test_authors_link", "tests/test_document.py::CitationTest::test_authors_thesis", "tests/test_document.py::CitationTest::test_book_chapter_title", "tests/test_document.py::CitationTest::test_book_edition", "tests/test_document.py::CitationTest::test_book_volume", "tests/test_document.py::CitationTest::test_book_without_chapter_title", "tests/test_document.py::CitationTest::test_citation_sample_congress", "tests/test_document.py::CitationTest::test_citation_sample_link", "tests/test_document.py::CitationTest::test_citation_sample_link_without_comment", "tests/test_document.py::CitationTest::test_conference_edition", "tests/test_document.py::CitationTest::test_conference_name", "tests/test_document.py::CitationTest::test_conference_sponsor", "tests/test_document.py::CitationTest::test_conference_without_name", "tests/test_document.py::CitationTest::test_conference_without_sponsor", "tests/test_document.py::CitationTest::test_date", "tests/test_document.py::CitationTest::test_doi", "tests/test_document.py::CitationTest::test_editor", "tests/test_document.py::CitationTest::test_elocation_14", "tests/test_document.py::CitationTest::test_elocation_514", "tests/test_document.py::CitationTest::test_end_page_14", "tests/test_document.py::CitationTest::test_end_page_514", "tests/test_document.py::CitationTest::test_end_page_withdout_data", "tests/test_document.py::CitationTest::test_first_author_article", "tests/test_document.py::CitationTest::test_first_author_book", "tests/test_document.py::CitationTest::test_first_author_link", "tests/test_document.py::CitationTest::test_first_author_thesis", "tests/test_document.py::CitationTest::test_first_author_without_monographic_authors", "tests/test_document.py::CitationTest::test_first_author_without_monographic_authors_but_not_a_book_citation", "tests/test_document.py::CitationTest::test_index_number", "tests/test_document.py::CitationTest::test_institutions_all_fields", "tests/test_document.py::CitationTest::test_institutions_v11", "tests/test_document.py::CitationTest::test_institutions_v17", "tests/test_document.py::CitationTest::test_institutions_v29", "tests/test_document.py::CitationTest::test_institutions_v50", "tests/test_document.py::CitationTest::test_institutions_v58", "tests/test_document.py::CitationTest::test_invalid_edition", "tests/test_document.py::CitationTest::test_isbn", "tests/test_document.py::CitationTest::test_isbn_but_not_a_book", "tests/test_document.py::CitationTest::test_issn", "tests/test_document.py::CitationTest::test_issn_but_not_an_article", "tests/test_document.py::CitationTest::test_issue_part", "tests/test_document.py::CitationTest::test_issue_title", "tests/test_document.py::CitationTest::test_journal_issue", "tests/test_document.py::CitationTest::test_journal_volume", "tests/test_document.py::CitationTest::test_link", "tests/test_document.py::CitationTest::test_link_title", 
"tests/test_document.py::CitationTest::test_link_without_title", "tests/test_document.py::CitationTest::test_monographic_authors", "tests/test_document.py::CitationTest::test_monographic_first_author", "tests/test_document.py::CitationTest::test_pages_14", "tests/test_document.py::CitationTest::test_pages_514", "tests/test_document.py::CitationTest::test_pages_withdout_data", "tests/test_document.py::CitationTest::test_publication_type_article", "tests/test_document.py::CitationTest::test_publication_type_book", "tests/test_document.py::CitationTest::test_publication_type_conference", "tests/test_document.py::CitationTest::test_publication_type_link", "tests/test_document.py::CitationTest::test_publication_type_thesis", "tests/test_document.py::CitationTest::test_publication_type_undefined", "tests/test_document.py::CitationTest::test_publisher", "tests/test_document.py::CitationTest::test_publisher_address", "tests/test_document.py::CitationTest::test_publisher_address_without_e", "tests/test_document.py::CitationTest::test_series_book", "tests/test_document.py::CitationTest::test_series_but_neither_journal_book_or_conference_citation", "tests/test_document.py::CitationTest::test_series_conference", "tests/test_document.py::CitationTest::test_series_journal", "tests/test_document.py::CitationTest::test_source_book_title", "tests/test_document.py::CitationTest::test_source_journal", "tests/test_document.py::CitationTest::test_source_journal_without_journal_title", "tests/test_document.py::CitationTest::test_sponsor", "tests/test_document.py::CitationTest::test_start_page_14", "tests/test_document.py::CitationTest::test_start_page_514", "tests/test_document.py::CitationTest::test_start_page_withdout_data", "tests/test_document.py::CitationTest::test_thesis_institution", "tests/test_document.py::CitationTest::test_thesis_title", "tests/test_document.py::CitationTest::test_thesis_without_title", "tests/test_document.py::CitationTest::test_title_when_article_citation", "tests/test_document.py::CitationTest::test_title_when_conference_citation", "tests/test_document.py::CitationTest::test_title_when_link_citation", "tests/test_document.py::CitationTest::test_title_when_thesis_citation", "tests/test_document.py::CitationTest::test_with_volume_but_not_a_journal_article_neither_a_book", "tests/test_document.py::CitationTest::test_without_analytic_institution", "tests/test_document.py::CitationTest::test_without_authors", "tests/test_document.py::CitationTest::test_without_date", "tests/test_document.py::CitationTest::test_without_doi", "tests/test_document.py::CitationTest::test_without_edition", "tests/test_document.py::CitationTest::test_without_editor", "tests/test_document.py::CitationTest::test_without_first_author", "tests/test_document.py::CitationTest::test_without_index_number", "tests/test_document.py::CitationTest::test_without_institutions", "tests/test_document.py::CitationTest::test_without_issue", "tests/test_document.py::CitationTest::test_without_issue_part", "tests/test_document.py::CitationTest::test_without_issue_title", "tests/test_document.py::CitationTest::test_without_link", "tests/test_document.py::CitationTest::test_without_monographic_authors", "tests/test_document.py::CitationTest::test_without_monographic_authors_but_not_a_book_citation", "tests/test_document.py::CitationTest::test_without_publisher", "tests/test_document.py::CitationTest::test_without_publisher_address", "tests/test_document.py::CitationTest::test_without_series", 
"tests/test_document.py::CitationTest::test_without_sponsor", "tests/test_document.py::CitationTest::test_without_thesis_institution", "tests/test_document.py::CitationTest::test_without_volume" ]
[]
BSD 2-Clause "Simplified" License
441
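The xylose patch above (scieloorg__xylose-93) replaces the legacy v601 check with an inspection of the v120 field. A minimal sketch of the new detection logic, assuming only the record layout visible in the diff; the sample dicts below are hypothetical and are not taken from the dataset:

# Sketch of the data-model detection introduced by the xylose-93 patch.
# The field name v120 comes from the diff above; the sample records are hypothetical.
def data_model_version(article_data):
    """Return 'xml' when field v120 carries an XML marker, otherwise 'html'."""
    v120 = article_data.get('v120', [{'_': ''}])[0]['_']
    return 'xml' if 'xml' in v120.lower() else 'html'

# These mirror the cases exercised in the test_patch:
assert data_model_version({'v120': [{'_': 'XML_1.0'}]}) == 'xml'
assert data_model_version({'v120': [{'_': '4.0'}]}) == 'html'
assert data_model_version({}) == 'html'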
scrapy__scrapy-1793
7ce32422c4e266b0799c06c1b8263d28fc0d9df7
2016-02-18 21:57:39
a975a50558cd78a1573bee2e957afcb419fd1bd6
diff --git a/scrapy/responsetypes.py b/scrapy/responsetypes.py index 4880cc7b9..c667b141d 100644 --- a/scrapy/responsetypes.py +++ b/scrapy/responsetypes.py @@ -59,7 +59,8 @@ class ResponseTypes(object): def from_content_disposition(self, content_disposition): try: - filename = to_native_str(content_disposition).split(';')[1].split('=')[1] + filename = to_native_str(content_disposition, + encoding='latin-1', errors='replace').split(';')[1].split('=')[1] filename = filename.strip('"\'') return self.from_filename(filename) except IndexError:
PY3: error decoding Content-Disposition header This request ``` scrapy shell 'http://npe.com.cn/plus/save_to_doc.php?id=1666' ``` raises this error: ``` Traceback (most recent call last): File "/Users/kmike/envs/dl/bin/scrapy", line 9, in <module> load_entry_point('Scrapy', 'console_scripts', 'scrapy')() File "/Users/kmike/svn/scrapy/scrapy/cmdline.py", line 142, in execute _run_print_help(parser, _run_command, cmd, args, opts) File "/Users/kmike/svn/scrapy/scrapy/cmdline.py", line 88, in _run_print_help func(*a, **kw) File "/Users/kmike/svn/scrapy/scrapy/cmdline.py", line 149, in _run_command cmd.run(args, opts) File "/Users/kmike/svn/scrapy/scrapy/commands/shell.py", line 71, in run shell.start(url=url) File "/Users/kmike/svn/scrapy/scrapy/shell.py", line 47, in start self.fetch(url, spider) File "/Users/kmike/svn/scrapy/scrapy/shell.py", line 112, in fetch reactor, self._schedule, request, spider) File "/Users/kmike/envs/dl/lib/python3.5/site-packages/Twisted-15.5.0-py3.5.egg/twisted/internet/threads.py", line 122, in blockingCallFromThread result.raiseException() File "/Users/kmike/envs/dl/lib/python3.5/site-packages/Twisted-15.5.0-py3.5.egg/twisted/python/failure.py", line 368, in raiseException raise self.value.with_traceback(self.tb) UnicodeDecodeError: 'utf-8' codec can't decode byte 0xb8 in position 25: invalid start byte ``` The error points to a wrong location (similar to #1760); the real traceback is ``` Traceback (most recent call last): File "/Users/kmike/envs/dl/lib/python3.5/site-packages/Twisted-15.5.0-py3.5.egg/twisted/internet/defer.py", line 1126, in _inlineCallbacks result = result.throwExceptionIntoGenerator(g) File "/Users/kmike/envs/dl/lib/python3.5/site-packages/Twisted-15.5.0-py3.5.egg/twisted/python/failure.py", line 389, in throwExceptionIntoGenerator return g.throw(self.type, self.value, self.tb) File "/Users/kmike/svn/scrapy/scrapy/core/downloader/middleware.py", line 43, in process_request defer.returnValue((yield download_func(request=request,spider=spider))) File "/Users/kmike/envs/dl/lib/python3.5/site-packages/Twisted-15.5.0-py3.5.egg/twisted/internet/defer.py", line 588, in _runCallbacks current.result = callback(current.result, *args, **kw) File "/Users/kmike/svn/scrapy/scrapy/core/downloader/handlers/http11.py", line 272, in _cb_bodydone respcls = responsetypes.from_args(headers=headers, url=url) File "/Users/kmike/svn/scrapy/scrapy/responsetypes.py", line 110, in from_args cls = self.from_headers(headers) File "/Users/kmike/svn/scrapy/scrapy/responsetypes.py", line 78, in from_headers cls = self.from_content_disposition(headers[b'Content-Disposition']) File "/Users/kmike/svn/scrapy/scrapy/responsetypes.py", line 62, in from_content_disposition filename = to_native_str(content_disposition).split(';')[1].split('=')[1] File "/Users/kmike/svn/scrapy/scrapy/utils/python.py", line 129, in to_native_str return to_unicode(text, encoding, errors) File "/Users/kmike/svn/scrapy/scrapy/utils/python.py", line 107, in to_unicode return text.decode(encoding, errors) UnicodeDecodeError: 'utf-8' codec can't decode byte 0xb8 in position 25: invalid start byte ``` It looks like Content-Disposition is decoded using utf-8, but the encoding was not UTF-8.
scrapy/scrapy
diff --git a/tests/test_responsetypes.py b/tests/test_responsetypes.py index 2374d518f..118136ac4 100644 --- a/tests/test_responsetypes.py +++ b/tests/test_responsetypes.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- import unittest from scrapy.responsetypes import responsetypes @@ -20,8 +21,14 @@ class ResponseTypesTest(unittest.TestCase): def test_from_content_disposition(self): mappings = [ - ('attachment; filename="data.xml"', XmlResponse), - ('attachment; filename=data.xml', XmlResponse), + (b'attachment; filename="data.xml"', XmlResponse), + (b'attachment; filename=data.xml', XmlResponse), + (u'attachment;filename=data£.tar.gz'.encode('utf-8'), Response), + (u'attachment;filename=dataµ.tar.gz'.encode('latin-1'), Response), + (u'attachment;filename=data高.doc'.encode('gbk'), Response), + (u'attachment;filename=دورهdata.html'.encode('cp720'), HtmlResponse), + (u'attachment;filename=日本語版Wikipedia.xml'.encode('iso2022_jp'), XmlResponse), + ] for source, cls in mappings: retcls = responsetypes.from_content_disposition(source)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks", "has_issue_reference" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 1 }
1.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc libxml2-dev libxslt1-dev zlib1g-dev libffi-dev libssl-dev" ], "python": "3.5", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 Automat==22.10.0 certifi==2021.5.30 cffi==1.15.1 constantly==15.1.0 cryptography==40.0.2 cssselect==1.1.0 hyperlink==21.0.0 idna==3.10 importlib-metadata==4.8.3 incremental==22.10.0 iniconfig==1.1.1 lxml==5.3.1 packaging==21.3 parsel==1.6.0 pluggy==1.0.0 py==1.11.0 pyasn1==0.5.1 pyasn1-modules==0.3.0 pycparser==2.21 PyDispatcher==2.0.7 pyOpenSSL==23.2.0 pyparsing==3.1.4 pytest==7.0.1 queuelib==1.6.2 -e git+https://github.com/scrapy/scrapy.git@7ce32422c4e266b0799c06c1b8263d28fc0d9df7#egg=Scrapy service-identity==21.1.0 six==1.17.0 tomli==1.2.3 Twisted==22.4.0 typing_extensions==4.1.1 w3lib==2.0.1 zipp==3.6.0 zope.interface==5.5.2
name: scrapy channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - automat==22.10.0 - cffi==1.15.1 - constantly==15.1.0 - cryptography==40.0.2 - cssselect==1.1.0 - hyperlink==21.0.0 - idna==3.10 - importlib-metadata==4.8.3 - incremental==22.10.0 - iniconfig==1.1.1 - lxml==5.3.1 - packaging==21.3 - parsel==1.6.0 - pluggy==1.0.0 - py==1.11.0 - pyasn1==0.5.1 - pyasn1-modules==0.3.0 - pycparser==2.21 - pydispatcher==2.0.7 - pyopenssl==23.2.0 - pyparsing==3.1.4 - pytest==7.0.1 - queuelib==1.6.2 - service-identity==21.1.0 - six==1.17.0 - tomli==1.2.3 - twisted==22.4.0 - typing-extensions==4.1.1 - w3lib==2.0.1 - zipp==3.6.0 - zope-interface==5.5.2 prefix: /opt/conda/envs/scrapy
[ "tests/test_responsetypes.py::ResponseTypesTest::test_from_content_disposition" ]
[]
[ "tests/test_responsetypes.py::ResponseTypesTest::test_custom_mime_types_loaded", "tests/test_responsetypes.py::ResponseTypesTest::test_from_args", "tests/test_responsetypes.py::ResponseTypesTest::test_from_body", "tests/test_responsetypes.py::ResponseTypesTest::test_from_content_type", "tests/test_responsetypes.py::ResponseTypesTest::test_from_filename", "tests/test_responsetypes.py::ResponseTypesTest::test_from_headers" ]
[]
BSD 3-Clause "New" or "Revised" License
442
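The scrapy patch above (scrapy__scrapy-1793) stops decoding the Content-Disposition header as UTF-8 and uses latin-1 with errors='replace', so arbitrary header bytes can no longer raise UnicodeDecodeError. A minimal standalone sketch of that behaviour; the helper name is hypothetical, and only the decoding choice mirrors the diff:

# Sketch of the header-decoding fix from scrapy-1793; the function name is
# hypothetical, but latin-1 + errors='replace' mirrors the diff above.
def filename_from_content_disposition(header: bytes) -> str:
    # latin-1 maps every byte to a character, so decoding cannot fail here.
    text = header.decode('latin-1', errors='replace')
    filename = text.split(';')[1].split('=')[1]
    return filename.strip('"\'')

# A GBK-encoded header like the one in the test_patch no longer blows up:
header = u'attachment;filename=data高.doc'.encode('gbk')
print(filename_from_content_disposition(header))  # mojibake filename, but no UnicodeDecodeError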
bukzor__RefactorLib-24
181ff5525a5904eb7cf31653e80723450b7fc45e
2016-02-19 07:17:40
181ff5525a5904eb7cf31653e80723450b7fc45e
diff --git a/.travis.yml b/.travis.yml index bb006db..c471dc8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,10 +2,15 @@ language: python env: # These should match the tox env list - TOXENV=py26 - TOXENV=py27 + - TOXENV=py34 install: - - pip install coveralls tox --use-mirrors + - pip install coveralls tox - npm install reflect script: tox after_success: - coveralls sudo: false +cache: + directories: + - $HOME/.cache/pip + - $HOME/.pre-commit diff --git a/refactorlib/cheetah/node.py b/refactorlib/cheetah/node.py index 755fbfa..48e163f 100644 --- a/refactorlib/cheetah/node.py +++ b/refactorlib/cheetah/node.py @@ -1,20 +1,28 @@ """ cheetah-specific additions to the lxml element node class. """ +import six + from lxml import etree from refactorlib.node import RefactorLibNodeBase, one class CheetahNodeBase(RefactorLibNodeBase): def find_calls(self, func_name): + if isinstance(func_name, bytes): + func_name = func_name.decode('UTF-8') return self.xpath( './/Placeholder' '[./CheetahVarNameChunks/CallArgString]' - '[./CheetahVarNameChunks/DottedName="%s"]' % func_name + '[./CheetahVarNameChunks/DottedName="{0}"]'.format( + func_name, + ) ) + self.xpath( './/CheetahVar' '[./CheetahVarBody/CheetahVarNameChunks/CallArgString]' - '[./CheetahVarBody/CheetahVarNameChunks/DottedName="%s"]' % func_name + '[./CheetahVarBody/CheetahVarNameChunks/DottedName="{0}"]'.format( + func_name, + ) ) def find_decorators(self, dec_name): @@ -72,6 +80,8 @@ class CheetahNodeBase(RefactorLibNodeBase): return comment def is_in_context(self, directive_string): + if isinstance(directive_string, bytes): + directive_string = directive_string.decode('UTF-8') try: directive_name, var = directive_string.split(None, 1) except ValueError: @@ -86,7 +96,7 @@ class CheetahNodeBase(RefactorLibNodeBase): directive.name == directive_name and ( directive.var is None and var is None or - directive.var.totext(with_tail=False) == var + directive.var.totext(with_tail=False).decode('UTF-8') == var ) ): return True @@ -158,7 +168,7 @@ class CheetahVariable(CheetahNodeBase): args = self.args if not args: # no arguments. - assert args_container.totext().strip('(\n\t )') == '', args_container.totext() + assert args_container.totext().strip(b'(\n\t )') == b'', args_container.totext() self.remove_self() return @@ -225,7 +235,7 @@ class CheetahDecorator(CheetahNodeBase): class CheetahDirective(CheetahNodeBase): def replace_directive(self, other): - if isinstance(other, basestring): + if isinstance(other, six.string_types): var = self.makeelement('CheetahVar') try: directive, var.text = other.split(None, 1) @@ -251,7 +261,7 @@ class CheetahDirective(CheetahNodeBase): @property def is_multiline_directive(self): return ( - self.totext().strip().endswith(':') or + self.totext().strip().endswith(b':') or not self.xpath( './self::Directive[starts-with(., "#end")] or ' './SimpleExprDirective or ' diff --git a/refactorlib/cheetah/parse.py b/refactorlib/cheetah/parse.py index ac908b1..2c86bbc 100644 --- a/refactorlib/cheetah/parse.py +++ b/refactorlib/cheetah/parse.py @@ -153,7 +153,7 @@ def detect_encoding(source): def parse(cheetah_content, encoding=None): - # yelp_cheetah requires unicode + # yelp_cheetah requires text if type(cheetah_content) is bytes: cheetah_content = cheetah_content.decode('UTF-8') diff --git a/refactorlib/cli/xmlstrip.py b/refactorlib/cli/xmlstrip.py index 376d498..79c4e0b 100644 --- a/refactorlib/cli/xmlstrip.py +++ b/refactorlib/cli/xmlstrip.py @@ -7,7 +7,7 @@ This is the inverse operation of `xmlfrom`. 
def xmlstrip(filename): from lxml.etree import XML, tostring - tree = XML(open(filename).read()).getroottree() + tree = XML(open(filename, 'rb').read()).getroottree() encoding = tree.docinfo.encoding return tostring(tree, method='text', encoding=encoding) diff --git a/refactorlib/javascript/parse.py b/refactorlib/javascript/parse.py index 73038bf..107eb65 100644 --- a/refactorlib/javascript/parse.py +++ b/refactorlib/javascript/parse.py @@ -1,3 +1,5 @@ +import six + from refactorlib.util import static @@ -5,8 +7,7 @@ DEBUG = False def parse(javascript_contents, encoding='ascii'): - """ - Given some javascript contents, as a unicode string, return the lxml representation. + """Given some javascript contents, as a text string, return the lxml representation. "reflectjs" below refers to the Mozilla Reflect protocol: * https://developer.mozilla.org/en-US/docs/SpiderMonkey/Parser_API * https://npmjs.org/package/reflect @@ -36,7 +37,10 @@ def reflectjs_parse(javascript_contents): from refactorlib.util import Popen, PIPE from os.path import join from simplejson import loads - from simplejson.ordered_dict import OrderedDict + try: + from collections import OrderedDict + except ImportError: + from ordereddict import OrderedDict reflectjs_script = join(TOP, 'javascript/reflectjs.js') reflectjs = Popen([find_nodejs(), reflectjs_script], stdin=PIPE, stdout=PIPE) @@ -56,7 +60,6 @@ def reflectjs_to_dictnode(tree): text or tail, and may have some overlap issues. """ from refactorlib.dictnode import DictNode - from types import NoneType root_dictnode = DictNode(parent=None) stack = [(tree, root_dictnode)] @@ -78,15 +81,17 @@ def reflectjs_to_dictnode(tree): else: attrs[val['type']] = val['name'] elif attr == 'value': - attrs[attr] = unicode(val) + attrs[attr] = six.text_type(val) # We would normally lose this type information, as lxml # wants everything to be a string. attrs['type'] = type(val).__name__ - elif isinstance(val, unicode): + elif isinstance(val, six.text_type): attrs[attr] = val - elif isinstance(val, (bool, NoneType, str)): + elif isinstance(val, bytes): + attrs[attr] = val.decode('UTF-8') + elif isinstance(val, (bool, type(None))): # TODO: figure out what happens with non-ascii data. - attrs[attr] = unicode(val) + attrs[attr] = six.text_type(val) else: # Should never happen assert False @@ -97,5 +102,5 @@ def reflectjs_to_dictnode(tree): children=[DictNode(parent=dictnode) for child in children], attrs=attrs, )) - stack.extend(reversed(zip(children, dictnode['children']))) + stack.extend(reversed(list(zip(children, dictnode['children'])))) return root_dictnode diff --git a/refactorlib/parse.py b/refactorlib/parse.py index 8e93315..d641074 100644 --- a/refactorlib/parse.py +++ b/refactorlib/parse.py @@ -5,18 +5,15 @@ def parse(filename, filetype=None, encoding=None): from refactorlib.filetypes import FILETYPES filetype = FILETYPES.detect_filetype(filename, filetype) - source = open(filename).read() + source = open(filename, 'rb').read() # If no encoding was explicitly specified, see if we can parse # it out from the contents of the file. if encoding is None: encoding = filetype.encoding_detector(source) - if encoding: - source = unicode(source, encoding) - else: - # I don't see why encoding=None is different from not specifying the encoding. 
- source = unicode(source) + encoding = encoding if encoding else 'UTF-8' + source = source.decode(encoding) return filetype.parser(source, encoding) @@ -36,10 +33,7 @@ def dictnode_to_lxml(tree, node_lookup=None, encoding=None): if not node_lookup: from refactorlib.node import node_lookup - from lxml.etree import XMLParser, fromstring - lxml_parser_object = XMLParser(encoding=encoding) - lxml_parser_object.set_element_class_lookup(node_lookup) - Element = lxml_parser_object.makeelement + from lxml.etree import Element, XMLParser root = None stack = [(tree, root)] @@ -50,7 +44,10 @@ def dictnode_to_lxml(tree, node_lookup=None, encoding=None): if parent is None: # We use this roundabout method becuase the encoding is always set # to 'UTF8' if we use parser.makeelement() - lxmlnode = fromstring('<trash></trash>', parser=lxml_parser_object) + parser = XMLParser(encoding=encoding) + parser.set_element_class_lookup(node_lookup) + parser.feed(b'<trash></trash>') + lxmlnode = parser.close() lxmlnode.tag = node['name'] lxmlnode.attrib.update(node.get('attrs', {})) root = lxmlnode diff --git a/refactorlib/python/parse.py b/refactorlib/python/parse.py index 28a477b..a3cbc27 100644 --- a/refactorlib/python/parse.py +++ b/refactorlib/python/parse.py @@ -3,7 +3,7 @@ import re # regex taken from inducer/pudb's detect_encoding -pythonEncodingDirectiveRE = re.compile(r"^\s*#.*coding[:=]\s*([-\w.]+)") +encoding_re = re.compile(r"^\s*#.*coding[:=]\s*([-\w.]+)".encode('UTF-8')) def detect_encoding(source): @@ -11,20 +11,19 @@ def detect_encoding(source): Given some python contents as a byte string, return the name of the encoding, or else None. """ # According to the PEP0263, the encoding directive must appear on one of the first two lines of the file - top_lines = source.split('\n', 2)[:2] + top_lines = source.split(b'\n', 2)[:2] for line in top_lines: - encodingMatch = pythonEncodingDirectiveRE.search(line) - if encodingMatch: - return encodingMatch.group(1) + encoding_match = encoding_re.search(line) + if encoding_match: + return encoding_match.group(1).decode('UTF-8') # We didn't find anything. return None def parse(python_contents, encoding): - """ - Given some python contents as a unicode string, return the lxml representation. + """Given some python contents as a text string, return the lxml representation. """ lib2to3_python = lib2to3_parse(python_contents) dictnode_python = lib2to3_to_dictnode(lib2to3_python) diff --git a/refactorlib/util.py b/refactorlib/util.py index 9b9d983..1c5e01d 100644 --- a/refactorlib/util.py +++ b/refactorlib/util.py @@ -42,7 +42,7 @@ class Popen(_Popen): CalledProcessError object will have the return code in the returncode attribute and output in the output attribute. 
""" - output, _ = self.communicate(stdin) + output, _ = self.communicate(stdin.encode('UTF-8')) retcode = self.poll() if retcode: cmd = self.args diff --git a/setup.py b/setup.py index 8397031..98690bc 100755 --- a/setup.py +++ b/setup.py @@ -19,6 +19,8 @@ setuptools.setup( install_requires=[ 'cached-property', 'lxml>=2.2', # We run with 2.2.4.0 + 'ordereddict', + 'six', ], extras_require={ 'javascript': ['simplejson'], @@ -48,6 +50,8 @@ setuptools.setup( 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.4', 'Topic :: Software Development :: Libraries :: Python Modules', ], ) diff --git a/tox.ini b/tox.ini index 9bd7b2c..2737f17 100644 --- a/tox.ini +++ b/tox.ini @@ -1,11 +1,10 @@ [tox] project = refactorlib # These should match the travis env list -envlist = py26,py27 +envlist = py26,py27,py34 skipsdist = true [testenv] -install_command = pip install --use-wheel {opts} {packages} deps = -rrequirements-dev.txt commands = coverage erase
Support python3
bukzor/RefactorLib
diff --git a/testing/util.py b/testing/util.py index 0170f44..fa9afce 100644 --- a/testing/util.py +++ b/testing/util.py @@ -1,6 +1,8 @@ """ A home for the 'yellow code' of testing. """ +from __future__ import unicode_literals + from os.path import join @@ -16,7 +18,6 @@ def example_dir(func): def get_examples(func): - from os import listdir from os.path import isfile @@ -77,13 +78,13 @@ def parametrize(arg_finder): def assert_same_content(old_file, new_content, extra_suffix=''): new_file = ''.join((old_file, extra_suffix, FAILURE_SUFFIX)) try: - open(new_file, 'w').write(new_content) + open(new_file, 'wb').write(new_content) except IOError as e: if e.errno == 2: # No such file. from os import makedirs from os.path import dirname makedirs(dirname(new_file)) - open(new_file, 'w').write(new_content) + open(new_file, 'wb').write(new_content) else: raise @@ -91,15 +92,16 @@ def assert_same_content(old_file, new_content, extra_suffix=''): def assert_same_file_content(old_file, new_file): - old_content = open(old_file).readlines() - new_content = open(new_file).readlines() + old_content = open(old_file, 'rb').readlines() + new_content = open(new_file, 'rb').readlines() diffs = diff(old_content, new_content) if diffs: diffs = 'Results differ:\n--- %s\n+++ %s\n%s' % (old_file, new_file, diffs) - # py.test derps on non-utf8 bytes, so I force unicode like so: - diffs = diffs.decode('UTF-8', 'replace') + # py.test derps on non-utf8 bytes, so I force text like so: + if isinstance(diffs, bytes): + diffs = diffs.decode('UTF-8', 'replace') raise AssertionError(diffs) else: from os import unlink @@ -112,7 +114,7 @@ def diff(old_content, new_content, n=3): diffdata = tuple(diff(old_content, new_content)) difflines = set() for lineno, line in enumerate(diffdata): - if not line.startswith(' '): # Ignore the similar lines. + if not line.startswith(str(' ')): # Ignore the similar lines. 
difflines.update(range(lineno - n, lineno + n + 1)) return '\n'.join( diff --git a/tests/cheetah/add_comment_test.py b/tests/cheetah/add_comment_test.py index 15ecc6c..33f78c5 100644 --- a/tests/cheetah/add_comment_test.py +++ b/tests/cheetah/add_comment_test.py @@ -30,7 +30,7 @@ def test_can_add_comments(): calls[0].add_comment('2 esc') calls[0].add_comment('3 esc') - assert ''' + assert b''' #def foo() ## 1 esc ## 2 esc diff --git a/tests/cheetah/directive_test.py b/tests/cheetah/directive_test.py index 2adf1fe..01071b0 100644 --- a/tests/cheetah/directive_test.py +++ b/tests/cheetah/directive_test.py @@ -14,19 +14,19 @@ def test_find_end_directive(example, output): new_output = [] for directive in lxmlnode.xpath('//Directive'): new_output.append( - 'Directive: %s' % tree.getpath(directive), + b'Directive: ' + tree.getpath(directive).encode('UTF-8'), ) if directive.is_multiline_directive: new_output.append( - 'End: %s' % tree.getpath(directive.get_end_directive()), + b'End: ' + tree.getpath(directive.get_end_directive()).encode('UTF-8'), ) else: new_output.append( - 'Single-line: %s' % directive.totext() + b'Single-line: ' + directive.totext() ) - new_output.append('') + new_output.append(b'') - new_output = '\n'.join(new_output) + new_output = b'\n'.join(new_output) assert_same_content(output, new_output) @@ -40,7 +40,7 @@ def test_replace_directive(example, output): if directive.var is None: directive.replace_directive('#{{{%s}}}' % directive.name) else: - directive.replace_directive('#{{{%s}}} [%s]' % (directive.name, directive.var.totext(with_tail=False))) + directive.replace_directive('#{{{%s}}} [%s]' % (directive.name, directive.var.totext(with_tail=False).decode('UTF-8'))) new_output = lxmlnode.totext() assert_same_content(output, new_output) @@ -67,11 +67,11 @@ def test_get_enclosing_blocks(example, output): new_output = [] for context, directive in sorted(unique_contexts.items()): new_output.append( - 'Directive: %s' % tree.getpath(directive) + b'Directive: ' + tree.getpath(directive).encode('UTF-8') ) for c in context: - new_output.append(' ' + c) - new_output.append('') + new_output.append(b' ' + c.encode('UTF-8')) + new_output.append(b'') - new_output = '\n'.join(new_output) + new_output = b'\n'.join(new_output) assert_same_content(output, new_output) diff --git a/tests/cheetah/is_in_context_test.py b/tests/cheetah/is_in_context_test.py index 456c9ba..0a127ae 100644 --- a/tests/cheetah/is_in_context_test.py +++ b/tests/cheetah/is_in_context_test.py @@ -1,3 +1,5 @@ +import six + from testing.util import parametrize, get_output, assert_same_content from . 
import xfailif_no_cheetah @@ -11,9 +13,9 @@ def test_is_in_context(example, output): top_level_directives = lxmlnode.xpath('/cheetah/*/*[1][self::Directive]') top_level_directives = [ - "#%s %s" % (d.name, d.var.totext(with_tail=False)) - if d.var else - "#%s" % d.name + b'#' + d.name.encode('UTF-8') + b' ' + d.var.totext(with_tail=False) + if d.var is not None else + b'#' + d.name.encode('UTF-8') for d in top_level_directives ] @@ -23,13 +25,14 @@ def test_is_in_context(example, output): new_output = [] for placeholder in lxmlnode.xpath('//Placeholder'): new_output.append( - 'Placeholder: %s' % placeholder.totext(with_tail=False) + b'Placeholder: ' + placeholder.totext(with_tail=False) ) for d in top_level_directives: new_output.append( - ' %s %s' % (d, placeholder.is_in_context(d)) + b' ' + d + b' ' + + six.text_type(placeholder.is_in_context(d)).encode('UTF-8') ) - new_output.append('') + new_output.append(b'') - new_output = '\n'.join(new_output) + new_output = b'\n'.join(new_output) assert_same_content(output, new_output) diff --git a/tests/cheetah/parse_test.py b/tests/cheetah/parse_test.py index 418865e..059804a 100644 --- a/tests/cheetah/parse_test.py +++ b/tests/cheetah/parse_test.py @@ -6,7 +6,7 @@ from . import xfailif_no_cheetah @xfailif_no_cheetah @parametrize(get_examples) def test_can_make_round_trip(example): - text = open(example).read() + text = open(example, 'rb').read() example = parse(example) assert text == example.totext() diff --git a/tests/cheetah/remove_call_test.py b/tests/cheetah/remove_call_test.py index 8f36629..1084902 100644 --- a/tests/cheetah/remove_call_test.py +++ b/tests/cheetah/remove_call_test.py @@ -12,7 +12,7 @@ def test_can_find_calls(): calls = example.find_calls('foo') assert len(calls) == 1 - assert calls[0].totext() == '$foo()' + assert calls[0].totext() == b'$foo()' @xfailif_no_cheetah diff --git a/tests/javascript/parse_test.py b/tests/javascript/parse_test.py index 2c86c0d..2122e8c 100644 --- a/tests/javascript/parse_test.py +++ b/tests/javascript/parse_test.py @@ -27,7 +27,7 @@ else: def test_can_make_round_trip(example): text = open(example).read() example = parse(example) - assert text == example.totext() + assert text == example.totext().decode('UTF-8') @xfailif_no_js diff --git a/tests/python/parse_test.py b/tests/python/parse_test.py index a0158b6..5677255 100644 --- a/tests/python/parse_test.py +++ b/tests/python/parse_test.py @@ -4,7 +4,7 @@ from refactorlib.parse import parse @parametrize(get_examples) def test_can_make_round_trip(example): - text = open(example).read() + text = open(example, 'rb').read() example = parse(example) assert text == example.totext() @@ -12,7 +12,7 @@ def test_can_make_round_trip(example): @parametrize(get_examples) def test_encoding_detection(example): from refactorlib.python.parse import detect_encoding - text = open(example).read() + text = open(example, 'rb').read() example = parse(example) detected_encoding = detect_encoding(text)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 3, "test_score": 3 }, "num_modified_files": 10 }
0.9
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[cheetah,javascript]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [], "python": "3.9", "reqs_path": [ "requirements-dev.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
astroid==3.3.9 cached-property==2.0.1 cfgv==3.4.0 coverage==7.8.0 dill==0.3.9 distlib==0.3.9 exceptiongroup==1.2.2 filelock==3.18.0 flake8==7.2.0 identify==2.6.9 iniconfig==2.1.0 isort==6.0.1 lxml==5.3.1 MarkupSafe==3.0.2 mccabe==0.7.0 mock==5.2.0 nodeenv==1.9.1 packaging==24.2 platformdirs==4.3.7 pluggy==1.5.0 pre_commit==4.2.0 pycodestyle==2.13.0 pyflakes==3.3.1 pylint==3.3.6 pytest==8.3.5 PyYAML==6.0.2 -e git+https://github.com/bukzor/RefactorLib.git@181ff5525a5904eb7cf31653e80723450b7fc45e#egg=refactorlib simplejson==3.20.1 six==1.17.0 tomli==2.2.1 tomlkit==0.13.2 typing_extensions==4.13.0 virtualenv==20.29.3 yelp_cheetah==0.12.1
name: RefactorLib channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - argparse==1.4.0 - astroid==3.3.9 - cached-property==2.0.1 - cfgv==3.4.0 - coverage==7.8.0 - dill==0.3.9 - distlib==0.3.9 - exceptiongroup==1.2.2 - filelock==3.18.0 - flake8==7.2.0 - identify==2.6.9 - iniconfig==2.1.0 - isort==6.0.1 - lxml==5.3.1 - markupsafe==3.0.2 - mccabe==0.7.0 - mock==5.2.0 - nodeenv==1.9.1 - packaging==24.2 - platformdirs==4.3.7 - pluggy==1.5.0 - pre-commit==4.2.0 - pycodestyle==2.13.0 - pyflakes==3.3.1 - pylint==3.3.6 - pytest==8.3.5 - pyyaml==6.0.2 - simplejson==3.20.1 - six==1.17.0 - tomli==2.2.1 - tomlkit==0.13.2 - typing-extensions==4.13.0 - virtualenv==20.29.3 - yelp-cheetah==0.12.1 prefix: /opt/conda/envs/RefactorLib
[ "tests/cheetah/directive_test.py::test_find_end_directive[tests/cheetah/directive_data/single-line.tmpl-tests/cheetah/directive_data/test_find_end_directive/single-line.txt]", "tests/cheetah/directive_test.py::test_find_end_directive[tests/cheetah/directive_data/simple.tmpl-tests/cheetah/directive_data/test_find_end_directive/simple.txt]", "tests/cheetah/directive_test.py::test_find_end_directive[tests/cheetah/directive_data/nested.tmpl-tests/cheetah/directive_data/test_find_end_directive/nested.txt]", "tests/cheetah/directive_test.py::test_replace_directive[tests/cheetah/directive_data/single-line.tmpl-tests/cheetah/directive_data/test_replace_directive/single-line.tmpl]", "tests/cheetah/directive_test.py::test_replace_directive[tests/cheetah/directive_data/simple.tmpl-tests/cheetah/directive_data/test_replace_directive/simple.tmpl]", "tests/cheetah/directive_test.py::test_replace_directive[tests/cheetah/directive_data/nested.tmpl-tests/cheetah/directive_data/test_replace_directive/nested.tmpl]", "tests/cheetah/is_in_context_test.py::test_is_in_context[tests/cheetah/is_in_context_data/simple.tmpl-tests/cheetah/is_in_context_data/test_is_in_context/simple.txt]", "tests/cheetah/is_in_context_test.py::test_is_in_context[tests/cheetah/is_in_context_data/indirect.tmpl-tests/cheetah/is_in_context_data/test_is_in_context/indirect.txt]", "tests/cheetah/parse_test.py::test_can_make_round_trip[tests/cheetah/parse_data/example1.tmpl]", "tests/cheetah/parse_test.py::test_can_make_round_trip[tests/cheetah/parse_data/continuation.tmpl]", "tests/cheetah/parse_test.py::test_can_make_round_trip[tests/cheetah/parse_data/all_directives.tmpl]", "tests/cheetah/parse_test.py::test_can_make_round_trip[tests/cheetah/parse_data/unicode.tmpl]", "tests/cheetah/parse_test.py::test_can_make_round_trip[tests/cheetah/parse_data/single-line.tmpl]", "tests/cheetah/parse_test.py::test_can_make_round_trip[tests/cheetah/parse_data/simple_call.tmpl]", "tests/cheetah/parse_test.py::test_can_make_round_trip[tests/cheetah/parse_data/simple.tmpl]", "tests/cheetah/parse_test.py::test_can_make_round_trip[tests/cheetah/parse_data/nested_call.tmpl]", "tests/cheetah/parse_test.py::test_can_make_round_trip[tests/cheetah/parse_data/nested2.tmpl]", "tests/cheetah/parse_test.py::test_can_make_round_trip[tests/cheetah/parse_data/nested.tmpl]", "tests/cheetah/parse_test.py::test_can_make_round_trip[tests/cheetah/parse_data/multiline.tmpl]", "tests/cheetah/parse_test.py::test_can_make_round_trip[tests/cheetah/parse_data/complex_call.tmpl]", "tests/cheetah/parse_test.py::test_matches_known_good_parsing[tests/cheetah/parse_data/example1.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/example1.xml]", "tests/cheetah/parse_test.py::test_matches_known_good_parsing[tests/cheetah/parse_data/continuation.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/continuation.xml]", "tests/cheetah/parse_test.py::test_matches_known_good_parsing[tests/cheetah/parse_data/all_directives.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/all_directives.xml]", "tests/cheetah/parse_test.py::test_matches_known_good_parsing[tests/cheetah/parse_data/unicode.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/unicode.xml]", "tests/cheetah/parse_test.py::test_matches_known_good_parsing[tests/cheetah/parse_data/single-line.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/single-line.xml]", 
"tests/cheetah/parse_test.py::test_matches_known_good_parsing[tests/cheetah/parse_data/simple_call.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/simple_call.xml]", "tests/cheetah/parse_test.py::test_matches_known_good_parsing[tests/cheetah/parse_data/simple.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/simple.xml]", "tests/cheetah/parse_test.py::test_matches_known_good_parsing[tests/cheetah/parse_data/nested_call.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/nested_call.xml]", "tests/cheetah/parse_test.py::test_matches_known_good_parsing[tests/cheetah/parse_data/nested2.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/nested2.xml]", "tests/cheetah/parse_test.py::test_matches_known_good_parsing[tests/cheetah/parse_data/nested.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/nested.xml]", "tests/cheetah/parse_test.py::test_matches_known_good_parsing[tests/cheetah/parse_data/multiline.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/multiline.xml]", "tests/cheetah/parse_test.py::test_matches_known_good_parsing[tests/cheetah/parse_data/complex_call.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/complex_call.xml]", "tests/cheetah/parse_test.py::test_cli_output[tests/cheetah/parse_data/example1.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/example1.xml]", "tests/cheetah/parse_test.py::test_cli_output[tests/cheetah/parse_data/continuation.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/continuation.xml]", "tests/cheetah/parse_test.py::test_cli_output[tests/cheetah/parse_data/all_directives.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/all_directives.xml]", "tests/cheetah/parse_test.py::test_cli_output[tests/cheetah/parse_data/unicode.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/unicode.xml]", "tests/cheetah/parse_test.py::test_cli_output[tests/cheetah/parse_data/single-line.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/single-line.xml]", "tests/cheetah/parse_test.py::test_cli_output[tests/cheetah/parse_data/simple_call.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/simple_call.xml]", "tests/cheetah/parse_test.py::test_cli_output[tests/cheetah/parse_data/simple.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/simple.xml]", "tests/cheetah/parse_test.py::test_cli_output[tests/cheetah/parse_data/nested_call.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/nested_call.xml]", "tests/cheetah/parse_test.py::test_cli_output[tests/cheetah/parse_data/nested2.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/nested2.xml]", "tests/cheetah/parse_test.py::test_cli_output[tests/cheetah/parse_data/nested.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/nested.xml]", "tests/cheetah/parse_test.py::test_cli_output[tests/cheetah/parse_data/multiline.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/multiline.xml]", "tests/cheetah/parse_test.py::test_cli_output[tests/cheetah/parse_data/complex_call.tmpl-tests/cheetah/parse_data/test_matches_known_good_parsing/complex_call.xml]", "tests/cheetah/remove_call_test.py::test_can_remove_calls[tests/cheetah/remove_call_data/simple.tmpl-tests/cheetah/remove_call_data/test_can_remove_calls/simple.tmpl]", "tests/cheetah/remove_call_test.py::test_can_remove_calls[tests/cheetah/remove_call_data/multiline.tmpl-tests/cheetah/remove_call_data/test_can_remove_calls/multiline.tmpl]", 
"tests/python/parse_test.py::test_can_make_round_trip[tests/python/parse_data/unicode2.py]", "tests/python/parse_test.py::test_can_make_round_trip[tests/python/parse_data/unicode.py]", "tests/python/parse_test.py::test_can_make_round_trip[tests/python/parse_data/example2_utf8.py]", "tests/python/parse_test.py::test_can_make_round_trip[tests/python/parse_data/example1.py]", "tests/python/parse_test.py::test_can_make_round_trip[tests/python/parse_data/comment_only.py]", "tests/python/parse_test.py::test_encoding_detection[tests/python/parse_data/unicode2.py]", "tests/python/parse_test.py::test_encoding_detection[tests/python/parse_data/unicode.py]", "tests/python/parse_test.py::test_encoding_detection[tests/python/parse_data/example1.py]", "tests/python/parse_test.py::test_matches_known_good_parsing[tests/python/parse_data/unicode2.py-tests/python/parse_data/test_matches_known_good_parsing/unicode2.xml]", "tests/python/parse_test.py::test_matches_known_good_parsing[tests/python/parse_data/unicode.py-tests/python/parse_data/test_matches_known_good_parsing/unicode.xml]", "tests/python/parse_test.py::test_matches_known_good_parsing[tests/python/parse_data/example2_utf8.py-tests/python/parse_data/test_matches_known_good_parsing/example2_utf8.xml]", "tests/python/parse_test.py::test_matches_known_good_parsing[tests/python/parse_data/example1.py-tests/python/parse_data/test_matches_known_good_parsing/example1.xml]", "tests/python/parse_test.py::test_matches_known_good_parsing[tests/python/parse_data/comment_only.py-tests/python/parse_data/test_matches_known_good_parsing/comment_only.xml]", "tests/python/parse_test.py::test_cli_output[tests/python/parse_data/unicode2.py-tests/python/parse_data/test_matches_known_good_parsing/unicode2.xml]", "tests/python/parse_test.py::test_cli_output[tests/python/parse_data/unicode.py-tests/python/parse_data/test_matches_known_good_parsing/unicode.xml]", "tests/python/parse_test.py::test_cli_output[tests/python/parse_data/example2_utf8.py-tests/python/parse_data/test_matches_known_good_parsing/example2_utf8.xml]", "tests/python/parse_test.py::test_cli_output[tests/python/parse_data/example1.py-tests/python/parse_data/test_matches_known_good_parsing/example1.xml]", "tests/python/parse_test.py::test_cli_output[tests/python/parse_data/comment_only.py-tests/python/parse_data/test_matches_known_good_parsing/comment_only.xml]" ]
[ "tests/python/parse_test.py::test_encoding_detection[tests/python/parse_data/example2_utf8.py]", "tests/python/parse_test.py::test_encoding_detection[tests/python/parse_data/comment_only.py]" ]
[ "tests/cheetah/add_comment_test.py::test_can_add_comments", "tests/cheetah/directive_test.py::test_get_enclosing_blocks[tests/cheetah/directive_data/single-line.tmpl-tests/cheetah/directive_data/test_get_enclosing_blocks/single-line.txt]", "tests/cheetah/directive_test.py::test_get_enclosing_blocks[tests/cheetah/directive_data/simple.tmpl-tests/cheetah/directive_data/test_get_enclosing_blocks/simple.txt]", "tests/cheetah/directive_test.py::test_get_enclosing_blocks[tests/cheetah/directive_data/nested.tmpl-tests/cheetah/directive_data/test_get_enclosing_blocks/nested.txt]", "tests/cheetah/remove_call_test.py::test_can_find_calls", "tests/cheetah/remove_call_test.py::test_can_remove_calls[tests/cheetah/remove_call_data/nested2.tmpl-tests/cheetah/remove_call_data/test_can_remove_calls/nested2.tmpl]", "tests/cheetah/remove_call_test.py::test_can_remove_calls[tests/cheetah/remove_call_data/nested.tmpl-tests/cheetah/remove_call_data/test_can_remove_calls/nested.tmpl]" ]
[]
null
443
sympy__sympy-10635
72058725d8bd819aa68450ddfb619b681d9e1dd2
2016-02-19 19:36:52
0241b35cae5f2adc04dc37b25f7dc9c5f00bd746
diff --git a/sympy/core/relational.py b/sympy/core/relational.py index 136e305b93..10e984362c 100644 --- a/sympy/core/relational.py +++ b/sympy/core/relational.py @@ -8,7 +8,7 @@ from .sympify import _sympify from .evaluate import global_evaluate -from sympy.logic.boolalg import Boolean +from sympy.logic.boolalg import Boolean, BooleanAtom __all__ = ( 'Rel', 'Eq', 'Ne', 'Lt', 'Le', 'Gt', 'Ge', @@ -310,6 +310,10 @@ def __new__(cls, lhs, rhs=0, **options): if r is not None: return _sympify(r) + # If expression have both Boolean terms + if all(isinstance(i, BooleanAtom) for i in (rhs, lhs)): + return S.false # equal args already evaluated + return Relational.__new__(cls, lhs, rhs, **options) @classmethod
Eq(True, False) doesn't evaluate

```
>>> Eq(True, False)
Eq(True, False)
```

(expecting S.false as the answer)
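For reference, the gold patch above handles this by short-circuiting in `Eq.__new__` when both arguments are `BooleanAtom`s. The following is a minimal standalone sketch of that check, not the exact sympy code; in the real patch the check runs after identical arguments have already been reduced:

```python
from sympy import S
from sympy.logic.boolalg import BooleanAtom

def eval_boolean_eq(lhs, rhs):
    # Identical arguments are already reduced by Eq itself; the added
    # check only needs to cover two distinct BooleanAtom arguments.
    if lhs == rhs:
        return S.true
    if all(isinstance(i, BooleanAtom) for i in (lhs, rhs)):
        return S.false
    return None  # no evaluation possible

print(eval_boolean_eq(S.true, S.false))   # False
print(eval_boolean_eq(S.false, S.false))  # True
```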
sympy/sympy
diff --git a/sympy/core/tests/test_relational.py b/sympy/core/tests/test_relational.py index 4309441a6d..50ecadb032 100644 --- a/sympy/core/tests/test_relational.py +++ b/sympy/core/tests/test_relational.py @@ -661,3 +661,9 @@ def test_issue_10304(): assert d.is_comparable is False # if this fails, find a new d e = 1 + d*I assert simplify(Eq(e, 0)) is S.false + +def test_issue_10633(): + assert Eq(True, False) == False + assert Eq(False, True) == False + assert Eq(True, True) == True + assert Eq(False, False) == True
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 1 }
0.7
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "mpmath>=0.19", "pytest", "pytest-cov", "pytest-xdist", "pytest-mock", "pytest-asyncio" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.7", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi @ file:///croot/certifi_1671487769961/work/certifi coverage==7.2.7 exceptiongroup==1.2.2 execnet==2.0.2 importlib-metadata==6.7.0 iniconfig==2.0.0 mpmath==1.3.0 packaging==24.0 pluggy==1.2.0 pytest==7.4.4 pytest-asyncio==0.21.2 pytest-cov==4.1.0 pytest-mock==3.11.1 pytest-xdist==3.5.0 -e git+https://github.com/sympy/sympy.git@72058725d8bd819aa68450ddfb619b681d9e1dd2#egg=sympy tomli==2.0.1 typing_extensions==4.7.1 zipp==3.15.0
name: sympy channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2022.12.7=py37h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=22.3.1=py37h06a4308_0 - python=3.7.16=h7a1cb2a_0 - readline=8.2=h5eee18b_0 - setuptools=65.6.3=py37h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.38.4=py37h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - coverage==7.2.7 - exceptiongroup==1.2.2 - execnet==2.0.2 - importlib-metadata==6.7.0 - iniconfig==2.0.0 - mpmath==1.3.0 - packaging==24.0 - pluggy==1.2.0 - pytest==7.4.4 - pytest-asyncio==0.21.2 - pytest-cov==4.1.0 - pytest-mock==3.11.1 - pytest-xdist==3.5.0 - tomli==2.0.1 - typing-extensions==4.7.1 - zipp==3.15.0 prefix: /opt/conda/envs/sympy
[ "sympy/core/tests/test_relational.py::test_issue_10633" ]
[ "sympy/core/tests/test_relational.py::test_multivariate_relational_as_set", "sympy/core/tests/test_relational.py::test_issue_8444" ]
[ "sympy/core/tests/test_relational.py::test_rel_ne", "sympy/core/tests/test_relational.py::test_rel_subs", "sympy/core/tests/test_relational.py::test_wrappers", "sympy/core/tests/test_relational.py::test_Eq", "sympy/core/tests/test_relational.py::test_rel_Infinity", "sympy/core/tests/test_relational.py::test_bool", "sympy/core/tests/test_relational.py::test_rich_cmp", "sympy/core/tests/test_relational.py::test_doit", "sympy/core/tests/test_relational.py::test_new_relational", "sympy/core/tests/test_relational.py::test_relational_bool_output", "sympy/core/tests/test_relational.py::test_relational_logic_symbols", "sympy/core/tests/test_relational.py::test_univariate_relational_as_set", "sympy/core/tests/test_relational.py::test_Not", "sympy/core/tests/test_relational.py::test_evaluate", "sympy/core/tests/test_relational.py::test_imaginary_compare_raises_TypeError", "sympy/core/tests/test_relational.py::test_complex_compare_not_real", "sympy/core/tests/test_relational.py::test_imaginary_and_inf_compare_raises_TypeError", "sympy/core/tests/test_relational.py::test_complex_pure_imag_not_ordered", "sympy/core/tests/test_relational.py::test_x_minus_y_not_same_as_x_lt_y", "sympy/core/tests/test_relational.py::test_nan_equality_exceptions", "sympy/core/tests/test_relational.py::test_nan_inequality_raise_errors", "sympy/core/tests/test_relational.py::test_nan_complex_inequalities", "sympy/core/tests/test_relational.py::test_complex_infinity_inequalities", "sympy/core/tests/test_relational.py::test_inequalities_symbol_name_same", "sympy/core/tests/test_relational.py::test_inequalities_symbol_name_same_complex", "sympy/core/tests/test_relational.py::test_inequalities_cant_sympify_other", "sympy/core/tests/test_relational.py::test_ineq_avoid_wild_symbol_flip", "sympy/core/tests/test_relational.py::test_issue_8245", "sympy/core/tests/test_relational.py::test_issue_8449", "sympy/core/tests/test_relational.py::test_simplify", "sympy/core/tests/test_relational.py::test_equals", "sympy/core/tests/test_relational.py::test_reversed", "sympy/core/tests/test_relational.py::test_canonical", "sympy/core/tests/test_relational.py::test_issue_10304" ]
[]
BSD
444
sympy__sympy-10636
bb46f5dd1ee5e6a98ac3bd442bed2737dbb754d6
2016-02-19 19:43:23
0241b35cae5f2adc04dc37b25f7dc9c5f00bd746
diff --git a/doc/src/modules/solvers/solveset.rst b/doc/src/modules/solvers/solveset.rst index a3f93dcfb5..025248d570 100644 --- a/doc/src/modules/solvers/solveset.rst +++ b/doc/src/modules/solvers/solveset.rst @@ -398,7 +398,7 @@ How does ``solveset`` ensure that it is not returning any wrong solution? >>> from sympy import symbols, S, pprint, solveset >>> x, n = symbols('x, n') >>> pprint(solveset(abs(x) - n, x, domain=S.Reals), use_unicode=True) - [0, ∞) ∩ {n} + ([0, ∞) ∩ {n}) ∪ ((-∞, 0] ∩ {-n}) Though, there still a lot of work needs to be done in this regard. diff --git a/sympy/assumptions/refine.py b/sympy/assumptions/refine.py index 612ae724df..14b2f2904f 100644 --- a/sympy/assumptions/refine.py +++ b/sympy/assumptions/refine.py @@ -30,7 +30,7 @@ def refine(expr, assumptions=True): # TODO: this will probably not work with Integral or Polynomial expr = expr.func(*args) if hasattr(expr, '_eval_refine'): - ref_expr = expr._eval_refine(assumptions) + ref_expr = expr._eval_refine() if ref_expr is not None: return ref_expr name = expr.__class__.__name__ diff --git a/sympy/core/evalf.py b/sympy/core/evalf.py index ed767caf38..9e75733a05 100644 --- a/sympy/core/evalf.py +++ b/sympy/core/evalf.py @@ -1281,19 +1281,20 @@ def evalf(x, prec, options): # Fall back to ordinary evalf if possible if 'subs' in options: x = x.subs(evalf_subs(prec, options['subs'])) - re, im = x._eval_evalf(prec).as_real_imag() + xe = x._eval_evalf(prec) + re, im = xe.as_real_imag() if re.has(re_) or im.has(im_): raise NotImplementedError if re == 0: re = None reprec = None - else: + elif re.is_number: re = re._to_mpmath(prec, allow_ints=False)._mpf_ reprec = prec if im == 0: im = None imprec = None - else: + elif im.is_number: im = im._to_mpmath(prec, allow_ints=False)._mpf_ imprec = prec r = re, im, reprec, imprec diff --git a/sympy/core/numbers.py b/sympy/core/numbers.py index 43c8e7c5a5..765b546b28 100644 --- a/sympy/core/numbers.py +++ b/sympy/core/numbers.py @@ -1587,11 +1587,34 @@ def factors(self, limit=None, use_trial=True, use_rho=False, smaller than limit (or cheap to compute). Special methods of factoring are disabled by default so that only trial division is used. """ - from sympy.ntheory import factorrat + from sympy.ntheory import factorint - return factorrat(self, limit=limit, use_trial=use_trial, + f = factorint(self.p, limit=limit, use_trial=use_trial, use_rho=use_rho, use_pm1=use_pm1, verbose=verbose).copy() + f = defaultdict(int, f) + for p, e in factorint(self.q, limit=limit, + use_trial=use_trial, + use_rho=use_rho, + use_pm1=use_pm1, + verbose=verbose).items(): + f[p] += -e + + if len(f) > 1 and 1 in f: + del f[1] + if not f: + f = {1: 1} + if not visual: + return dict(f) + else: + if -1 in f: + f.pop(-1) + args = [S.NegativeOne] + else: + args = [] + args.extend([Pow(*i, evaluate=False) + for i in sorted(f.items())]) + return Mul(*args, evaluate=False) @_sympifyit('other', NotImplemented) def gcd(self, other): @@ -2275,8 +2298,7 @@ def factors(limit=None, use_trial=True, use_rho=False, use_pm1=False, verbose=False, visual=False): if visual: return S.One - else: - return {} + return {1: 1} class NegativeOne(with_metaclass(Singleton, IntegerConstant)): diff --git a/sympy/core/operations.py b/sympy/core/operations.py index 30b1fcb74d..a0869f8459 100644 --- a/sympy/core/operations.py +++ b/sympy/core/operations.py @@ -304,32 +304,36 @@ def _eval_evalf(self, prec): walks the args of the non-number part recursively (doing the same thing). 
""" - from sympy import Symbol - from sympy.core.function import AppliedUndef - x, tail = self.as_independent(Symbol, AppliedUndef) - - # if x is an AssocOp Function then the _evalf below will - # call _eval_evalf (here) so we must break the recursion - if not (tail is self.identity or - isinstance(x, AssocOp) and x.is_Function): - # here, we have a number so we just call to _evalf with prec; - # prec is not the same as n, it is the binary precision so - # that's why we don't call to evalf. - x = x._evalf(prec) if x is not self.identity else self.identity - args = [] - for a in self.func.make_args(tail): - # here we call to _eval_evalf since we don't know what we - # are dealing with and all other _eval_evalf routines should - # be doing the same thing (i.e. taking binary prec and - # finding the evalf-able args) - newa = a._eval_evalf(prec) - if newa is None: - args.append(a) - else: - args.append(newa) - if not _aresame(tuple(args), self.func.make_args(tail)): - tail = self.func(*args) - return self.func(x, tail) + from .add import Add + from .mul import Mul + from .symbol import Symbol + from .function import AppliedUndef + if isinstance(self, (Mul, Add)): + x, tail = self.as_independent(Symbol, AppliedUndef) + # if x is an AssocOp Function then the _evalf below will + # call _eval_evalf (here) so we must break the recursion + if not (tail is self.identity or + isinstance(x, AssocOp) and x.is_Function or + x is self.identity and isinstance(tail, AssocOp)): + # here, we have a number so we just call to _evalf with prec; + # prec is not the same as n, it is the binary precision so + # that's why we don't call to evalf. + x = x._evalf(prec) if x is not self.identity else self.identity + args = [] + tail_args = tuple(self.func.make_args(tail)) + for a in tail_args: + # here we call to _eval_evalf since we don't know what we + # are dealing with and all other _eval_evalf routines should + # be doing the same thing (i.e. 
taking binary prec and + # finding the evalf-able args) + newa = a._eval_evalf(prec) + if newa is None: + args.append(a) + else: + args.append(newa) + if not _aresame(tuple(args), tail_args): + tail = self.func(*args) + return self.func(x, tail) # this is the same as above, but there were no pure-number args to # deal with diff --git a/sympy/core/power.py b/sympy/core/power.py index 3eaa577d84..3b7d7fad26 100644 --- a/sympy/core/power.py +++ b/sympy/core/power.py @@ -220,13 +220,12 @@ def exp(self): def class_key(cls): return 3, 2, cls.__name__ - def _eval_refine(self, assumptions): - from sympy.assumptions.ask import ask, Q + def _eval_refine(self): b, e = self.as_base_exp() - if ask(Q.integer(e), assumptions) and _coeff_isneg(b): - if ask(Q.even(e), assumptions): + if e.is_integer and _coeff_isneg(b): + if e.is_even: return Pow(-b, e) - elif ask(Q.odd(e), assumptions): + elif e.is_odd: return -Pow(-b, e) def _eval_power(self, other): diff --git a/sympy/core/relational.py b/sympy/core/relational.py index 10e984362c..136e305b93 100644 --- a/sympy/core/relational.py +++ b/sympy/core/relational.py @@ -8,7 +8,7 @@ from .sympify import _sympify from .evaluate import global_evaluate -from sympy.logic.boolalg import Boolean, BooleanAtom +from sympy.logic.boolalg import Boolean __all__ = ( 'Rel', 'Eq', 'Ne', 'Lt', 'Le', 'Gt', 'Ge', @@ -310,10 +310,6 @@ def __new__(cls, lhs, rhs=0, **options): if r is not None: return _sympify(r) - # If expression have both Boolean terms - if all(isinstance(i, BooleanAtom) for i in (rhs, lhs)): - return S.false # equal args already evaluated - return Relational.__new__(cls, lhs, rhs, **options) @classmethod diff --git a/sympy/functions/elementary/exponential.py b/sympy/functions/elementary/exponential.py index 6499c40fa7..6bdb6d732e 100644 --- a/sympy/functions/elementary/exponential.py +++ b/sympy/functions/elementary/exponential.py @@ -205,7 +205,7 @@ def fdiff(self, argindex=1): else: raise ArgumentIndexError(self, argindex) - def _eval_refine(self, assumptions): + def _eval_refine(self): from sympy.assumptions import ask, Q arg = self.args[0] if arg.is_Mul: diff --git a/sympy/ntheory/__init__.py b/sympy/ntheory/__init__.py index 808a01cc5b..ccec05575c 100644 --- a/sympy/ntheory/__init__.py +++ b/sympy/ntheory/__init__.py @@ -7,7 +7,7 @@ from .primetest import isprime from .factor_ import divisors, factorint, multiplicity, perfect_power, \ pollard_pm1, pollard_rho, primefactors, totient, trailing, divisor_count, \ - divisor_sigma, factorrat + divisor_sigma from .partitions_ import npartitions from .residue_ntheory import is_primitive_root, is_quad_residue, \ legendre_symbol, jacobi_symbol, n_order, sqrt_mod, quadratic_residues, \ diff --git a/sympy/ntheory/factor_.py b/sympy/ntheory/factor_.py index 9242120dfd..f9aab9867d 100644 --- a/sympy/ntheory/factor_.py +++ b/sympy/ntheory/factor_.py @@ -1171,58 +1171,6 @@ def factorint(n, limit=None, use_trial=True, use_rho=True, use_pm1=True, low, high = high, high*2 -def factorrat(rat, limit=None, use_trial=True, use_rho=True, use_pm1=True, - verbose=False, visual=None): - r""" - Given a Rational ``r``, ``factorrat(r)`` returns a dict containing - the prime factors of ``r`` as keys and their respective multiplicities - as values. 
For example: - - >>> from sympy.ntheory import factorrat - >>> from sympy.core.symbol import S - >>> factorrat(S(8)/9) # 8/9 = (2**3) * (3**-2) - {2: 3, 3: -2} - >>> factorrat(S(-1)/987) # -1/789 = -1 * (3**-1) * (7**-1) * (47**-1) - {-1: 1, 3: -1, 7: -1, 47: -1} - - Please see the docstring for ``factorint`` for detailed explanations - and examples of the following keywords: - - - ``limit``: Integer limit up to which trial division is done - - ``use_trial``: Toggle use of trial division - - ``use_rho``: Toggle use of Pollard's rho method - - ``use_pm1``: Toggle use of Pollard's p-1 method - - ``verbose``: Toggle detailed printing of progress - - ``visual``: Toggle product form of output - """ - from collections import defaultdict - f = factorint(rat.p, limit=limit, use_trial=use_trial, - use_rho=use_rho, use_pm1=use_pm1, - verbose=verbose).copy() - f = defaultdict(int, f) - for p, e in factorint(rat.q, limit=limit, - use_trial=use_trial, - use_rho=use_rho, - use_pm1=use_pm1, - verbose=verbose).items(): - f[p] += -e - - if len(f) > 1 and 1 in f: - del f[1] - if not visual: - return dict(f) - else: - if -1 in f: - f.pop(-1) - args = [S.NegativeOne] - else: - args = [] - args.extend([Pow(*i, evaluate=False) - for i in sorted(f.items())]) - return Mul(*args, evaluate=False) - - - def primefactors(n, limit=None, verbose=False): """Return a sorted list of n's prime factors, ignoring multiplicity and any composite factor that remains if the limit was set too low diff --git a/sympy/physics/units.py b/sympy/physics/units.py index f73c0d558d..9a40fd872e 100644 --- a/sympy/physics/units.py +++ b/sympy/physics/units.py @@ -222,7 +222,6 @@ def free_symbols(self): h = hour = hours = 60*minute day = days = 24*hour -anomalistic_year = anomalistic_years = Rational('365.259636')*day sidereal_year = sidereal_years = Rational('31558149.540')*s tropical_year = tropical_years = Rational('365.24219')*day common_year = common_years = Rational('365')*day diff --git a/sympy/release.py b/sympy/release.py index e9dbd439e6..0573d92582 100644 --- a/sympy/release.py +++ b/sympy/release.py @@ -1,1 +1,1 @@ -__version__ = "1.0.1.dev" +__version__ = "0.7.7.dev" diff --git a/sympy/sets/sets.py b/sympy/sets/sets.py index fb3cee8d95..9d4c10466e 100644 --- a/sympy/sets/sets.py +++ b/sympy/sets/sets.py @@ -12,7 +12,7 @@ from sympy.core.evaluate import global_evaluate from sympy.core.mul import Mul from sympy.core.relational import Eq -from sympy.core.symbol import Symbol, Dummy +from sympy.core.symbol import Symbol from sympy.sets.contains import Contains from sympy.utilities.misc import func_name @@ -1426,21 +1426,23 @@ def __iter__(self): def _handle_finite_sets(args): from sympy.core.logic import fuzzy_and, fuzzy_bool from sympy.core.compatibility import zip_longest - from sympy.utilities.iterables import sift - sifted = sift(args, lambda x: x.is_FiniteSet) - fs_args = sifted.pop(True, []) + new_args = [] + fs_args = [] + for s in args: + if s.is_FiniteSet: + fs_args.append(s) + else: + new_args.append(s) if not fs_args: return s = fs_args[0] fs_args = fs_args[1:] - other = sifted.pop(False, []) - res = [] unk = [] for x in s: c = fuzzy_and(fuzzy_bool(o.contains(x)) - for o in fs_args + other) + for o in fs_args + new_args) if c: res.append(x) elif c is None: @@ -1472,13 +1474,13 @@ def _handle_finite_sets(args): contained = [x for x in symbolic_s_list if sympify(v.contains(x)) is S.true] if contained != symbolic_s_list: - other.append( + new_args.append( v - FiniteSet( *contained, evaluate=False)) else: pass # for coverage 
- other_sets = Intersection(*other) + other_sets = Intersection(*new_args) if not other_sets: return S.EmptySet # b/c we use evaluate=False below res += Intersection( @@ -1489,15 +1491,14 @@ def _handle_finite_sets(args): @staticmethod def reduce(args): """ - Return a simplified intersection by applying rules. + Simplify an intersection using known rules We first start with global rules like - 'if any empty sets, return empty set' and 'distribute unions'. + 'if any empty sets return empty set' and 'distribute any unions' Then we iterate through all pairs and ask the constituent sets if they can simplify themselves with any other constituent """ - from sympy.simplify.simplify import clear_coefficients # ===== Global Rules ===== # If any EmptySets return EmptySet @@ -1507,22 +1508,6 @@ def reduce(args): # Handle Finite sets rv = Intersection._handle_finite_sets(args) if rv is not None: - # simplify symbolic intersection between a FiniteSet - # and an interval - if isinstance(rv, Intersection) and len(rv.args) == 2: - ivl, s = rv.args - if isinstance(s, FiniteSet) and len(s) == 1 and isinstance(ivl, Interval): - e = list(s)[0] - if e.free_symbols: - rhs = Dummy() - e, r = clear_coefficients(e, rhs) - if r != rhs: - iargs = list(ivl.args) - iargs[0] = r.subs(rhs, ivl.start) - iargs[1] = r.subs(rhs, ivl.end) - if iargs[0] > iargs[1]: - iargs = iargs[:2][::-1] + iargs[-2:][::-1] - rv = Intersection(FiniteSet(e), Interval(*iargs), evaluate=False) return rv # If any of the sets are unions, return a Union of Intersections diff --git a/sympy/simplify/simplify.py b/sympy/simplify/simplify.py index 4b78039c56..2b9643de7e 100644 --- a/sympy/simplify/simplify.py +++ b/sympy/simplify/simplify.py @@ -9,7 +9,7 @@ from sympy.core.compatibility import (iterable, ordered, range, as_int) from sympy.core.numbers import Float, I, pi, Rational, Integer -from sympy.core.function import expand_log, count_ops, _mexpand, _coeff_isneg +from sympy.core.function import expand_log, count_ops, _mexpand from sympy.core.rules import Transform from sympy.core.evaluate import global_evaluate from sympy.functions import ( @@ -1257,48 +1257,3 @@ def _real_to_rational(expr, tolerance=None): r = Integer(0) reps[key] = r return p.subs(reps, simultaneous=True) - - -def clear_coefficients(expr, rhs=S.Zero): - """Return `p, r` where `p` is the expression obtained when Rational - additive and multiplicative coefficients of `expr` have been stripped - away in a naive fashion (i.e. without simplification). The operations - needed to remove the coefficients will be applied to `rhs` and returned - as `r`. - - Examples - ======== - - >>> from sympy.simplify.simplify import clear_coefficients - >>> from sympy.abc import x, y - >>> from sympy import Dummy - >>> expr = 4*y*(6*x + 3) - >>> clear_coefficients(expr - 2) - (y*(2*x + 1), 1/6) - - When solving 2 or more expressions like `expr = a`, - `expr = b`, etc..., it is advantageous to provide a Dummy symbol - for `rhs` and simply replace it with `a`, `b`, etc... in `r`. - - >>> rhs = Dummy('rhs') - >>> clear_coefficients(expr, rhs) - (y*(2*x + 1), _rhs/12) - >>> _[1].subs(rhs, 2) - 1/6 - """ - was = None - free = expr.free_symbols - while was != expr: - was = expr - m, expr = ( - expr.as_content_primitive() - if free else - factor_terms(expr).as_coeff_Mul()) - rhs /= m - c, expr = expr.as_coeff_Add() - rhs -= c - expr = signsimp(expr, evaluate = False) - if _coeff_isneg(expr): - expr = -expr - rhs = -rhs - return expr, rhs
`nfloat` changes the arguments inside `Max`. It seems that `nfloat` applied to the product of a `Max(symbol, number)` call and another expression changes `number -> 1.0` if `number <= 0`. Am I misunderstanding the documentation?

|expr|result|expected ?|
|:---|:-----|----------|
|`nfloat(Max(y, 0))`|`Max(0, y)`|yes|
|`nfloat(x * Max(y, 0))`|`x*Max(1.0, y)`|no|
|`nfloat(Max(y, -1.1))`|`Max(-1.1, y)`|yes|
|`nfloat(x * Max(y, -1.1))`|`x*Max(1.0, y)`|no|
|`nfloat(Max(y, 1.1))`|`Max(1.1, y)`|yes|
|`nfloat(x * Max(y, 1.1))`|`x*Max(1.1, y)`|yes|

```python
from sympy.functions.elementary.miscellaneous import Max
from sympy import nfloat, symbols
x, y, z = symbols('x y z')
```
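A minimal reproduction of the expected behaviour, mirroring the assertions added in `test_issue_10395` from the test patch; with the fix applied, `nfloat` leaves the `Max` arguments alone:

```python
from sympy import nfloat, symbols, Max

x, y = symbols('x y')

# Non-positive constants inside Max must survive nfloat unchanged.
eq = x * Max(0, y)
assert nfloat(eq) == eq

eq = x * Max(y, -1.1)
assert nfloat(eq) == eq

# Evaluating Max numerically still works.
assert Max(y, 4).n() == Max(4.0, y)
```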
sympy/sympy
diff --git a/sympy/assumptions/tests/test_refine.py b/sympy/assumptions/tests/test_refine.py index 00a16d4195..ce94837680 100644 --- a/sympy/assumptions/tests/test_refine.py +++ b/sympy/assumptions/tests/test_refine.py @@ -156,7 +156,7 @@ def my_func(*args): def test_eval_refine(): from sympy.core.expr import Expr class MockExpr(Expr): - def _eval_refine(self, assumptions): + def _eval_refine(self): return True mock_obj = MockExpr() diff --git a/sympy/core/tests/test_evalf.py b/sympy/core/tests/test_evalf.py index a1775324d2..5b781713cd 100644 --- a/sympy/core/tests/test_evalf.py +++ b/sympy/core/tests/test_evalf.py @@ -1,7 +1,7 @@ -from sympy import (Abs, Add, atan, ceiling, cos, E, Eq, exp, factorial, - fibonacci, floor, Function, GoldenRatio, I, Integral, - integrate, log, Mul, N, oo, pi, Pow, product, Product, - Rational, S, Sum, sin, sqrt, sstr, sympify, Symbol) +from sympy import (Abs, Add, atan, ceiling, cos, E, Eq, exp, + factorial, fibonacci, floor, Function, GoldenRatio, I, Integral, + integrate, log, Mul, N, oo, pi, Pow, product, Product, + Rational, S, Sum, sin, sqrt, sstr, sympify, Symbol, Max, nfloat) from sympy.core.evalf import (complex_accuracy, PrecisionExhausted, scaled_zero, get_integer_part, as_mpmath) from mpmath import inf, ninf @@ -495,3 +495,11 @@ def test_AssocOp_Function(): # should raise a value error because the first arg computes # a non-comparable (prec=1) imaginary part raises(ValueError, lambda: e._eval_evalf(2)) + + +def test_issue_10395(): + eq = x*Max(0, y) + assert nfloat(eq) == eq + eq = x*Max(y, -1.1) + assert nfloat(eq) == eq + assert Max(y, 4).n() == Max(4.0, y) diff --git a/sympy/core/tests/test_numbers.py b/sympy/core/tests/test_numbers.py index 4cf49c128b..c434a1406c 100644 --- a/sympy/core/tests/test_numbers.py +++ b/sympy/core/tests/test_numbers.py @@ -1097,7 +1097,7 @@ def test_Integer_factors(): def F(i): return Integer(i).factors() - assert F(1) == {} + assert F(1) == {1: 1} assert F(2) == {2: 1} assert F(3) == {3: 1} assert F(4) == {2: 2} @@ -1158,6 +1158,10 @@ def F(p, q, visual=None): assert F(2, 9) == {2: 1, 3: -2} assert F(2, 15) == {2: 1, 3: -1, 5: -1} assert F(6, 10) == {3: 1, 5: -1} + assert str(F(12, 1, visual=True)) == '2**2*3**1' + assert str(F(1, 1, visual=True)) == '1' + assert str(F(25, 14, visual=True)) == '5**2/(2*7)' + assert str(F(-25, 14*9, visual=True)) == '-5**2/(2*3**2*7)' def test_issue_4107(): diff --git a/sympy/core/tests/test_relational.py b/sympy/core/tests/test_relational.py index 50ecadb032..4309441a6d 100644 --- a/sympy/core/tests/test_relational.py +++ b/sympy/core/tests/test_relational.py @@ -661,9 +661,3 @@ def test_issue_10304(): assert d.is_comparable is False # if this fails, find a new d e = 1 + d*I assert simplify(Eq(e, 0)) is S.false - -def test_issue_10633(): - assert Eq(True, False) == False - assert Eq(False, True) == False - assert Eq(True, True) == True - assert Eq(False, False) == True diff --git a/sympy/ntheory/tests/test_factor_.py b/sympy/ntheory/tests/test_factor_.py index ac9f3f4de3..851ddf6a53 100644 --- a/sympy/ntheory/tests/test_factor_.py +++ b/sympy/ntheory/tests/test_factor_.py @@ -3,7 +3,7 @@ from sympy.core.compatibility import long, range from sympy.ntheory import totient, factorint, primefactors, divisors, nextprime, pollard_rho, \ - perfect_power, multiplicity, trailing, divisor_count, primorial, pollard_pm1, divisor_sigma, factorrat + perfect_power, multiplicity, trailing, divisor_count, primorial, pollard_pm1, divisor_sigma from sympy.ntheory.factor_ import smoothness, 
smoothness_p, \ antidivisors, antidivisor_count, core, digits, udivisors, udivisor_sigma, \ @@ -390,13 +390,6 @@ def test_visual_factorint(): assert -1 in factorint(-42, visual=True).args -def test_factorrat(): - assert str(factorrat(S(12)/1, visual=True)) == '2**2*3**1' - assert str(factorrat(S(1)/1, visual=True)) == '1' - assert str(factorrat(S(25)/14, visual=True)) == '5**2/(2*7)' - assert str(factorrat(S(-25)/14/9, visual=True)) == '-5**2/(2*3**2*7)' - - def test_visual_io(): sm = smoothness_p fi = factorint diff --git a/sympy/sets/tests/test_sets.py b/sympy/sets/tests/test_sets.py index 31334dc8b7..8a9d22c3b8 100644 --- a/sympy/sets/tests/test_sets.py +++ b/sympy/sets/tests/test_sets.py @@ -3,7 +3,7 @@ FiniteSet, Intersection, imageset, I, true, false, ProductSet, E, sqrt, Complement, EmptySet, sin, cos, Lambda, ImageSet, pi, Eq, Pow, Contains, Sum, rootof, SymmetricDifference, Piecewise, - Matrix, signsimp) + Matrix) from mpmath import mpi from sympy.core.compatibility import range @@ -982,18 +982,3 @@ def test_issue_10326(): assert Interval(1, 2) in FiniteSet(Interval(0, 5), Interval(1, 2)) assert Interval(-oo, oo).contains(oo) is S.false assert Interval(-oo, oo).contains(-oo) is S.false - - -def test_issue_10285(): - assert FiniteSet(-x - 1).intersect(Interval.Ropen(1, 2)) == \ - FiniteSet(x).intersect(Interval.Lopen(-3, -2)) - eq = -x - 2*(-x - y) - s = signsimp(eq) - ivl = Interval.open(0, 1) - assert FiniteSet(eq).intersect(ivl) == FiniteSet(s).intersect(ivl) - assert FiniteSet(-eq).intersect(ivl) == \ - FiniteSet(s).intersect(Interval.open(-1, 0)) - eq -= 1 - ivl = Interval.Lopen(1, oo) - assert FiniteSet(eq).intersect(ivl) == \ - FiniteSet(s).intersect(Interval.Lopen(2, oo)) diff --git a/sympy/simplify/tests/test_simplify.py b/sympy/simplify/tests/test_simplify.py index 63f3a209c6..88c6ebc26f 100644 --- a/sympy/simplify/tests/test_simplify.py +++ b/sympy/simplify/tests/test_simplify.py @@ -635,12 +635,3 @@ def inverse(self, argindex=1): assert simplify(f(g(x))) == x assert simplify(f(g(sin(x)**2 + cos(x)**2))) == 1 assert simplify(f(g(x, y))) == f(g(x, y)) - - -def test_clear_coefficients(): - from sympy.simplify.simplify import clear_coefficients - assert clear_coefficients(4*y*(6*x + 3)) == (y*(2*x + 1), 0) - assert clear_coefficients(4*y*(6*x + 3) - 2) == (y*(2*x + 1), S(1)/6) - assert clear_coefficients(4*y*(6*x + 3) - 2, x) == (y*(2*x + 1), x/12 + S(1)/6) - assert clear_coefficients(sqrt(2) - 2) == (sqrt(2), 2) - assert clear_coefficients(4*sqrt(2) - 2) == (sqrt(2), S.Half) diff --git a/sympy/solvers/tests/test_solveset.py b/sympy/solvers/tests/test_solveset.py index 7af972fcdd..ee49f9cc72 100644 --- a/sympy/solvers/tests/test_solveset.py +++ b/sympy/solvers/tests/test_solveset.py @@ -139,13 +139,19 @@ def ireal(x, s=S.Reals): n = Dummy('n') x = Symbol('x') - h1 = Intersection(Interval(-oo, -3), FiniteSet(-a + b - 3), + h1 = Intersection(Interval(-3, oo), FiniteSet(a + b - 3), + imageset(Lambda(n, -n + a - 3), Interval(-oo, 0))) + + h2 = Intersection(Interval(-oo, -3), FiniteSet(-a + b - 3), imageset(Lambda(n, n - a - 3), Interval(0, oo))) - h2 = Intersection(Interval(-3, oo), FiniteSet(a - b - 3), + h3 = Intersection(Interval(-3, oo), FiniteSet(a - b - 3), imageset(Lambda(n, -n + a - 3), Interval(0, oo))) - assert invert_real(Abs(Abs(x + 3) - a) - b, 0, x) == (x, Union(h1, h2)) + h4 = Intersection(Interval(-oo, -3), FiniteSet(-a - b - 3), + imageset(Lambda(n, n - a - 3), Interval(-oo, 0))) + + assert invert_real(Abs(Abs(x + 3) - a) - b, 0, x) == (x, Union(h1, h2, h3, 
h4)) def test_invert_complex():
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 0, "test_score": 3 }, "num_modified_files": 14 }
0.7
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "mpmath>=0.19", "pytest", "pytest-cov", "pytest-xdist", "pytest-mock", "pytest-asyncio" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.7", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi @ file:///croot/certifi_1671487769961/work/certifi coverage==7.2.7 exceptiongroup==1.2.2 execnet==2.0.2 importlib-metadata==6.7.0 iniconfig==2.0.0 mpmath==1.3.0 packaging==24.0 pluggy==1.2.0 pytest==7.4.4 pytest-asyncio==0.21.2 pytest-cov==4.1.0 pytest-mock==3.11.1 pytest-xdist==3.5.0 -e git+https://github.com/sympy/sympy.git@bb46f5dd1ee5e6a98ac3bd442bed2737dbb754d6#egg=sympy tomli==2.0.1 typing_extensions==4.7.1 zipp==3.15.0
name: sympy channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2022.12.7=py37h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=22.3.1=py37h06a4308_0 - python=3.7.16=h7a1cb2a_0 - readline=8.2=h5eee18b_0 - setuptools=65.6.3=py37h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.38.4=py37h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - coverage==7.2.7 - exceptiongroup==1.2.2 - execnet==2.0.2 - importlib-metadata==6.7.0 - iniconfig==2.0.0 - mpmath==1.3.0 - packaging==24.0 - pluggy==1.2.0 - pytest==7.4.4 - pytest-asyncio==0.21.2 - pytest-cov==4.1.0 - pytest-mock==3.11.1 - pytest-xdist==3.5.0 - tomli==2.0.1 - typing-extensions==4.7.1 - zipp==3.15.0 prefix: /opt/conda/envs/sympy
[ "sympy/assumptions/tests/test_refine.py::test_eval_refine", "sympy/core/tests/test_evalf.py::test_issue_10395", "sympy/core/tests/test_numbers.py::test_Integer_factors", "sympy/core/tests/test_numbers.py::test_Rational_factors", "sympy/solvers/tests/test_solveset.py::test_invert_real" ]
[ "sympy/core/tests/test_evalf.py::test_evalf_complex_bug", "sympy/core/tests/test_evalf.py::test_evalf_complex_powers_bug", "sympy/core/tests/test_numbers.py::test_mpmath_issues", "sympy/core/tests/test_relational.py::test_multivariate_relational_as_set", "sympy/core/tests/test_relational.py::test_issue_8444", "sympy/sets/tests/test_sets.py::test_image_Intersection", "sympy/sets/tests/test_sets.py::test_union_boundary_of_joining_sets", "sympy/simplify/tests/test_simplify.py::test_factorial_simplify", "sympy/simplify/tests/test_simplify.py::test_simplify_float_vs_integer", "sympy/solvers/tests/test_solveset.py::test_solve_sqrt_fail", "sympy/solvers/tests/test_solveset.py::test_uselogcombine_1", "sympy/solvers/tests/test_solveset.py::test_uselogcombine_2", "sympy/solvers/tests/test_solveset.py::test_rewrite_trigh", "sympy/solvers/tests/test_solveset.py::test_solve_only_exp_2", "sympy/solvers/tests/test_solveset.py::test_solve_quintics", "sympy/solvers/tests/test_solveset.py::test_solve_trig_abs", "sympy/solvers/tests/test_solveset.py::test_solve_trig_simplified", "sympy/solvers/tests/test_solveset.py::test_solve_lambert", "sympy/solvers/tests/test_solveset.py::test_conditionset_equality", "sympy/solvers/tests/test_solveset.py::test_issue_failing_pow" ]
[ "sympy/assumptions/tests/test_refine.py::test_Abs", "sympy/assumptions/tests/test_refine.py::test_pow", "sympy/assumptions/tests/test_refine.py::test_exp", "sympy/assumptions/tests/test_refine.py::test_Relational", "sympy/assumptions/tests/test_refine.py::test_Piecewise", "sympy/assumptions/tests/test_refine.py::test_atan2", "sympy/assumptions/tests/test_refine.py::test_func_args", "sympy/core/tests/test_evalf.py::test_evalf_helpers", "sympy/core/tests/test_evalf.py::test_evalf_basic", "sympy/core/tests/test_evalf.py::test_cancellation", "sympy/core/tests/test_evalf.py::test_evalf_powers", "sympy/core/tests/test_evalf.py::test_evalf_rump", "sympy/core/tests/test_evalf.py::test_evalf_complex", "sympy/core/tests/test_evalf.py::test_evalf_complex_powers", "sympy/core/tests/test_evalf.py::test_evalf_exponentiation", "sympy/core/tests/test_evalf.py::test_evalf_complex_cancellation", "sympy/core/tests/test_evalf.py::test_evalf_logs", "sympy/core/tests/test_evalf.py::test_evalf_trig", "sympy/core/tests/test_evalf.py::test_evalf_near_integers", "sympy/core/tests/test_evalf.py::test_evalf_ramanujan", "sympy/core/tests/test_evalf.py::test_evalf_bugs", "sympy/core/tests/test_evalf.py::test_evalf_integer_parts", "sympy/core/tests/test_evalf.py::test_evalf_trig_zero_detection", "sympy/core/tests/test_evalf.py::test_evalf_sum", "sympy/core/tests/test_evalf.py::test_evalf_divergent_series", "sympy/core/tests/test_evalf.py::test_evalf_product", "sympy/core/tests/test_evalf.py::test_evalf_py_methods", "sympy/core/tests/test_evalf.py::test_evalf_power_subs_bugs", "sympy/core/tests/test_evalf.py::test_evalf_arguments", "sympy/core/tests/test_evalf.py::test_implemented_function_evalf", "sympy/core/tests/test_evalf.py::test_evaluate_false", "sympy/core/tests/test_evalf.py::test_evalf_relational", "sympy/core/tests/test_evalf.py::test_issue_5486", "sympy/core/tests/test_evalf.py::test_issue_5486_bug", "sympy/core/tests/test_evalf.py::test_bugs", "sympy/core/tests/test_evalf.py::test_subs", "sympy/core/tests/test_evalf.py::test_issue_4956_5204", "sympy/core/tests/test_evalf.py::test_old_docstring", "sympy/core/tests/test_evalf.py::test_issue_4806", "sympy/core/tests/test_evalf.py::test_evalf_mul", "sympy/core/tests/test_evalf.py::test_scaled_zero", "sympy/core/tests/test_evalf.py::test_chop_value", "sympy/core/tests/test_evalf.py::test_infinities", "sympy/core/tests/test_evalf.py::test_to_mpmath", "sympy/core/tests/test_evalf.py::test_issue_6632_evalf", "sympy/core/tests/test_evalf.py::test_issue_4945", "sympy/core/tests/test_evalf.py::test_evalf_integral", "sympy/core/tests/test_evalf.py::test_issue_8821_highprec_from_str", "sympy/core/tests/test_evalf.py::test_issue_8853", "sympy/core/tests/test_evalf.py::test_issue_9326", "sympy/core/tests/test_evalf.py::test_issue_10323", "sympy/core/tests/test_evalf.py::test_AssocOp_Function", "sympy/core/tests/test_numbers.py::test_integers_cache", "sympy/core/tests/test_numbers.py::test_seterr", "sympy/core/tests/test_numbers.py::test_mod", "sympy/core/tests/test_numbers.py::test_divmod", "sympy/core/tests/test_numbers.py::test_igcd", "sympy/core/tests/test_numbers.py::test_ilcm", "sympy/core/tests/test_numbers.py::test_igcdex", "sympy/core/tests/test_numbers.py::test_Integer_new", "sympy/core/tests/test_numbers.py::test_Rational_new", "sympy/core/tests/test_numbers.py::test_Number_new", "sympy/core/tests/test_numbers.py::test_Rational_cmp", "sympy/core/tests/test_numbers.py::test_Float", "sympy/core/tests/test_numbers.py::test_Float_default_to_highprec_from_str", 
"sympy/core/tests/test_numbers.py::test_Float_eval", "sympy/core/tests/test_numbers.py::test_Float_issue_2107", "sympy/core/tests/test_numbers.py::test_Infinity", "sympy/core/tests/test_numbers.py::test_Infinity_2", "sympy/core/tests/test_numbers.py::test_Mul_Infinity_Zero", "sympy/core/tests/test_numbers.py::test_Div_By_Zero", "sympy/core/tests/test_numbers.py::test_Infinity_inequations", "sympy/core/tests/test_numbers.py::test_NaN", "sympy/core/tests/test_numbers.py::test_special_numbers", "sympy/core/tests/test_numbers.py::test_powers", "sympy/core/tests/test_numbers.py::test_integer_nthroot_overflow", "sympy/core/tests/test_numbers.py::test_powers_Integer", "sympy/core/tests/test_numbers.py::test_powers_Rational", "sympy/core/tests/test_numbers.py::test_powers_Float", "sympy/core/tests/test_numbers.py::test_abs1", "sympy/core/tests/test_numbers.py::test_accept_int", "sympy/core/tests/test_numbers.py::test_dont_accept_str", "sympy/core/tests/test_numbers.py::test_int", "sympy/core/tests/test_numbers.py::test_long", "sympy/core/tests/test_numbers.py::test_real_bug", "sympy/core/tests/test_numbers.py::test_bug_sqrt", "sympy/core/tests/test_numbers.py::test_pi_Pi", "sympy/core/tests/test_numbers.py::test_no_len", "sympy/core/tests/test_numbers.py::test_issue_3321", "sympy/core/tests/test_numbers.py::test_issue_3692", "sympy/core/tests/test_numbers.py::test_issue_3423", "sympy/core/tests/test_numbers.py::test_issue_3449", "sympy/core/tests/test_numbers.py::test_issue_4107", "sympy/core/tests/test_numbers.py::test_IntegerInteger", "sympy/core/tests/test_numbers.py::test_Rational_gcd_lcm_cofactors", "sympy/core/tests/test_numbers.py::test_Float_gcd_lcm_cofactors", "sympy/core/tests/test_numbers.py::test_issue_4611", "sympy/core/tests/test_numbers.py::test_conversion_to_mpmath", "sympy/core/tests/test_numbers.py::test_relational", "sympy/core/tests/test_numbers.py::test_Integer_as_index", "sympy/core/tests/test_numbers.py::test_Rational_int", "sympy/core/tests/test_numbers.py::test_zoo", "sympy/core/tests/test_numbers.py::test_issue_4122", "sympy/core/tests/test_numbers.py::test_GoldenRatio_expand", "sympy/core/tests/test_numbers.py::test_as_content_primitive", "sympy/core/tests/test_numbers.py::test_hashing_sympy_integers", "sympy/core/tests/test_numbers.py::test_issue_4172", "sympy/core/tests/test_numbers.py::test_Catalan_EulerGamma_prec", "sympy/core/tests/test_numbers.py::test_Float_eq", "sympy/core/tests/test_numbers.py::test_int_NumberSymbols", "sympy/core/tests/test_numbers.py::test_issue_6640", "sympy/core/tests/test_numbers.py::test_issue_6349", "sympy/core/tests/test_numbers.py::test_mpf_norm", "sympy/core/tests/test_numbers.py::test_latex", "sympy/core/tests/test_numbers.py::test_issue_7742", "sympy/core/tests/test_numbers.py::test_simplify_AlgebraicNumber", "sympy/core/tests/test_numbers.py::test_Float_idempotence", "sympy/core/tests/test_numbers.py::test_comp", "sympy/core/tests/test_numbers.py::test_issue_9491", "sympy/core/tests/test_numbers.py::test_issue_10063", "sympy/core/tests/test_numbers.py::test_issue_10020", "sympy/core/tests/test_numbers.py::test_invert_numbers", "sympy/core/tests/test_numbers.py::test_mod_inverse", "sympy/core/tests/test_relational.py::test_rel_ne", "sympy/core/tests/test_relational.py::test_rel_subs", "sympy/core/tests/test_relational.py::test_wrappers", "sympy/core/tests/test_relational.py::test_Eq", "sympy/core/tests/test_relational.py::test_rel_Infinity", "sympy/core/tests/test_relational.py::test_bool", 
"sympy/core/tests/test_relational.py::test_rich_cmp", "sympy/core/tests/test_relational.py::test_doit", "sympy/core/tests/test_relational.py::test_new_relational", "sympy/core/tests/test_relational.py::test_relational_bool_output", "sympy/core/tests/test_relational.py::test_relational_logic_symbols", "sympy/core/tests/test_relational.py::test_univariate_relational_as_set", "sympy/core/tests/test_relational.py::test_Not", "sympy/core/tests/test_relational.py::test_evaluate", "sympy/core/tests/test_relational.py::test_imaginary_compare_raises_TypeError", "sympy/core/tests/test_relational.py::test_complex_compare_not_real", "sympy/core/tests/test_relational.py::test_imaginary_and_inf_compare_raises_TypeError", "sympy/core/tests/test_relational.py::test_complex_pure_imag_not_ordered", "sympy/core/tests/test_relational.py::test_x_minus_y_not_same_as_x_lt_y", "sympy/core/tests/test_relational.py::test_nan_equality_exceptions", "sympy/core/tests/test_relational.py::test_nan_inequality_raise_errors", "sympy/core/tests/test_relational.py::test_nan_complex_inequalities", "sympy/core/tests/test_relational.py::test_complex_infinity_inequalities", "sympy/core/tests/test_relational.py::test_inequalities_symbol_name_same", "sympy/core/tests/test_relational.py::test_inequalities_symbol_name_same_complex", "sympy/core/tests/test_relational.py::test_inequalities_cant_sympify_other", "sympy/core/tests/test_relational.py::test_ineq_avoid_wild_symbol_flip", "sympy/core/tests/test_relational.py::test_issue_8245", "sympy/core/tests/test_relational.py::test_issue_8449", "sympy/core/tests/test_relational.py::test_simplify", "sympy/core/tests/test_relational.py::test_equals", "sympy/core/tests/test_relational.py::test_reversed", "sympy/core/tests/test_relational.py::test_canonical", "sympy/core/tests/test_relational.py::test_issue_10304", "sympy/ntheory/tests/test_factor_.py::test_trailing", "sympy/ntheory/tests/test_factor_.py::test_multiplicity", "sympy/ntheory/tests/test_factor_.py::test_perfect_power", "sympy/ntheory/tests/test_factor_.py::test_factorint", "sympy/ntheory/tests/test_factor_.py::test_divisors_and_divisor_count", "sympy/ntheory/tests/test_factor_.py::test_udivisors_and_udivisor_count", "sympy/ntheory/tests/test_factor_.py::test_issue_6981", "sympy/ntheory/tests/test_factor_.py::test_totient", "sympy/ntheory/tests/test_factor_.py::test_divisor_sigma", "sympy/ntheory/tests/test_factor_.py::test_udivisor_sigma", "sympy/ntheory/tests/test_factor_.py::test_issue_4356", "sympy/ntheory/tests/test_factor_.py::test_divisors", "sympy/ntheory/tests/test_factor_.py::test_divisor_count", "sympy/ntheory/tests/test_factor_.py::test_antidivisors", "sympy/ntheory/tests/test_factor_.py::test_antidivisor_count", "sympy/ntheory/tests/test_factor_.py::test_smoothness_and_smoothness_p", "sympy/ntheory/tests/test_factor_.py::test_visual_factorint", "sympy/ntheory/tests/test_factor_.py::test_visual_io", "sympy/ntheory/tests/test_factor_.py::test_core", "sympy/ntheory/tests/test_factor_.py::test_digits", "sympy/sets/tests/test_sets.py::test_interval_arguments", "sympy/sets/tests/test_sets.py::test_interval_symbolic_end_points", "sympy/sets/tests/test_sets.py::test_union", "sympy/sets/tests/test_sets.py::test_difference", "sympy/sets/tests/test_sets.py::test_Complement", "sympy/sets/tests/test_sets.py::test_complement", "sympy/sets/tests/test_sets.py::test_intersect", "sympy/sets/tests/test_sets.py::test_intersection", "sympy/sets/tests/test_sets.py::test_issue_9623", "sympy/sets/tests/test_sets.py::test_is_disjoint", 
"sympy/sets/tests/test_sets.py::test_ProductSet_of_single_arg_is_arg", "sympy/sets/tests/test_sets.py::test_interval_subs", "sympy/sets/tests/test_sets.py::test_interval_to_mpi", "sympy/sets/tests/test_sets.py::test_measure", "sympy/sets/tests/test_sets.py::test_is_subset", "sympy/sets/tests/test_sets.py::test_is_proper_subset", "sympy/sets/tests/test_sets.py::test_is_superset", "sympy/sets/tests/test_sets.py::test_is_proper_superset", "sympy/sets/tests/test_sets.py::test_contains", "sympy/sets/tests/test_sets.py::test_interval_symbolic", "sympy/sets/tests/test_sets.py::test_union_contains", "sympy/sets/tests/test_sets.py::test_is_number", "sympy/sets/tests/test_sets.py::test_Interval_is_left_unbounded", "sympy/sets/tests/test_sets.py::test_Interval_is_right_unbounded", "sympy/sets/tests/test_sets.py::test_Interval_as_relational", "sympy/sets/tests/test_sets.py::test_Finite_as_relational", "sympy/sets/tests/test_sets.py::test_Union_as_relational", "sympy/sets/tests/test_sets.py::test_Intersection_as_relational", "sympy/sets/tests/test_sets.py::test_EmptySet", "sympy/sets/tests/test_sets.py::test_finite_basic", "sympy/sets/tests/test_sets.py::test_powerset", "sympy/sets/tests/test_sets.py::test_product_basic", "sympy/sets/tests/test_sets.py::test_real", "sympy/sets/tests/test_sets.py::test_supinf", "sympy/sets/tests/test_sets.py::test_universalset", "sympy/sets/tests/test_sets.py::test_Union_of_ProductSets_shares", "sympy/sets/tests/test_sets.py::test_Interval_free_symbols", "sympy/sets/tests/test_sets.py::test_image_interval", "sympy/sets/tests/test_sets.py::test_image_piecewise", "sympy/sets/tests/test_sets.py::test_image_FiniteSet", "sympy/sets/tests/test_sets.py::test_image_Union", "sympy/sets/tests/test_sets.py::test_image_EmptySet", "sympy/sets/tests/test_sets.py::test_issue_5724_7680", "sympy/sets/tests/test_sets.py::test_boundary", "sympy/sets/tests/test_sets.py::test_boundary_Union", "sympy/sets/tests/test_sets.py::test_boundary_ProductSet", "sympy/sets/tests/test_sets.py::test_boundary_ProductSet_line", "sympy/sets/tests/test_sets.py::test_is_open", "sympy/sets/tests/test_sets.py::test_is_closed", "sympy/sets/tests/test_sets.py::test_closure", "sympy/sets/tests/test_sets.py::test_interior", "sympy/sets/tests/test_sets.py::test_issue_7841", "sympy/sets/tests/test_sets.py::test_Eq", "sympy/sets/tests/test_sets.py::test_SymmetricDifference", "sympy/sets/tests/test_sets.py::test_issue_9536", "sympy/sets/tests/test_sets.py::test_issue_9637", "sympy/sets/tests/test_sets.py::test_issue_9808", "sympy/sets/tests/test_sets.py::test_issue_9956", "sympy/sets/tests/test_sets.py::test_issue_Symbol_inter", "sympy/sets/tests/test_sets.py::test_issue_10113", "sympy/sets/tests/test_sets.py::test_issue_10248", "sympy/sets/tests/test_sets.py::test_issue_9447", "sympy/sets/tests/test_sets.py::test_issue_10337", "sympy/sets/tests/test_sets.py::test_issue_10326", "sympy/simplify/tests/test_simplify.py::test_issue_7263", "sympy/simplify/tests/test_simplify.py::test_simplify_expr", "sympy/simplify/tests/test_simplify.py::test_issue_3557", "sympy/simplify/tests/test_simplify.py::test_simplify_other", "sympy/simplify/tests/test_simplify.py::test_simplify_complex", "sympy/simplify/tests/test_simplify.py::test_simplify_ratio", "sympy/simplify/tests/test_simplify.py::test_simplify_measure", "sympy/simplify/tests/test_simplify.py::test_simplify_issue_1308", "sympy/simplify/tests/test_simplify.py::test_issue_5652", "sympy/simplify/tests/test_simplify.py::test_simplify_fail1", 
"sympy/simplify/tests/test_simplify.py::test_nthroot", "sympy/simplify/tests/test_simplify.py::test_nthroot1", "sympy/simplify/tests/test_simplify.py::test_separatevars", "sympy/simplify/tests/test_simplify.py::test_separatevars_advanced_factor", "sympy/simplify/tests/test_simplify.py::test_hypersimp", "sympy/simplify/tests/test_simplify.py::test_nsimplify", "sympy/simplify/tests/test_simplify.py::test_issue_9448", "sympy/simplify/tests/test_simplify.py::test_extract_minus_sign", "sympy/simplify/tests/test_simplify.py::test_diff", "sympy/simplify/tests/test_simplify.py::test_logcombine_1", "sympy/simplify/tests/test_simplify.py::test_logcombine_complex_coeff", "sympy/simplify/tests/test_simplify.py::test_posify", "sympy/simplify/tests/test_simplify.py::test_issue_4194", "sympy/simplify/tests/test_simplify.py::test_as_content_primitive", "sympy/simplify/tests/test_simplify.py::test_signsimp", "sympy/simplify/tests/test_simplify.py::test_besselsimp", "sympy/simplify/tests/test_simplify.py::test_Piecewise", "sympy/simplify/tests/test_simplify.py::test_polymorphism", "sympy/simplify/tests/test_simplify.py::test_issue_from_PR1599", "sympy/simplify/tests/test_simplify.py::test_issue_6811", "sympy/simplify/tests/test_simplify.py::test_issue_6920", "sympy/simplify/tests/test_simplify.py::test_issue_7001", "sympy/simplify/tests/test_simplify.py::test_inequality_no_auto_simplify", "sympy/simplify/tests/test_simplify.py::test_issue_9398", "sympy/simplify/tests/test_simplify.py::test_issue_9324_simplify", "sympy/simplify/tests/test_simplify.py::test_simplify_function_inverse", "sympy/solvers/tests/test_solveset.py::test_invert_complex", "sympy/solvers/tests/test_solveset.py::test_domain_check", "sympy/solvers/tests/test_solveset.py::test_is_function_class_equation", "sympy/solvers/tests/test_solveset.py::test_garbage_input", "sympy/solvers/tests/test_solveset.py::test_solve_mul", "sympy/solvers/tests/test_solveset.py::test_solve_invert", "sympy/solvers/tests/test_solveset.py::test_errorinverses", "sympy/solvers/tests/test_solveset.py::test_solve_polynomial", "sympy/solvers/tests/test_solveset.py::test_return_root_of", "sympy/solvers/tests/test_solveset.py::test__has_rational_power", "sympy/solvers/tests/test_solveset.py::test_solveset_sqrt_1", "sympy/solvers/tests/test_solveset.py::test_solveset_sqrt_2", "sympy/solvers/tests/test_solveset.py::test_solve_sqrt_3", "sympy/solvers/tests/test_solveset.py::test_solve_polynomial_symbolic_param", "sympy/solvers/tests/test_solveset.py::test_solve_rational", "sympy/solvers/tests/test_solveset.py::test_solveset_real_gen_is_pow", "sympy/solvers/tests/test_solveset.py::test_no_sol", "sympy/solvers/tests/test_solveset.py::test_sol_zero_real", "sympy/solvers/tests/test_solveset.py::test_no_sol_rational_extragenous", "sympy/solvers/tests/test_solveset.py::test_solve_polynomial_cv_1a", "sympy/solvers/tests/test_solveset.py::test_solveset_real_rational", "sympy/solvers/tests/test_solveset.py::test_solveset_real_log", "sympy/solvers/tests/test_solveset.py::test_poly_gens", "sympy/solvers/tests/test_solveset.py::test_solve_abs", "sympy/solvers/tests/test_solveset.py::test_real_imag_splitting", "sympy/solvers/tests/test_solveset.py::test_units", "sympy/solvers/tests/test_solveset.py::test_solve_only_exp_1", "sympy/solvers/tests/test_solveset.py::test_atan2", "sympy/solvers/tests/test_solveset.py::test_piecewise", "sympy/solvers/tests/test_solveset.py::test_solveset_complex_polynomial", "sympy/solvers/tests/test_solveset.py::test_sol_zero_complex", 
"sympy/solvers/tests/test_solveset.py::test_solveset_complex_rational", "sympy/solvers/tests/test_solveset.py::test_solveset_complex_exp", "sympy/solvers/tests/test_solveset.py::test_solve_complex_log", "sympy/solvers/tests/test_solveset.py::test_solve_complex_sqrt", "sympy/solvers/tests/test_solveset.py::test_solveset_complex_tan", "sympy/solvers/tests/test_solveset.py::test_solve_trig", "sympy/solvers/tests/test_solveset.py::test_solve_invalid_sol", "sympy/solvers/tests/test_solveset.py::test_solveset", "sympy/solvers/tests/test_solveset.py::test_conditionset", "sympy/solvers/tests/test_solveset.py::test_solveset_domain", "sympy/solvers/tests/test_solveset.py::test_improve_coverage", "sympy/solvers/tests/test_solveset.py::test_issue_9522", "sympy/solvers/tests/test_solveset.py::test_linear_eq_to_matrix", "sympy/solvers/tests/test_solveset.py::test_linsolve", "sympy/solvers/tests/test_solveset.py::test_issue_9556", "sympy/solvers/tests/test_solveset.py::test_issue_9611", "sympy/solvers/tests/test_solveset.py::test_issue_9557", "sympy/solvers/tests/test_solveset.py::test_issue_9778", "sympy/solvers/tests/test_solveset.py::test_issue_9849", "sympy/solvers/tests/test_solveset.py::test_issue_9953", "sympy/solvers/tests/test_solveset.py::test_issue_9913", "sympy/solvers/tests/test_solveset.py::test_issue_10397", "sympy/solvers/tests/test_solveset.py::test_simplification", "sympy/solvers/tests/test_solveset.py::test_issue_10555", "sympy/solvers/tests/test_solveset.py::test_issue_8715" ]
[]
BSD
445
getlogbook__logbook-195
bb0f4fbeec318a140780b1ac8781599474cf2666
2016-02-21 11:29:15
bb0f4fbeec318a140780b1ac8781599474cf2666
diff --git a/logbook/utils.py b/logbook/utils.py index 5416c52..7851f48 100644 --- a/logbook/utils.py +++ b/logbook/utils.py @@ -3,17 +3,14 @@ import functools import sys import threading -from .base import Logger +from .base import Logger, DEBUG from .helpers import string_types -from logbook import debug as logbook_debug class _SlowContextNotifier(object): - def __init__(self, threshold, logger_func, args, kwargs): - self.logger_func = logger_func - self.args = args - self.kwargs = kwargs or {} + def __init__(self, threshold, func): + self.func = func self.evt = threading.Event() self.threshold = threshold self.thread = threading.Thread(target=self._notifier) @@ -21,7 +18,7 @@ class _SlowContextNotifier(object): def _notifier(self): self.evt.wait(timeout=self.threshold) if not self.evt.is_set(): - self.logger_func(*self.args, **self.kwargs) + self.func() def __enter__(self): self.thread.start() @@ -32,18 +29,36 @@ class _SlowContextNotifier(object): self.thread.join() -def logged_if_slow(message, threshold=1, func=logbook_debug, args=None, - kwargs=None): - """Logs a message (by default using the global debug logger) if a certain - context containing a set of operations is too slow +_slow_logger = Logger('Slow') - >>> with logged_if_slow('too slow!'): - ... ... - .. versionadded:: 0.12 +def logged_if_slow(*args, **kwargs): + """Context manager that logs if operations within take longer than + `threshold` seconds. + + :param threshold: Number of seconds (or fractions thereof) allwoed before + logging occurs. The default is 1 second. + :param logger: :class:`~logbook.Logger` to use. The default is a 'slow' + logger. + :param level: Log level. The default is `DEBUG`. + :param func: (Deprecated). Function to call to perform logging. + + The remaining parameters are passed to the + :meth:`~logbook.base.LoggerMixin.log` method. """ - full_args = (message, ) if args is None else (message, ) + tuple(args) - return _SlowContextNotifier(threshold, func, full_args, kwargs) + threshold = kwargs.pop('threshold', 1) + func = kwargs.pop('func', None) + if func is None: + logger = kwargs.pop('logger', _slow_logger) + level = kwargs.pop('level', DEBUG) + func = functools.partial(logger.log, level, *args, **kwargs) + else: + if 'logger' in kwargs or 'level' in kwargs: + raise TypeError("If using deprecated func parameter, 'logger' and" + " 'level' arguments cannot be passed.") + func = functools.partial(func, *args, **kwargs) + + return _SlowContextNotifier(threshold, func) class _Local(threading.local): diff --git a/tox.ini b/tox.ini index b3d3493..07a3f90 100644 --- a/tox.ini +++ b/tox.ini @@ -6,6 +6,7 @@ skipsdist=True whitelist_externals= rm deps= + py{26,27}: mock pytest Cython changedir={toxinidir} @@ -18,6 +19,7 @@ commands= [testenv:pypy] deps= + mock pytest commands= {envpython} {toxinidir}/setup.py develop
logged_if_slow API I think the logged_if_slow API is a bit awkward. I think we should change it for v1.0. Current API: ``` def logged_if_slow(message, threshold=1, func=logbook_debug, args=None, kwargs=None) ``` Problems: - Always uses the default logger - Have to pass a log function; why not a level? - args and kwargs are normal parameters (rather than `*args`, `**kwargs`). Proposed API: ``` def logged_if_slow(*args, **kwargs): threshold = kwargs.pop('threshold', 1) logger = kwargs.pop('logger', _default_logger) level = kwargs.pop('level', logbook.DEBUG) # Context manager would call: logger.log(level, *args, **kwargs) ``` Then the context manager can simply call ``` logger.log(level, *args, **kwargs) ``` Also: are there valid reasons to use threading and events to handle the threshold rather than just storing a `time.time()` value and comparing in `__exit__`? I don't know how the threading timeout works, so I'm just curious about the implementation!
getlogbook/logbook
diff --git a/tests/test_utils.py b/tests/test_utils.py index 3d1443f..f4ca5b8 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -8,23 +8,59 @@ from time import sleep _THRESHOLD = 0.1 +try: + from unittest.mock import Mock, call +except ImportError: + from mock import Mock, call -def test_logged_if_slow_reached(logger, test_handler): + +def test_logged_if_slow_reached(test_handler): with test_handler.applicationbound(): with logged_if_slow('checking...', threshold=_THRESHOLD): - sleep(2*_THRESHOLD) + sleep(2 * _THRESHOLD) assert len(test_handler.records) == 1 [record] = test_handler.records assert record.message == 'checking...' -def test_logged_if_slow_did_not_reached(logger, test_handler): +def test_logged_if_slow_did_not_reached(test_handler): with test_handler.applicationbound(): with logged_if_slow('checking...', threshold=_THRESHOLD): - sleep(_THRESHOLD/2) + sleep(_THRESHOLD / 2) assert len(test_handler.records) == 0 +def test_logged_if_slow_logger(): + logger = Mock() + + with logged_if_slow('checking...', threshold=_THRESHOLD, logger=logger): + sleep(2 * _THRESHOLD) + + assert logger.log.call_args == call(logbook.DEBUG, 'checking...') + + +def test_logged_if_slow_level(test_handler): + with test_handler.applicationbound(): + with logged_if_slow('checking...', threshold=_THRESHOLD, + level=logbook.WARNING): + sleep(2 * _THRESHOLD) + + assert test_handler.records[0].level == logbook.WARNING + + +def test_logged_if_slow_deprecated(logger, test_handler): + with test_handler.applicationbound(): + with logged_if_slow('checking...', threshold=_THRESHOLD, + func=logbook.error): + sleep(2 * _THRESHOLD) + + assert test_handler.records[0].level == logbook.ERROR + assert test_handler.records[0].message == 'checking...' + + with pytest.raises(TypeError): + logged_if_slow('checking...', logger=logger, func=logger.error) + + def test_deprecated_func_called(capture): assert deprecated_func(1, 2) == 3
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 3, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 2 }
0.12
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov" ], "pre_install": [ "apt-get update", "apt-get install -y gcc libzmq3-dev" ], "python": "3.9", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
coverage==7.8.0 Cython==3.0.12 exceptiongroup==1.2.2 iniconfig==2.1.0 -e git+https://github.com/getlogbook/logbook.git@bb0f4fbeec318a140780b1ac8781599474cf2666#egg=Logbook packaging==24.2 pluggy==1.5.0 pytest==8.3.5 pytest-cov==6.0.0 tomli==2.2.1
name: logbook channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - coverage==7.8.0 - cython==3.0.12 - exceptiongroup==1.2.2 - iniconfig==2.1.0 - packaging==24.2 - pluggy==1.5.0 - pytest==8.3.5 - pytest-cov==6.0.0 - tomli==2.2.1 prefix: /opt/conda/envs/logbook
[ "tests/test_utils.py::test_logged_if_slow_logger", "tests/test_utils.py::test_logged_if_slow_level" ]
[]
[ "tests/test_utils.py::test_logged_if_slow_reached", "tests/test_utils.py::test_logged_if_slow_did_not_reached", "tests/test_utils.py::test_logged_if_slow_deprecated", "tests/test_utils.py::test_deprecated_func_called", "tests/test_utils.py::test_deprecation_message", "tests/test_utils.py::test_deprecation_with_message", "tests/test_utils.py::test_no_deprecations", "tests/test_utils.py::test_class_deprecation[_no_decorator]", "tests/test_utils.py::test_class_deprecation[classmethod]", "tests/test_utils.py::test_deprecations_different_sources", "tests/test_utils.py::test_deprecations_same_sources", "tests/test_utils.py::test_deprecation_message_different_sources", "tests/test_utils.py::test_deprecation_message_same_sources", "tests/test_utils.py::test_deprecation_message_full_warning", "tests/test_utils.py::test_name_doc", "tests/test_utils.py::test_doc_update", "tests/test_utils.py::test_deprecatd_docstring" ]
[]
BSD License
446
typesafehub__conductr-cli-110
5f065ac8f67877d0c393f26f789b22c270cb24d9
2016-02-22 03:34:24
76f795642d4d2220be0eddf75bcf8e933a7b6821
markusjura: The current code would fail if the `value` contained an `=` sign, e.g. ``` secret = ah5k=sdh ``` We should split the line into key and value only at the first `=` sign. huntc: Good catch! I shall fix tomorrow!
diff --git a/conductr_cli/resolvers/bintray_resolver.py b/conductr_cli/resolvers/bintray_resolver.py index cfb0ce9..2186cb6 100644 --- a/conductr_cli/resolvers/bintray_resolver.py +++ b/conductr_cli/resolvers/bintray_resolver.py @@ -5,13 +5,14 @@ from requests.exceptions import HTTPError import json import logging import os +import re import requests BINTRAY_API_BASE_URL = 'https://api.bintray.com' BINTRAY_DOWNLOAD_BASE_URL = 'https://dl.bintray.com' -BINTRAY_DOWNLOAD_REALM = 'Bintray' BINTRAY_CREDENTIAL_FILE_PATH = '{}/.bintray/.credentials'.format(os.path.expanduser('~')) +BINTRAY_PROPERTIES_RE = re.compile('^(\S+)\s*=\s*([\S]+)$') def resolve_bundle(cache_dir, uri): @@ -23,8 +24,7 @@ def resolve_bundle(cache_dir, uri): bundle_download_url = bintray_download_url(bintray_username, bintray_password, org, repo, package_name, compatibility_version, digest) if bundle_download_url: - auth = (BINTRAY_DOWNLOAD_REALM, bintray_username, bintray_password) if bintray_username else None - return uri_resolver.resolve_bundle(cache_dir, bundle_download_url, auth) + return uri_resolver.resolve_bundle(cache_dir, bundle_download_url) else: return False, None, None except MalformedBundleUriError: @@ -61,8 +61,13 @@ def load_bintray_credentials(): lines = [line.replace('\n', '') for line in cred_file.readlines()] data = dict() for line in lines: - key, value = line.replace(' = ', '=').split('=') - data[key] = value + match = BINTRAY_PROPERTIES_RE.match(line) + if match is not None: + try: + key, value = match.group(1, 2) + data[key] = value + except IndexError: + pass username = None if 'user' not in data else data['user'] password = None if 'password' not in data else data['password'] diff --git a/conductr_cli/resolvers/uri_resolver.py b/conductr_cli/resolvers/uri_resolver.py index 8207d26..45a12df 100644 --- a/conductr_cli/resolvers/uri_resolver.py +++ b/conductr_cli/resolvers/uri_resolver.py @@ -6,10 +6,9 @@ from conductr_cli import screen_utils import os import logging import shutil -import urllib -def resolve_bundle(cache_dir, uri, auth=None): +def resolve_bundle(cache_dir, uri): log = logging.getLogger(__name__) if not os.path.exists(cache_dir): @@ -24,7 +23,7 @@ def resolve_bundle(cache_dir, uri, auth=None): if os.path.exists(tmp_download_path): os.remove(tmp_download_path) - download_bundle(log, bundle_url, tmp_download_path, auth) + download_bundle(log, bundle_url, tmp_download_path) shutil.move(tmp_download_path, cached_file) return True, bundle_name, cached_file @@ -63,22 +62,12 @@ def cache_path(cache_dir, uri): return '{}/{}'.format(cache_dir, basename) -def download_bundle(log, bundle_url, tmp_download_path, auth): +def download_bundle(log, bundle_url, tmp_download_path): log.info('Retrieving {}'.format(bundle_url)) parsed = urlparse(bundle_url, scheme='file') is_http_download = parsed.scheme == 'http' or parsed.scheme == 'https' - if is_http_download and auth: - realm, username, password = auth - authinfo = urllib.request.HTTPBasicAuthHandler() - authinfo.add_password(realm=realm, - uri=bundle_url, - user=username, - passwd=password) - opener = urllib.request.build_opener(authinfo) - urllib.request.install_opener(opener) - if log.is_progress_enabled() and is_http_download: urlretrieve(bundle_url, tmp_download_path, reporthook=show_progress(log)) else:
bintray credential parsing Apparently bintray credential file formats can screw up our parsing of them. Our parser will have to become more sophisticated. From @henrikengstrom: > I haven’t changed the credentials file lately and it contained more than `user` and `password`, e.g. `realm` and `host`.
typesafehub/conductr-cli
diff --git a/conductr_cli/resolvers/test/test_bintray_resolver.py b/conductr_cli/resolvers/test/test_bintray_resolver.py index 2c8a77b..1d7b723 100644 --- a/conductr_cli/resolvers/test/test_bintray_resolver.py +++ b/conductr_cli/resolvers/test/test_bintray_resolver.py @@ -32,8 +32,7 @@ class TestResolveBundle(TestCase): parse_mock.assert_called_with('bundle-name:v1') bintray_download_url_mock.assert_called_with('username', 'password', 'typesafe', 'bundle', 'bundle-name', 'v1', 'digest') - resolve_bundle_mock.assert_called_with('/cache-dir', 'https://dl.bintray.com/download.zip', - ('Bintray', 'username', 'password')) + resolve_bundle_mock.assert_called_with('/cache-dir', 'https://dl.bintray.com/download.zip') def test_bintray_version_not_found(self): load_bintray_credentials_mock = MagicMock(return_value=('username', 'password')) @@ -390,7 +389,8 @@ class TestLoadBintrayCredentials(TestCase): def test_success(self): bintray_credential_file = strip_margin( """|user = user1 - |password = secret + |password = sec=ret + |# Some comment |""") exists_mock = MagicMock(return_value=True) @@ -400,7 +400,7 @@ class TestLoadBintrayCredentials(TestCase): patch('builtins.open', open_mock): username, password = bintray_resolver.load_bintray_credentials() self.assertEqual('user1', username) - self.assertEqual('secret', password) + self.assertEqual('sec=ret', password) exists_mock.assert_called_with('{}/.bintray/.credentials'.format(os.path.expanduser('~'))) open_mock.assert_called_with('{}/.bintray/.credentials'.format(os.path.expanduser('~')), 'r')
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 2, "test_score": 0 }, "num_modified_files": 2 }
0.24
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest", "pytest-cov", "pytest-xdist", "pytest-mock", "pytest-asyncio" ], "pre_install": null, "python": "3.9", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
argcomplete==3.6.1 arrow==1.3.0 certifi==2025.1.31 charset-normalizer==3.4.1 -e git+https://github.com/typesafehub/conductr-cli.git@5f065ac8f67877d0c393f26f789b22c270cb24d9#egg=conductr_cli coverage==7.8.0 exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work execnet==2.1.1 idna==3.10 iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work packaging @ file:///croot/packaging_1734472117206/work pluggy @ file:///croot/pluggy_1733169602837/work pyhocon==0.2.1 pyparsing==2.0.3 pytest @ file:///croot/pytest_1738938843180/work pytest-asyncio==0.26.0 pytest-cov==6.0.0 pytest-mock==3.14.0 pytest-xdist==3.6.1 python-dateutil==2.9.0.post0 requests==2.32.3 six==1.17.0 tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work types-python-dateutil==2.9.0.20241206 typing_extensions==4.13.0 urllib3==2.3.0
name: conductr-cli channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - exceptiongroup=1.2.0=py39h06a4308_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - packaging=24.2=py39h06a4308_0 - pip=25.0=py39h06a4308_0 - pluggy=1.5.0=py39h06a4308_0 - pytest=8.3.4=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tomli=2.0.1=py39h06a4308_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - argcomplete==3.6.1 - arrow==1.3.0 - certifi==2025.1.31 - charset-normalizer==3.4.1 - coverage==7.8.0 - execnet==2.1.1 - idna==3.10 - pyhocon==0.2.1 - pyparsing==2.0.3 - pytest-asyncio==0.26.0 - pytest-cov==6.0.0 - pytest-mock==3.14.0 - pytest-xdist==3.6.1 - python-dateutil==2.9.0.post0 - requests==2.32.3 - six==1.17.0 - types-python-dateutil==2.9.0.20241206 - typing-extensions==4.13.0 - urllib3==2.3.0 prefix: /opt/conda/envs/conductr-cli
[ "conductr_cli/resolvers/test/test_bintray_resolver.py::TestResolveBundle::test_bintray_version_found", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestLoadBintrayCredentials::test_success" ]
[]
[ "conductr_cli/resolvers/test/test_bintray_resolver.py::TestResolveBundle::test_bintray_version_not_found", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestResolveBundle::test_failure_http_error", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestResolveBundle::test_failure_malformed_bundle_uri", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestLoadFromCache::test_bintray_version_found", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestLoadFromCache::test_bintray_version_not_found", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestLoadFromCache::test_failure_http_error", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestLoadFromCache::test_failure_malformed_bundle_uri", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrl::test_failure_multiple_versions_found", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrl::test_failure_version_not_found", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrl::test_success", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrlLatest::test_failure_latest_version_malformed", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrlLatest::test_latest_version_from_attribute_names", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrlLatest::test_latest_version_from_attribute_names_not_found", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrlLatest::test_success", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrlLatestCompatibilityVersion::test_no_version", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrlLatestCompatibilityVersion::test_success", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestLoadBintrayCredentials::test_credential_file_not_having_username_password", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestLoadBintrayCredentials::test_missing_credential_file", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestGetJson::test_get_json", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestGetJson::test_get_json_no_credentials" ]
[]
Apache License 2.0
447
typesafehub__conductr-cli-111
76f795642d4d2220be0eddf75bcf8e933a7b6821
2016-02-23 01:04:09
76f795642d4d2220be0eddf75bcf8e933a7b6821
diff --git a/conductr_cli/resolvers/bintray_resolver.py b/conductr_cli/resolvers/bintray_resolver.py index 8f892b1..cfb0ce9 100644 --- a/conductr_cli/resolvers/bintray_resolver.py +++ b/conductr_cli/resolvers/bintray_resolver.py @@ -10,6 +10,7 @@ import requests BINTRAY_API_BASE_URL = 'https://api.bintray.com' BINTRAY_DOWNLOAD_BASE_URL = 'https://dl.bintray.com' +BINTRAY_DOWNLOAD_REALM = 'Bintray' BINTRAY_CREDENTIAL_FILE_PATH = '{}/.bintray/.credentials'.format(os.path.expanduser('~')) @@ -22,7 +23,8 @@ def resolve_bundle(cache_dir, uri): bundle_download_url = bintray_download_url(bintray_username, bintray_password, org, repo, package_name, compatibility_version, digest) if bundle_download_url: - return uri_resolver.resolve_bundle(cache_dir, bundle_download_url) + auth = (BINTRAY_DOWNLOAD_REALM, bintray_username, bintray_password) if bintray_username else None + return uri_resolver.resolve_bundle(cache_dir, bundle_download_url, auth) else: return False, None, None except MalformedBundleUriError: diff --git a/conductr_cli/resolvers/uri_resolver.py b/conductr_cli/resolvers/uri_resolver.py index 45a12df..8207d26 100644 --- a/conductr_cli/resolvers/uri_resolver.py +++ b/conductr_cli/resolvers/uri_resolver.py @@ -6,9 +6,10 @@ from conductr_cli import screen_utils import os import logging import shutil +import urllib -def resolve_bundle(cache_dir, uri): +def resolve_bundle(cache_dir, uri, auth=None): log = logging.getLogger(__name__) if not os.path.exists(cache_dir): @@ -23,7 +24,7 @@ def resolve_bundle(cache_dir, uri): if os.path.exists(tmp_download_path): os.remove(tmp_download_path) - download_bundle(log, bundle_url, tmp_download_path) + download_bundle(log, bundle_url, tmp_download_path, auth) shutil.move(tmp_download_path, cached_file) return True, bundle_name, cached_file @@ -62,12 +63,22 @@ def cache_path(cache_dir, uri): return '{}/{}'.format(cache_dir, basename) -def download_bundle(log, bundle_url, tmp_download_path): +def download_bundle(log, bundle_url, tmp_download_path, auth): log.info('Retrieving {}'.format(bundle_url)) parsed = urlparse(bundle_url, scheme='file') is_http_download = parsed.scheme == 'http' or parsed.scheme == 'https' + if is_http_download and auth: + realm, username, password = auth + authinfo = urllib.request.HTTPBasicAuthHandler() + authinfo.add_password(realm=realm, + uri=bundle_url, + user=username, + passwd=password) + opener = urllib.request.build_opener(authinfo) + urllib.request.install_opener(opener) + if log.is_progress_enabled() and is_http_download: urlretrieve(bundle_url, tmp_download_path, reporthook=show_progress(log)) else:
Unable to access non-default org and repo with bintray resolver Note how the URL duplicates the org and file name, even when you don't specify an org. `conduct load typesafe/internal-bundle/typesafe-website` ``` Retrieving https://dl.bintray.com/typesafe/internal-bundle/typesafe/typesafe-website/v1-075dbb07a7c6271164c2a429b06f5908bc3d416d18c5813d3d4d718aa6470f2e/typesafe-website-v1-075dbb07a7c6271164c2a429b06f5908bc3d416d18c5813d3d4d718aa6470f2e.zip Error: Bundle not found: Unable to resolve bundle using typesafe/internal-bundle/typesafe-website ``` ` conduct load internal-bundle/typesafe-website` ``` Resolving bundle typesafe/internal-bundle/typesafe-website Retrieving https://dl.bintray.com/typesafe/internal-bundle/typesafe/typesafe-website/v1-d31c136feef7a7c0a43a0bdf4a2179491e40161ee3a7a37335bcda1c13c5612f/typesafe-website-v1-d31c136feef7a7c0a43a0bdf4a2179491e40161ee3a7a37335bcda1c13c5612f.zip Error: Bundle not found: Unable to resolve bundle using internal-bundle/typesafe-website ```
typesafehub/conductr-cli
diff --git a/conductr_cli/resolvers/test/test_bintray_resolver.py b/conductr_cli/resolvers/test/test_bintray_resolver.py index cf68b72..2c8a77b 100644 --- a/conductr_cli/resolvers/test/test_bintray_resolver.py +++ b/conductr_cli/resolvers/test/test_bintray_resolver.py @@ -32,7 +32,8 @@ class TestResolveBundle(TestCase): parse_mock.assert_called_with('bundle-name:v1') bintray_download_url_mock.assert_called_with('username', 'password', 'typesafe', 'bundle', 'bundle-name', 'v1', 'digest') - resolve_bundle_mock.assert_called_with('/cache-dir', 'https://dl.bintray.com/download.zip') + resolve_bundle_mock.assert_called_with('/cache-dir', 'https://dl.bintray.com/download.zip', + ('Bintray', 'username', 'password')) def test_bintray_version_not_found(self): load_bintray_credentials_mock = MagicMock(return_value=('username', 'password'))
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 2 }
0.24
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest", "flake8" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
argcomplete==3.6.1 arrow==1.3.0 certifi==2025.1.31 charset-normalizer==3.4.1 -e git+https://github.com/typesafehub/conductr-cli.git@76f795642d4d2220be0eddf75bcf8e933a7b6821#egg=conductr_cli exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work flake8==7.2.0 idna==3.10 iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work mccabe==0.7.0 packaging @ file:///croot/packaging_1734472117206/work pluggy @ file:///croot/pluggy_1733169602837/work pycodestyle==2.13.0 pyflakes==3.3.1 pyhocon==0.2.1 pyparsing==2.0.3 pytest @ file:///croot/pytest_1738938843180/work python-dateutil==2.9.0.post0 requests==2.32.3 six==1.17.0 tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work types-python-dateutil==2.9.0.20241206 urllib3==2.3.0
name: conductr-cli channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - exceptiongroup=1.2.0=py39h06a4308_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - packaging=24.2=py39h06a4308_0 - pip=25.0=py39h06a4308_0 - pluggy=1.5.0=py39h06a4308_0 - pytest=8.3.4=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tomli=2.0.1=py39h06a4308_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - argcomplete==3.6.1 - arrow==1.3.0 - certifi==2025.1.31 - charset-normalizer==3.4.1 - flake8==7.2.0 - idna==3.10 - mccabe==0.7.0 - pycodestyle==2.13.0 - pyflakes==3.3.1 - pyhocon==0.2.1 - pyparsing==2.0.3 - python-dateutil==2.9.0.post0 - requests==2.32.3 - six==1.17.0 - types-python-dateutil==2.9.0.20241206 - urllib3==2.3.0 prefix: /opt/conda/envs/conductr-cli
[ "conductr_cli/resolvers/test/test_bintray_resolver.py::TestResolveBundle::test_bintray_version_found" ]
[]
[ "conductr_cli/resolvers/test/test_bintray_resolver.py::TestResolveBundle::test_bintray_version_not_found", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestResolveBundle::test_failure_http_error", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestResolveBundle::test_failure_malformed_bundle_uri", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestLoadFromCache::test_bintray_version_found", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestLoadFromCache::test_bintray_version_not_found", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestLoadFromCache::test_failure_http_error", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestLoadFromCache::test_failure_malformed_bundle_uri", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrl::test_failure_multiple_versions_found", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrl::test_failure_version_not_found", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrl::test_success", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrlLatest::test_failure_latest_version_malformed", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrlLatest::test_latest_version_from_attribute_names", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrlLatest::test_latest_version_from_attribute_names_not_found", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrlLatest::test_success", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrlLatestCompatibilityVersion::test_no_version", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestBintrayDownloadUrlLatestCompatibilityVersion::test_success", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestLoadBintrayCredentials::test_credential_file_not_having_username_password", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestLoadBintrayCredentials::test_missing_credential_file", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestLoadBintrayCredentials::test_success", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestGetJson::test_get_json", "conductr_cli/resolvers/test/test_bintray_resolver.py::TestGetJson::test_get_json_no_credentials" ]
[]
Apache License 2.0
448
wireservice__agate-509
e90440bbee9f027459140fdfdb0478a8078338f9
2016-02-23 02:49:36
e90440bbee9f027459140fdfdb0478a8078338f9
diff --git a/agate/utils.py b/agate/utils.py index e18e5f0..834e262 100644 --- a/agate/utils.py +++ b/agate/utils.py @@ -109,12 +109,12 @@ class Quantiles(Sequence): raise ValueError('Value is greater than maximum quantile value.') if value == self._quantiles[-1]: - return len(self._quantiles) - 1 + return Decimal(len(self._quantiles) - 1) while value >= self._quantiles[i + 1]: i += 1 - return i + return Decimal(i) def median(data_sorted):
Percentiles should be Decimals For consistency
wireservice/agate
diff --git a/tests/test_computations.py b/tests/test_computations.py index bfc3a9b..c9c76b3 100644 --- a/tests/test_computations.py +++ b/tests/test_computations.py @@ -316,6 +316,8 @@ class TestTableComputation(unittest.TestCase): self.assertSequenceEqual(new_table.rows[500], (501, 50)) self.assertSequenceEqual(new_table.rows[998], (999, 99)) self.assertSequenceEqual(new_table.rows[999], (1000, 100)) + self.assertIsInstance(new_table.columns['percentiles'][0], Decimal) + self.assertIsInstance(new_table.columns['percentiles'][-1], Decimal) def test_percentile_rank_invalid_types(self): with self.assertRaises(DataTypeError):
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 0, "test_score": 1 }, "num_modified_files": 1 }
1.2
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "parsedatetime>=2.0", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements-py3.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
-e git+https://github.com/wireservice/agate.git@e90440bbee9f027459140fdfdb0478a8078338f9#egg=agate alabaster==0.7.16 babel==2.17.0 cachetools==5.5.2 certifi==2025.1.31 chardet==5.2.0 charset-normalizer==3.4.1 colorama==0.4.6 coverage==7.8.0 distlib==0.3.9 docutils==0.21.2 exceptiongroup==1.2.2 filelock==3.18.0 idna==3.10 imagesize==1.4.1 importlib_metadata==8.6.1 iniconfig==2.1.0 isodate==0.7.2 Jinja2==3.1.6 MarkupSafe==3.0.2 nose==1.3.7 packaging==24.2 parsedatetime==2.6 platformdirs==4.3.7 pluggy==1.5.0 Pygments==2.19.1 pyproject-api==1.9.0 pytest==8.3.5 pytimeparse==1.1.8 pytz==2025.2 requests==2.32.3 six==1.17.0 snowballstemmer==2.2.0 Sphinx==7.4.7 sphinx-rtd-theme==3.0.2 sphinxcontrib-applehelp==2.0.0 sphinxcontrib-devhelp==2.0.0 sphinxcontrib-htmlhelp==2.1.0 sphinxcontrib-jquery==4.1 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==2.0.0 sphinxcontrib-serializinghtml==2.0.0 tomli==2.2.1 tox==4.25.0 typing_extensions==4.13.0 urllib3==2.3.0 virtualenv==20.29.3 zipp==3.21.0
name: agate channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.16 - babel==2.17.0 - cachetools==5.5.2 - certifi==2025.1.31 - chardet==5.2.0 - charset-normalizer==3.4.1 - colorama==0.4.6 - coverage==7.8.0 - distlib==0.3.9 - docutils==0.21.2 - exceptiongroup==1.2.2 - filelock==3.18.0 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==8.6.1 - iniconfig==2.1.0 - isodate==0.7.2 - jinja2==3.1.6 - markupsafe==3.0.2 - nose==1.3.7 - packaging==24.2 - parsedatetime==2.6 - platformdirs==4.3.7 - pluggy==1.5.0 - pygments==2.19.1 - pyproject-api==1.9.0 - pytest==8.3.5 - pytimeparse==1.1.8 - pytz==2025.2 - requests==2.32.3 - six==1.17.0 - snowballstemmer==2.2.0 - sphinx==7.4.7 - sphinx-rtd-theme==3.0.2 - sphinxcontrib-applehelp==2.0.0 - sphinxcontrib-devhelp==2.0.0 - sphinxcontrib-htmlhelp==2.1.0 - sphinxcontrib-jquery==4.1 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==2.0.0 - sphinxcontrib-serializinghtml==2.0.0 - tomli==2.2.1 - tox==4.25.0 - typing-extensions==4.13.0 - urllib3==2.3.0 - virtualenv==20.29.3 - zipp==3.21.0 prefix: /opt/conda/envs/agate
[ "tests/test_computations.py::TestTableComputation::test_percentile_rank" ]
[]
[ "tests/test_computations.py::TestTableComputation::test_change", "tests/test_computations.py::TestTableComputation::test_change_mixed_types", "tests/test_computations.py::TestTableComputation::test_change_nulls", "tests/test_computations.py::TestTableComputation::test_changed_invalid_types", "tests/test_computations.py::TestTableComputation::test_formula", "tests/test_computations.py::TestTableComputation::test_formula_invalid", "tests/test_computations.py::TestTableComputation::test_formula_no_validate", "tests/test_computations.py::TestTableComputation::test_percent", "tests/test_computations.py::TestTableComputation::test_percent_change", "tests/test_computations.py::TestTableComputation::test_percent_change_invalid_columns", "tests/test_computations.py::TestTableComputation::test_percent_nulls", "tests/test_computations.py::TestTableComputation::test_percent_total_override", "tests/test_computations.py::TestTableComputation::test_percent_zeros", "tests/test_computations.py::TestTableComputation::test_percentile_rank_invalid_types", "tests/test_computations.py::TestTableComputation::test_rank_number", "tests/test_computations.py::TestTableComputation::test_rank_number_key", "tests/test_computations.py::TestTableComputation::test_rank_number_reverse", "tests/test_computations.py::TestTableComputation::test_rank_number_reverse_key", "tests/test_computations.py::TestTableComputation::test_rank_text", "tests/test_computations.py::TestDateAndTimeComputations::test_change_dates", "tests/test_computations.py::TestDateAndTimeComputations::test_change_datetimes", "tests/test_computations.py::TestDateAndTimeComputations::test_change_timedeltas" ]
[]
MIT License
449
jmespath__jmespath.py-102
a6baa176714c72a68a2b367810b91162b1125f41
2016-02-23 07:25:34
71f44854a35c5abcdb8fbd84e25d185d0ca53f92
diff --git a/README.rst b/README.rst index e7262d4..486783b 100644 --- a/README.rst +++ b/README.rst @@ -96,6 +96,85 @@ of your dict keys. To do this you can use either of these options: ... jmespath.Options(dict_cls=collections.OrderedDict)) +Custom Functions +~~~~~~~~~~~~~~~~ + +The JMESPath language has numerous +`built-in functions +<http://jmespath.org/specification.html#built-in-functions>`__, but it is +also possible to add your own custom functions. Keep in mind that +custom function support in jmespath.py is experimental and the API may +change based on feedback. + +**If you have a custom function that you've found useful, consider submitting +it to jmespath.site and propose that it be added to the JMESPath language.** +You can submit proposals +`here <https://github.com/jmespath/jmespath.site/issues>`__. + +To create custom functions: + +* Create a subclass of ``jmespath.functions.Functions``. +* Create a method with the name ``_func_<your function name>``. +* Apply the ``jmespath.functions.signature`` decorator that indicates + the expected types of the function arguments. +* Provide an instance of your subclass in a ``jmespath.Options`` object. + +Below are a few examples: + +.. code:: python + + import jmespath + from jmespath import functions + + # 1. Create a subclass of functions.Functions. + # The function.Functions base class has logic + # that introspects all of its methods and automatically + # registers your custom functions in its function table. + class CustomFunctions(functions.Functions): + + # 2 and 3. Create a function that starts with _func_ + # and decorate it with @signature which indicates its + # expected types. + # In this example, we're creating a jmespath function + # called "unique_letters" that accepts a single argument + # with an expected type "string". + @functions.signature({'types': ['string']}) + def _func_unique_letters(self, s): + # Given a string s, return a sorted + # string of unique letters: 'ccbbadd' -> 'abcd' + return ''.join(sorted(set(s))) + + # Here's another example. This is creating + # a jmespath function called "my_add" that expects + # two arguments, both of which should be of type number. + @functions.signature({'types': ['number']}, {'types': ['number']}) + def _func_my_add(self, x, y): + return x + y + + # 4. Provide an instance of your subclass in a Options object. + options = jmespath.Options(custom_functions=CustomFunctions()) + + # Provide this value to jmespath.search: + # This will print 3 + print( + jmespath.search( + 'my_add(`1`, `2`)', {}, options=options) + ) + + # This will print "abcd" + print( + jmespath.search( + 'foo.bar | unique_letters(@)', + {'foo': {'bar': 'ccbbadd'}}, + options=options) + ) + +Again, if you come up with useful functions that you think make +sense in the JMESPath language (and make sense to implement in all +JMESPath libraries, not just python), please let us know at +`jmespath.site <https://github.com/jmespath/jmespath.site/issues>`__. + + Specification ============= diff --git a/jmespath/compat.py b/jmespath/compat.py index 7b70adb..2ed0fe7 100644 --- a/jmespath/compat.py +++ b/jmespath/compat.py @@ -3,6 +3,15 @@ import inspect PY2 = sys.version_info[0] == 2 + +def with_metaclass(meta, *bases): + # Taken from flask/six. 
+ class metaclass(meta): + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + return type.__new__(metaclass, 'temporary_class', (), {}) + + if PY2: text_type = unicode string_type = basestring diff --git a/jmespath/functions.py b/jmespath/functions.py index e306f7b..a7c7bd9 100644 --- a/jmespath/functions.py +++ b/jmespath/functions.py @@ -1,10 +1,9 @@ import math import json -import weakref from jmespath import exceptions from jmespath.compat import string_type as STRING_TYPE -from jmespath.compat import get_methods +from jmespath.compat import get_methods, with_metaclass # python types -> jmespath types @@ -35,48 +34,39 @@ REVERSE_TYPES_MAP = { } -def populate_function_table(cls): - func_table = cls.FUNCTION_TABLE - for name, method in get_methods(cls): - signature = getattr(method, 'signature', None) - if signature is not None: - func_table[name[6:]] = {"function": method, - "signature": signature} - return cls - - -def builtin_function(*arguments): - def _record_arity(func): +def signature(*arguments): + def _record_signature(func): func.signature = arguments return func - return _record_arity + return _record_signature -@populate_function_table -class RuntimeFunctions(object): - # The built in functions are automatically populated in the FUNCTION_TABLE - # using the @builtin_function decorator on methods defined in this class. +class FunctionRegistry(type): + def __init__(cls, name, bases, attrs): + cls._populate_function_table() + super(FunctionRegistry, cls).__init__(name, bases, attrs) - FUNCTION_TABLE = { - } + def _populate_function_table(cls): + function_table = getattr(cls, 'FUNCTION_TABLE', {}) + # Any method with a @signature decorator that also + # starts with "_func_" is registered as a function. + # _func_max_by -> max_by function. + for name, method in get_methods(cls): + if not name.startswith('_func_'): + continue + signature = getattr(method, 'signature', None) + if signature is not None: + function_table[name[6:]] = { + 'function': method, + 'signature': signature, + } + cls.FUNCTION_TABLE = function_table - def __init__(self): - self._interpreter = None - @property - def interpreter(self): - if self._interpreter is None: - return None - else: - return self._interpreter() +class Functions(with_metaclass(FunctionRegistry, object)): - @interpreter.setter - def interpreter(self, value): - # A weakref is used because we have - # a cyclic reference and we want to allow - # for the memory to be properly freed when - # the objects are no longer needed. 
- self._interpreter = weakref.ref(value) + FUNCTION_TABLE = { + } def call_function(self, function_name, resolved_args): try: @@ -170,28 +160,28 @@ class RuntimeFunctions(object): raise exceptions.JMESPathTypeError( function_name, element, actual_typename, types) - @builtin_function({'types': ['number']}) + @signature({'types': ['number']}) def _func_abs(self, arg): return abs(arg) - @builtin_function({'types': ['array-number']}) + @signature({'types': ['array-number']}) def _func_avg(self, arg): return sum(arg) / float(len(arg)) - @builtin_function({'types': [], 'variadic': True}) + @signature({'types': [], 'variadic': True}) def _func_not_null(self, *arguments): for argument in arguments: if argument is not None: return argument - @builtin_function({'types': []}) + @signature({'types': []}) def _func_to_array(self, arg): if isinstance(arg, list): return arg else: return [arg] - @builtin_function({'types': []}) + @signature({'types': []}) def _func_to_string(self, arg): if isinstance(arg, STRING_TYPE): return arg @@ -199,7 +189,7 @@ class RuntimeFunctions(object): return json.dumps(arg, separators=(',', ':'), default=str) - @builtin_function({'types': []}) + @signature({'types': []}) def _func_to_number(self, arg): if isinstance(arg, (list, dict, bool)): return None @@ -216,88 +206,88 @@ class RuntimeFunctions(object): except ValueError: return None - @builtin_function({'types': ['array', 'string']}, {'types': []}) + @signature({'types': ['array', 'string']}, {'types': []}) def _func_contains(self, subject, search): return search in subject - @builtin_function({'types': ['string', 'array', 'object']}) + @signature({'types': ['string', 'array', 'object']}) def _func_length(self, arg): return len(arg) - @builtin_function({'types': ['string']}, {'types': ['string']}) + @signature({'types': ['string']}, {'types': ['string']}) def _func_ends_with(self, search, suffix): return search.endswith(suffix) - @builtin_function({'types': ['string']}, {'types': ['string']}) + @signature({'types': ['string']}, {'types': ['string']}) def _func_starts_with(self, search, suffix): return search.startswith(suffix) - @builtin_function({'types': ['array', 'string']}) + @signature({'types': ['array', 'string']}) def _func_reverse(self, arg): if isinstance(arg, STRING_TYPE): return arg[::-1] else: return list(reversed(arg)) - @builtin_function({"types": ['number']}) + @signature({"types": ['number']}) def _func_ceil(self, arg): return math.ceil(arg) - @builtin_function({"types": ['number']}) + @signature({"types": ['number']}) def _func_floor(self, arg): return math.floor(arg) - @builtin_function({"types": ['string']}, {"types": ['array-string']}) + @signature({"types": ['string']}, {"types": ['array-string']}) def _func_join(self, separator, array): return separator.join(array) - @builtin_function({'types': ['expref']}, {'types': ['array']}) + @signature({'types': ['expref']}, {'types': ['array']}) def _func_map(self, expref, arg): result = [] for element in arg: - result.append(self.interpreter.visit(expref.expression, element)) + result.append(expref.visit(expref.expression, element)) return result - @builtin_function({"types": ['array-number', 'array-string']}) + @signature({"types": ['array-number', 'array-string']}) def _func_max(self, arg): if arg: return max(arg) else: return None - @builtin_function({"types": ["object"], "variadic": True}) + @signature({"types": ["object"], "variadic": True}) def _func_merge(self, *arguments): merged = {} for arg in arguments: merged.update(arg) return merged - 
@builtin_function({"types": ['array-number', 'array-string']}) + @signature({"types": ['array-number', 'array-string']}) def _func_min(self, arg): if arg: return min(arg) else: return None - @builtin_function({"types": ['array-string', 'array-number']}) + @signature({"types": ['array-string', 'array-number']}) def _func_sort(self, arg): return list(sorted(arg)) - @builtin_function({"types": ['array-number']}) + @signature({"types": ['array-number']}) def _func_sum(self, arg): return sum(arg) - @builtin_function({"types": ['object']}) + @signature({"types": ['object']}) def _func_keys(self, arg): # To be consistent with .values() # should we also return the indices of a list? return list(arg.keys()) - @builtin_function({"types": ['object']}) + @signature({"types": ['object']}) def _func_values(self, arg): return list(arg.values()) - @builtin_function({'types': []}) + @signature({'types': []}) def _func_type(self, arg): if isinstance(arg, STRING_TYPE): return "string" @@ -312,7 +302,7 @@ class RuntimeFunctions(object): elif arg is None: return "null" - @builtin_function({'types': ['array']}, {'types': ['expref']}) + @signature({'types': ['array']}, {'types': ['expref']}) def _func_sort_by(self, array, expref): if not array: return array @@ -323,34 +313,32 @@ class RuntimeFunctions(object): # that validates that type, which requires that remaining array # elements resolve to the same type as the first element. required_type = self._convert_to_jmespath_type( - type(self.interpreter.visit(expref.expression, array[0])).__name__) + type(expref.visit(expref.expression, array[0])).__name__) if required_type not in ['number', 'string']: raise exceptions.JMESPathTypeError( 'sort_by', array[0], required_type, ['string', 'number']) - keyfunc = self._create_key_func(expref.expression, + keyfunc = self._create_key_func(expref, [required_type], 'sort_by') return list(sorted(array, key=keyfunc)) - @builtin_function({'types': ['array']}, {'types': ['expref']}) + @signature({'types': ['array']}, {'types': ['expref']}) def _func_min_by(self, array, expref): - keyfunc = self._create_key_func(expref.expression, + keyfunc = self._create_key_func(expref, ['number', 'string'], 'min_by') return min(array, key=keyfunc) - @builtin_function({'types': ['array']}, {'types': ['expref']}) + @signature({'types': ['array']}, {'types': ['expref']}) def _func_max_by(self, array, expref): - keyfunc = self._create_key_func(expref.expression, + keyfunc = self._create_key_func(expref, ['number', 'string'], 'min_by') return max(array, key=keyfunc) - def _create_key_func(self, expr_node, allowed_types, function_name): - interpreter = self.interpreter - + def _create_key_func(self, expref, allowed_types, function_name): def keyfunc(x): - result = interpreter.visit(expr_node, x) + result = expref.visit(expref.expression, x) actual_typename = type(result).__name__ jmespath_type = self._convert_to_jmespath_type(actual_typename) # allowed_types is in term of jmespath types, not python types. diff --git a/jmespath/visitor.py b/jmespath/visitor.py index bbb5a8e..8a94a7e 100644 --- a/jmespath/visitor.py +++ b/jmespath/visitor.py @@ -35,7 +35,7 @@ def _is_special_integer_case(x, y): class Options(object): """Options to control how a JMESPath function is evaluated.""" - def __init__(self, dict_cls): + def __init__(self, dict_cls=None, custom_functions=None): #: The class to use when creating a dict. The interpreter # may create dictionaries during the evalution of a JMESPath # expression. 
For example, a multi-select hash will @@ -45,11 +45,16 @@ class Options(object): # want to set a collections.OrderedDict so that you can # have predictible key ordering. self.dict_cls = dict_cls + self.custom_functions = custom_functions class _Expression(object): - def __init__(self, expression): + def __init__(self, expression, interpreter): self.expression = expression + self.interpreter = interpreter + + def visit(self, node, *args, **kwargs): + return self.interpreter.visit(node, *args, **kwargs) class Visitor(object): @@ -83,15 +88,16 @@ class TreeInterpreter(Visitor): def __init__(self, options=None): super(TreeInterpreter, self).__init__() - self._options = options self._dict_cls = self.MAP_TYPE - if options is not None and options.dict_cls is not None: + if options is None: + options = Options() + self._options = options + if options.dict_cls is not None: self._dict_cls = self._options.dict_cls - self._functions = functions.RuntimeFunctions() - # Note that .interpreter is a property that uses - # a weakref so that the cyclic reference can be - # properly freed. - self._functions.interpreter = self + if options.custom_functions is not None: + self._functions = self._options.custom_functions + else: + self._functions = functions.Functions() def default_visit(self, node, *args, **kwargs): raise NotImplementedError(node['type']) @@ -119,7 +125,7 @@ class TreeInterpreter(Visitor): return value def visit_expref(self, node, value): - return _Expression(node['children'][0]) + return _Expression(node['children'][0], self) def visit_function_expression(self, node, value): resolved_args = []
Custom functions What are your thoughts about adding a method to register custom functions directly into the `RuntimeFunctions` class in `functions.py`? JMESPath is almost good enough to use as a domain-specific language for general-purpose object transformation. You can sneak literals into a multi-select hash. You can filter for values and transform them to booleans using the `<=` and `==` operators. There's some support for making sure values are numbers. However, I don't see any way to do something like "if value is x, return `y`", where a literal is returned when a condition matches. There's no way to convert conditions to an arbitrary literal - if a value in a multi-select hash is going to be a literal, it has to be the same value no matter what. I can see a possible workaround if custom functions were supported in JMESPath. E.g. if I implement an "if" function, I can do something like: search("if(bar==`1`, `hello`, `world`)", {'bar': '1'}) This would return the literal `hello` if the `bar` key is 1; otherwise it returns `world`. The only issue is that with the current Python implementation it is going to be hacky to do this. You have to override multiple classes in `functions.py` and also override the `TreeInterpreter` and `ParsedResult` classes. I think that if custom functions are desired, it would be much more elegant if there were a method to register them directly into `RuntimeFunctions` in `functions.py`, rather than forcing either a fork or overriding a litany of classes. What do you think?
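For illustration, here is a minimal sketch of how such a conditional helper could be registered as a custom function, assuming the `Functions` / `signature` / `Options(custom_functions=...)` API exercised by the test patch further below; the `if_equals` function, its signature, and the sample data are hypothetical and not part of the actual library.

```python
import jmespath
import jmespath.functions


class CustomFunctions(jmespath.functions.Functions):
    # The _func_ prefix registers the method under the remaining name;
    # @signature declares the accepted argument types ([] means "any type").
    @jmespath.functions.signature(
        {'types': []}, {'types': []}, {'types': []}, {'types': []})
    def _func_if_equals(self, value, expected, when_true, when_false):
        return when_true if value == expected else when_false


options = jmespath.Options(custom_functions=CustomFunctions())

# `1` is a JSON literal number; `"hello"` and `"world"` are JSON literal strings.
result = jmespath.search(
    'if_equals(bar, `1`, `"hello"`, `"world"`)', {'bar': 1}, options=options)
print(result)  # hello
```

With this kind of registration the "if" behaviour lives entirely in one subclass, with no need to override `TreeInterpreter` or `ParsedResult`.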
jmespath/jmespath.py
diff --git a/tests/test_search.py b/tests/test_search.py index 56a0a75..71ab3dc 100644 --- a/tests/test_search.py +++ b/tests/test_search.py @@ -1,6 +1,7 @@ from tests import unittest, OrderedDict import jmespath +import jmespath.functions class TestSearchOptions(unittest.TestCase): @@ -10,3 +11,28 @@ class TestSearchOptions(unittest.TestCase): {'c': 'c', 'b': 'b', 'a': 'a', 'd': 'd'}, options=jmespath.Options(dict_cls=OrderedDict)) self.assertEqual(result, ['a', 'b', 'c']) + + def test_can_provide_custom_functions(self): + class CustomFunctions(jmespath.functions.Functions): + @jmespath.functions.signature( + {'types': ['number']}, + {'types': ['number']}) + def _func_custom_add(self, x, y): + return x + y + + @jmespath.functions.signature( + {'types': ['number']}, + {'types': ['number']}) + def _func_my_subtract(self, x, y): + return x - y + + + options = jmespath.Options(custom_functions=CustomFunctions()) + self.assertEqual( + jmespath.search('custom_add(`1`, `2`)', {}, options=options), + 3 + ) + self.assertEqual( + jmespath.search('my_subtract(`10`, `3`)', {}, options=options), + 7 + )
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 2, "test_score": 0 }, "num_modified_files": 4 }
0.9
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.5", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 distlib==0.3.9 filelock==3.4.1 importlib-metadata==4.8.3 importlib-resources==5.4.0 iniconfig==1.1.1 -e git+https://github.com/jmespath/jmespath.py.git@a6baa176714c72a68a2b367810b91162b1125f41#egg=jmespath nose==1.2.1 packaging==21.3 platformdirs==2.4.0 pluggy==1.0.0 py==1.11.0 pyparsing==3.1.4 pytest==7.0.1 tomli==1.2.3 tox==1.4.2 typing_extensions==4.1.1 virtualenv==20.17.1 zipp==3.6.0
name: jmespath.py channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - distlib==0.3.9 - filelock==3.4.1 - importlib-metadata==4.8.3 - importlib-resources==5.4.0 - iniconfig==1.1.1 - nose==1.2.1 - packaging==21.3 - platformdirs==2.4.0 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - tomli==1.2.3 - tox==1.4.2 - typing-extensions==4.1.1 - virtualenv==20.17.1 - wheel==0.24.0 - zipp==3.6.0 prefix: /opt/conda/envs/jmespath.py
[ "tests/test_search.py::TestSearchOptions::test_can_provide_custom_functions" ]
[]
[ "tests/test_search.py::TestSearchOptions::test_can_provide_dict_cls" ]
[]
MIT License
450
rbarrois__python-semanticversion-38
d10ab4cb1d1ca8090132f205017dfc90473c91cc
2016-02-23 22:25:50
d10ab4cb1d1ca8090132f205017dfc90473c91cc
diff --git a/CREDITS b/CREDITS index 53fdef1..ca9a781 100644 --- a/CREDITS +++ b/CREDITS @@ -23,6 +23,7 @@ The project has received contributions from (in alphabetical order): * Michael Hrivnak <[email protected]> (https://github.com/mhrivnak) * William Minchin <[email protected]> (https://github.com/minchinweb) * Dave Hall <[email protected]> (https://github.com/skwashd) +* Martin Ek <[email protected]> (https://github.com/ekmartin) Contributor license agreement diff --git a/semantic_version/base.py b/semantic_version/base.py index 1504642..83a9c25 100644 --- a/semantic_version/base.py +++ b/semantic_version/base.py @@ -405,6 +405,7 @@ class SpecItem(object): KIND_NEQ = '!=' KIND_CARET = '^' KIND_TILDE = '~' + KIND_COMPATIBLE = '~=' # Map a kind alias to its full version KIND_ALIASES = { @@ -412,7 +413,7 @@ class SpecItem(object): KIND_EMPTY: KIND_EQUAL, } - re_spec = re.compile(r'^(<|<=||=|==|>=|>|!=|\^|~)(\d.*)$') + re_spec = re.compile(r'^(<|<=||=|==|>=|>|!=|\^|~|~=)(\d.*)$') def __init__(self, requirement_string): kind, spec = self.parse(requirement_string) @@ -468,6 +469,12 @@ class SpecItem(object): return self.spec <= version < upper elif self.kind == self.KIND_TILDE: return self.spec <= version < self.spec.next_minor() + elif self.kind == self.KIND_COMPATIBLE: + if self.spec.patch: + upper = self.spec.next_minor() + else: + upper = self.spec.next_major() + return self.spec <= version < upper else: # pragma: no cover raise ValueError('Unexpected match kind: %r' % self.kind)
Support compatible release clauses (~=) Hi! Here's an example: ```python Spec('~=1.5.6') # results in a ValueError ``` https://www.python.org/dev/peps/pep-0440/#compatible-release
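Based on the patch above, here is a short usage sketch of how the compatible-release operator is expected to behave once supported; the matched and unmatched versions are taken from the accompanying tests.

```python
from semantic_version import Spec, Version

# With a patch component, ~=1.4.5 accepts versions up to, but excluding,
# the next minor release.
assert Version('1.4.10') in Spec('~=1.4.5')
assert Version('1.5.0') not in Spec('~=1.4.5')

# With only major.minor, ~=1.4 accepts versions up to, but excluding,
# the next major release.
assert Version('1.6.10') in Spec('~=1.4')
assert Version('2.0.0') not in Spec('~=1.4')
```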
rbarrois/python-semanticversion
diff --git a/tests/test_base.py b/tests/test_base.py index 24bf86e..0675b24 100755 --- a/tests/test_base.py +++ b/tests/test_base.py @@ -514,6 +514,14 @@ class SpecItemTestCase(unittest.TestCase): ['0.0.2', '0.0.2-alpha', '0.0.2+abb'], ['0.1.0', '0.0.3', '1.0.0'], ), + '~=1.4.5': ( + ['1.4.5', '1.4.10-alpha', '1.4.10'], + ['1.3.6', '1.4.4', '1.5.0'], + ), + '~=1.4': ( + ['1.4.0', '1.6.10-alpha', '1.6.10'], + ['1.3.0', '2.0.0'], + ), } def test_matches(self): diff --git a/tests/test_match.py b/tests/test_match.py index 49464f8..4d1a96f 100755 --- a/tests/test_match.py +++ b/tests/test_match.py @@ -31,6 +31,7 @@ class MatchTestCase(unittest.TestCase): '!=0.1.2-rc1.3-14.15+build.2012-01-01.11h34', '^0.1.2', '~0.1.2', + '~=0.1.2', ] matches = { @@ -113,6 +114,16 @@ class MatchTestCase(unittest.TestCase): '0.1.2+build4.5', '0.1.3-rc1.3', ], + '~=1.4.5': ( + '1.4.5', + '1.4.10-alpha', + '1.4.10', + ), + '~=1.4': [ + '1.4.0', + '1.6.10-alpha', + '1.6.10', + ], } def test_invalid(self):
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_hyperlinks", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 2 }
2.5
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.5", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 importlib-metadata==4.8.3 iniconfig==1.1.1 packaging==21.3 pluggy==1.0.0 py==1.11.0 pyparsing==3.1.4 pytest==7.0.1 -e git+https://github.com/rbarrois/python-semanticversion.git@d10ab4cb1d1ca8090132f205017dfc90473c91cc#egg=semantic_version tomli==1.2.3 typing_extensions==4.1.1 zipp==3.6.0
name: python-semanticversion channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - tomli==1.2.3 - typing-extensions==4.1.1 - zipp==3.6.0 prefix: /opt/conda/envs/python-semanticversion
[ "tests/test_base.py::SpecItemTestCase::test_matches", "tests/test_match.py::MatchTestCase::test_match", "tests/test_match.py::MatchTestCase::test_simple" ]
[]
[ "tests/test_base.py::ComparisonTestCase::test_identifier_cmp", "tests/test_base.py::ComparisonTestCase::test_identifier_list_cmp", "tests/test_base.py::TopLevelTestCase::test_compare", "tests/test_base.py::TopLevelTestCase::test_match", "tests/test_base.py::TopLevelTestCase::test_validate_invalid", "tests/test_base.py::TopLevelTestCase::test_validate_valid", "tests/test_base.py::VersionTestCase::test_bump_clean_versions", "tests/test_base.py::VersionTestCase::test_bump_prerelease_versions", "tests/test_base.py::VersionTestCase::test_compare_partial_to_self", "tests/test_base.py::VersionTestCase::test_compare_to_self", "tests/test_base.py::VersionTestCase::test_hash", "tests/test_base.py::VersionTestCase::test_invalid_comparisons", "tests/test_base.py::VersionTestCase::test_parsing", "tests/test_base.py::VersionTestCase::test_parsing_partials", "tests/test_base.py::VersionTestCase::test_str", "tests/test_base.py::VersionTestCase::test_str_partials", "tests/test_base.py::SpecItemTestCase::test_components", "tests/test_base.py::SpecItemTestCase::test_equality", "tests/test_base.py::SpecItemTestCase::test_hash", "tests/test_base.py::SpecItemTestCase::test_invalids", "tests/test_base.py::SpecItemTestCase::test_to_string", "tests/test_base.py::CoerceTestCase::test_coerce", "tests/test_base.py::CoerceTestCase::test_invalid", "tests/test_base.py::SpecTestCase::test_contains", "tests/test_base.py::SpecTestCase::test_equality", "tests/test_base.py::SpecTestCase::test_filter_compatible", "tests/test_base.py::SpecTestCase::test_filter_empty", "tests/test_base.py::SpecTestCase::test_filter_incompatible", "tests/test_base.py::SpecTestCase::test_hash", "tests/test_base.py::SpecTestCase::test_matches", "tests/test_base.py::SpecTestCase::test_parsing", "tests/test_base.py::SpecTestCase::test_parsing_split", "tests/test_base.py::SpecTestCase::test_select_compatible", "tests/test_base.py::SpecTestCase::test_select_empty", "tests/test_base.py::SpecTestCase::test_select_incompatible", "tests/test_match.py::MatchTestCase::test_build_check", "tests/test_match.py::MatchTestCase::test_contains", "tests/test_match.py::MatchTestCase::test_invalid", "tests/test_match.py::MatchTestCase::test_prerelease_check" ]
[]
BSD 2-Clause "Simplified" License
451
scieloorg__xylose-96
df12890d7e4d8d986f33844513b9d4f68a148fda
2016-02-25 18:06:10
df12890d7e4d8d986f33844513b9d4f68a148fda
fabiobatalha: Can you write a test case for that? swarzesherz: Updated with a test case fabiobatalha: Sorry for not commenting earlier. I think it would also be nice to include the ISO version of the country in Xylose's return value, when it exists and is valid. swarzesherz: Updated, I added affdict['country_iso_3166'] = aff['p']
diff --git a/xylose/scielodocument.py b/xylose/scielodocument.py index 0b42e3e..c71e5b2 100644 --- a/xylose/scielodocument.py +++ b/xylose/scielodocument.py @@ -1309,6 +1309,7 @@ class Article(object): continue normalized[aff['index']]['normalized'] = True normalized[aff['index']]['country'] = aff.get('country', '') + normalized[aff['index']]['country_iso_3166'] = aff.get('country_iso_3166', '') normalized[aff['index']]['institution'] = aff.get('institution', '') normalized[aff['index']]['state'] = aff.get('state', '') @@ -1335,8 +1336,7 @@ class Article(object): if 'p' in aff and aff['p'] in choices.ISO_3166: affdict['country'] = choices.ISO_3166[aff['p']] - if aff['p'] in choices.ISO_3166: - affdict['country_iso_3166'] = aff['p'] + affdict['country_iso_3166'] = aff['p'] if 's' in aff: affdict['state'] = aff['s'] @@ -1369,6 +1369,9 @@ class Article(object): affdict['state'] = html_decode(aff['s']) if 'p' in aff: affdict['country'] = html_decode(aff['p']) + if 'p' in aff and 'q' in aff and aff['p'] in choices.ISO_3166: + affdict['country'] = choices.ISO_3166[aff['p']] + affdict['country_iso_3166'] = aff['p'] if 'e' in aff: affdict['email'] = html_decode(aff['e']) if 'd' in aff:
Incorrect assignment of country in non-normalized affiliations (v70) In versions of PC-Programs prior to https://github.com/scieloorg/PC-Programs/commit/5e494a031cabb9d718970a6201f3ee6c9847b942 the ```p``` field was assigned as follows: ``` a['p'] = item.country if item.i_country is None else item.i_country a['q'] = item.country if item.i_country is not None else None ``` As a result, even when the field was not normalized, the value corresponding to the ISO_3166 code was assigned, which causes problems in applications such as articles_meta, where ISO_3166 codes end up mixed with country names: http://articlemeta.scielo.org/api/v1/article/?code=S1665-70632015000300102&format=xmlwos ![screen shot 2016-02-25 at 12 02 39](https://cloud.githubusercontent.com/assets/197827/13329058/906141b6-dbb7-11e5-8428-225d4bd89945.png)
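To make the intended behaviour concrete, here is a standalone sketch of the corrected mapping rule rather than the actual xylose code; the `ISO_3166` excerpt and the sample affiliation record are illustrative and mirror the expected output in the test patch below.

```python
# When a non-normalized affiliation carries a valid ISO 3166 code in 'p'
# (with the original country name kept in 'q'), expose both the resolved
# country name and the ISO code.
ISO_3166 = {'BR': 'Brazil', 'MX': 'Mexico', 'US': 'United States'}  # excerpt


def affiliation_country(aff):
    result = {}
    if 'p' in aff:
        result['country'] = aff['p']
    if 'p' in aff and 'q' in aff and aff['p'] in ISO_3166:
        result['country'] = ISO_3166[aff['p']]
        result['country_iso_3166'] = aff['p']
    return result


print(affiliation_country({'p': 'MX', 'q': 'Mexico', 's': 'D.F.'}))
# {'country': 'Mexico', 'country_iso_3166': 'MX'}
```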
scieloorg/xylose
diff --git a/tests/test_document.py b/tests/test_document.py index 758d7b8..6e6b1d6 100644 --- a/tests/test_document.py +++ b/tests/test_document.py @@ -1751,6 +1751,13 @@ class ArticleTests(unittest.TestCase): u"p": u"US", u"s": u"São Paulo", u"_": u"University of Florida Not Normalized" + }, + { + u"i": u"A04", + u"q": u"Mexico", + u"p": u"MX", + u"s": u"Yucatán", + u"_": u"Secretaría de Salud de Yucatán" } ] @@ -1758,13 +1765,15 @@ class ArticleTests(unittest.TestCase): result_index = u''.join([i['index'] for i in sorted(amc, key=lambda k: k['index'])]) result_country = u''.join([i['country'] for i in sorted(amc, key=lambda k: k['index'])]) + result_country_iso = u''.join([i['country_iso_3166'] for i in sorted(amc, key=lambda k: k['index']) if 'country_iso_3166' in i]) result_status = u''.join([str(i['normalized']) for i in sorted(amc, key=lambda k: k['index'])]) result_state = u''.join([i['state'] for i in sorted(amc, key=lambda k: k['index'])]) - self.assertEqual(result_index, u'A01A02A03') - self.assertEqual(result_country, u'BrazilBrazilUS') - self.assertEqual(result_status, u'TrueTrueFalse') - self.assertEqual(result_state, u'São PauloSão Paulo') + self.assertEqual(result_index, u'A01A02A03A04') + self.assertEqual(result_country, u'BrazilBrazilUSMexico') + self.assertEqual(result_country_iso, u'BRBRMX') + self.assertEqual(result_status, u'TrueTrueFalseFalse') + self.assertEqual(result_state, u'São PauloSão PauloYucatán') def test_without_normalized_affiliations(self): article = self.article @@ -1992,6 +2001,41 @@ class ArticleTests(unittest.TestCase): self.assertEqual(article.affiliations, expected) + def test_affiliation_with_country_iso_3166(self): + + article = self.article + + del(article.data['article']['v70']) + + article.data['article']['v70'] = [ + { + u"1": u"Escuela Nacional de Enfermería y Obstetricia", + u"2": u"División de Estudios de Posgrado e Investigación", + u"q": u"Mexico", + u"c": u"México", + u"i": u"A01", + u"l": u"a", + u"p": u"MX", + u"s": u"D.F.", + u"_": u"Universidad Nacional Autónoma de México" + } + ] + + expected = [ + { + 'index': u'A01', + 'city': u'México', + 'state': u'D.F.', + 'country': u'Mexico', + 'country_iso_3166': u'MX', + 'orgdiv1': u'Escuela Nacional de Enfermería y Obstetricia', + 'orgdiv2': u'División de Estudios de Posgrado e Investigación', + 'institution': u'Universidad Nacional Autónoma de México' + } + ] + + self.assertEqual(article.affiliations, expected) + def test_without_scielo_domain(self): article = self.article
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks", "has_media" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 2, "test_score": 0 }, "num_modified_files": 1 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "nose", "coverage", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
coverage==7.8.0 exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work nose==1.3.7 packaging @ file:///croot/packaging_1734472117206/work pluggy @ file:///croot/pluggy_1733169602837/work pytest @ file:///croot/pytest_1738938843180/work tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work -e git+https://github.com/scieloorg/xylose.git@df12890d7e4d8d986f33844513b9d4f68a148fda#egg=xylose
name: xylose channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - exceptiongroup=1.2.0=py39h06a4308_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - packaging=24.2=py39h06a4308_0 - pip=25.0=py39h06a4308_0 - pluggy=1.5.0=py39h06a4308_0 - pytest=8.3.4=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tomli=2.0.1=py39h06a4308_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - coverage==7.8.0 - nose==1.3.7 prefix: /opt/conda/envs/xylose
[ "tests/test_document.py::ArticleTests::test_affiliation_with_country_iso_3166", "tests/test_document.py::ArticleTests::test_mixed_affiliations_1" ]
[]
[ "tests/test_document.py::ToolsTests::test_get_date_wrong_day", "tests/test_document.py::ToolsTests::test_get_date_wrong_day_month", "tests/test_document.py::ToolsTests::test_get_date_wrong_day_month_not_int", "tests/test_document.py::ToolsTests::test_get_date_wrong_day_not_int", "tests/test_document.py::ToolsTests::test_get_date_wrong_month_not_int", "tests/test_document.py::ToolsTests::test_get_date_year", "tests/test_document.py::ToolsTests::test_get_date_year_day", "tests/test_document.py::ToolsTests::test_get_date_year_month", "tests/test_document.py::ToolsTests::test_get_date_year_month_day", "tests/test_document.py::ToolsTests::test_get_date_year_month_day_31", "tests/test_document.py::ToolsTests::test_get_language_iso639_1_defined", "tests/test_document.py::ToolsTests::test_get_language_iso639_1_undefined", "tests/test_document.py::ToolsTests::test_get_language_iso639_2_defined", "tests/test_document.py::ToolsTests::test_get_language_iso639_2_undefined", "tests/test_document.py::ToolsTests::test_get_language_without_iso_format", "tests/test_document.py::IssueTests::test_collection_acronym", "tests/test_document.py::IssueTests::test_is_ahead", "tests/test_document.py::IssueTests::test_is_ahead_1", "tests/test_document.py::IssueTests::test_issue", "tests/test_document.py::IssueTests::test_issue_label_field_v4", "tests/test_document.py::IssueTests::test_issue_label_without_field_v4", "tests/test_document.py::IssueTests::test_issue_url", "tests/test_document.py::IssueTests::test_order", "tests/test_document.py::IssueTests::test_supplement_number", "tests/test_document.py::IssueTests::test_supplement_volume", "tests/test_document.py::IssueTests::test_volume", "tests/test_document.py::IssueTests::test_without_issue", "tests/test_document.py::IssueTests::test_without_suplement_number", "tests/test_document.py::IssueTests::test_without_supplement_volume", "tests/test_document.py::IssueTests::test_without_volume", "tests/test_document.py::JournalTests::test_any_issn_priority_electronic", "tests/test_document.py::JournalTests::test_any_issn_priority_electronic_without_electronic", "tests/test_document.py::JournalTests::test_any_issn_priority_print", "tests/test_document.py::JournalTests::test_any_issn_priority_print_without_print", "tests/test_document.py::JournalTests::test_collection_acronym", "tests/test_document.py::JournalTests::test_creation_date", "tests/test_document.py::JournalTests::test_current_status", "tests/test_document.py::JournalTests::test_current_status_lots_of_changes_study_case_1", "tests/test_document.py::JournalTests::test_current_status_some_changes", "tests/test_document.py::JournalTests::test_current_without_v51", "tests/test_document.py::JournalTests::test_journal", "tests/test_document.py::JournalTests::test_journal_abbreviated_title", "tests/test_document.py::JournalTests::test_journal_acronym", "tests/test_document.py::JournalTests::test_journal_fulltitle", "tests/test_document.py::JournalTests::test_journal_fulltitle_without_subtitle", "tests/test_document.py::JournalTests::test_journal_fulltitle_without_title", "tests/test_document.py::JournalTests::test_journal_subtitle", "tests/test_document.py::JournalTests::test_journal_title", "tests/test_document.py::JournalTests::test_journal_title_nlm", "tests/test_document.py::JournalTests::test_journal_url", "tests/test_document.py::JournalTests::test_journal_without_subtitle", "tests/test_document.py::JournalTests::test_languages", "tests/test_document.py::JournalTests::test_languages_without_v350", 
"tests/test_document.py::JournalTests::test_load_issn_with_v435", "tests/test_document.py::JournalTests::test_load_issn_with_v935_and_v35_ONLINE", "tests/test_document.py::JournalTests::test_load_issn_with_v935_and_v35_PRINT", "tests/test_document.py::JournalTests::test_load_issn_with_v935_equal_v400_and_v35_ONLINE", "tests/test_document.py::JournalTests::test_load_issn_with_v935_equal_v400_and_v35_PRINT", "tests/test_document.py::JournalTests::test_load_issn_with_v935_without_v35", "tests/test_document.py::JournalTests::test_load_issn_without_v935_and_v35_ONLINE", "tests/test_document.py::JournalTests::test_load_issn_without_v935_and_v35_PRINT", "tests/test_document.py::JournalTests::test_load_issn_without_v935_without_v35", "tests/test_document.py::JournalTests::test_periodicity", "tests/test_document.py::JournalTests::test_periodicity_in_months", "tests/test_document.py::JournalTests::test_periodicity_in_months_out_of_choices", "tests/test_document.py::JournalTests::test_periodicity_out_of_choices", "tests/test_document.py::JournalTests::test_permission_id", "tests/test_document.py::JournalTests::test_permission_t0", "tests/test_document.py::JournalTests::test_permission_t1", "tests/test_document.py::JournalTests::test_permission_t2", "tests/test_document.py::JournalTests::test_permission_t3", "tests/test_document.py::JournalTests::test_permission_t4", "tests/test_document.py::JournalTests::test_permission_text", "tests/test_document.py::JournalTests::test_permission_url", "tests/test_document.py::JournalTests::test_permission_without_v540", "tests/test_document.py::JournalTests::test_permission_without_v540_t", "tests/test_document.py::JournalTests::test_publisher_loc", "tests/test_document.py::JournalTests::test_publisher_name", "tests/test_document.py::JournalTests::test_scielo_issn", "tests/test_document.py::JournalTests::test_status", "tests/test_document.py::JournalTests::test_status_lots_of_changes", "tests/test_document.py::JournalTests::test_status_lots_of_changes_study_case_1", "tests/test_document.py::JournalTests::test_status_lots_of_changes_with_reason", "tests/test_document.py::JournalTests::test_status_some_changes", "tests/test_document.py::JournalTests::test_status_without_v51", "tests/test_document.py::JournalTests::test_subject_areas", "tests/test_document.py::JournalTests::test_update_date", "tests/test_document.py::JournalTests::test_without_journal_abbreviated_title", "tests/test_document.py::JournalTests::test_without_journal_acronym", "tests/test_document.py::JournalTests::test_without_journal_title", "tests/test_document.py::JournalTests::test_without_journal_title_nlm", "tests/test_document.py::JournalTests::test_without_journal_url", "tests/test_document.py::JournalTests::test_without_periodicity", "tests/test_document.py::JournalTests::test_without_periodicity_in_months", "tests/test_document.py::JournalTests::test_without_publisher_loc", "tests/test_document.py::JournalTests::test_without_publisher_name", "tests/test_document.py::JournalTests::test_without_scielo_domain", "tests/test_document.py::JournalTests::test_without_scielo_domain_title_v690", "tests/test_document.py::JournalTests::test_without_subject_areas", "tests/test_document.py::JournalTests::test_without_wos_citation_indexes", "tests/test_document.py::JournalTests::test_without_wos_subject_areas", "tests/test_document.py::JournalTests::test_wos_citation_indexes", "tests/test_document.py::JournalTests::test_wos_subject_areas", "tests/test_document.py::ArticleTests::test_acceptance_date", 
"tests/test_document.py::ArticleTests::test_affiliation_just_with_affiliation_name", "tests/test_document.py::ArticleTests::test_affiliation_without_affiliation_name", "tests/test_document.py::ArticleTests::test_affiliations", "tests/test_document.py::ArticleTests::test_ahead_publication_date", "tests/test_document.py::ArticleTests::test_article", "tests/test_document.py::ArticleTests::test_author_with_two_affiliations", "tests/test_document.py::ArticleTests::test_author_with_two_role", "tests/test_document.py::ArticleTests::test_author_without_affiliations", "tests/test_document.py::ArticleTests::test_author_without_surname_and_given_names", "tests/test_document.py::ArticleTests::test_authors", "tests/test_document.py::ArticleTests::test_collection_acronym", "tests/test_document.py::ArticleTests::test_collection_acronym_priorizing_collection", "tests/test_document.py::ArticleTests::test_collection_acronym_retrieving_v992", "tests/test_document.py::ArticleTests::test_collection_name_brazil", "tests/test_document.py::ArticleTests::test_collection_name_undefined", "tests/test_document.py::ArticleTests::test_corporative_authors", "tests/test_document.py::ArticleTests::test_creation_date", "tests/test_document.py::ArticleTests::test_creation_date_1", "tests/test_document.py::ArticleTests::test_creation_date_2", "tests/test_document.py::ArticleTests::test_data_model_version_html", "tests/test_document.py::ArticleTests::test_data_model_version_html_1", "tests/test_document.py::ArticleTests::test_data_model_version_xml", "tests/test_document.py::ArticleTests::test_document_type", "tests/test_document.py::ArticleTests::test_doi", "tests/test_document.py::ArticleTests::test_doi_clean_1", "tests/test_document.py::ArticleTests::test_doi_clean_2", "tests/test_document.py::ArticleTests::test_doi_v237", "tests/test_document.py::ArticleTests::test_e_location", "tests/test_document.py::ArticleTests::test_end_page_loaded_crazy_legacy_way_1", "tests/test_document.py::ArticleTests::test_end_page_loaded_crazy_legacy_way_2", "tests/test_document.py::ArticleTests::test_end_page_loaded_through_xml", "tests/test_document.py::ArticleTests::test_file_code", "tests/test_document.py::ArticleTests::test_file_code_crazy_slashs_1", "tests/test_document.py::ArticleTests::test_file_code_crazy_slashs_2", "tests/test_document.py::ArticleTests::test_first_author", "tests/test_document.py::ArticleTests::test_first_author_without_author", "tests/test_document.py::ArticleTests::test_fulltexts_field_fulltexts", "tests/test_document.py::ArticleTests::test_fulltexts_without_field_fulltexts", "tests/test_document.py::ArticleTests::test_html_url", "tests/test_document.py::ArticleTests::test_invalid_document_type", "tests/test_document.py::ArticleTests::test_issue_url", "tests/test_document.py::ArticleTests::test_journal_abbreviated_title", "tests/test_document.py::ArticleTests::test_journal_acronym", "tests/test_document.py::ArticleTests::test_journal_title", "tests/test_document.py::ArticleTests::test_keywords", "tests/test_document.py::ArticleTests::test_keywords_iso639_2", "tests/test_document.py::ArticleTests::test_keywords_with_undefined_language", "tests/test_document.py::ArticleTests::test_keywords_without_subfield_k", "tests/test_document.py::ArticleTests::test_keywords_without_subfield_l", "tests/test_document.py::ArticleTests::test_languages_field_fulltexts", "tests/test_document.py::ArticleTests::test_languages_field_v40", "tests/test_document.py::ArticleTests::test_last_page", 
"tests/test_document.py::ArticleTests::test_normalized_affiliations", "tests/test_document.py::ArticleTests::test_normalized_affiliations_undefined_ISO_3166_CODE", "tests/test_document.py::ArticleTests::test_normalized_affiliations_without_p", "tests/test_document.py::ArticleTests::test_order", "tests/test_document.py::ArticleTests::test_original_abstract_with_just_one_language_defined", "tests/test_document.py::ArticleTests::test_original_abstract_with_language_defined", "tests/test_document.py::ArticleTests::test_original_abstract_with_language_defined_but_different_of_the_article_original_language", "tests/test_document.py::ArticleTests::test_original_abstract_without_language_defined", "tests/test_document.py::ArticleTests::test_original_html_field_body", "tests/test_document.py::ArticleTests::test_original_language_invalid_iso639_2", "tests/test_document.py::ArticleTests::test_original_language_iso639_2", "tests/test_document.py::ArticleTests::test_original_language_original", "tests/test_document.py::ArticleTests::test_original_section_field_v49", "tests/test_document.py::ArticleTests::test_original_title_subfield_t", "tests/test_document.py::ArticleTests::test_original_title_with_just_one_language_defined", "tests/test_document.py::ArticleTests::test_original_title_with_language_defined", "tests/test_document.py::ArticleTests::test_original_title_with_language_defined_but_different_of_the_article_original_language", "tests/test_document.py::ArticleTests::test_original_title_without_language_defined", "tests/test_document.py::ArticleTests::test_pdf_url", "tests/test_document.py::ArticleTests::test_processing_date", "tests/test_document.py::ArticleTests::test_processing_date_1", "tests/test_document.py::ArticleTests::test_project_name", "tests/test_document.py::ArticleTests::test_project_sponsors", "tests/test_document.py::ArticleTests::test_publication_contract", "tests/test_document.py::ArticleTests::test_publication_date", "tests/test_document.py::ArticleTests::test_publisher_id", "tests/test_document.py::ArticleTests::test_publisher_loc", "tests/test_document.py::ArticleTests::test_publisher_name", "tests/test_document.py::ArticleTests::test_receive_date", "tests/test_document.py::ArticleTests::test_review_date", "tests/test_document.py::ArticleTests::test_secion_code_field_v49", "tests/test_document.py::ArticleTests::test_section_code_nd_field_v49", "tests/test_document.py::ArticleTests::test_section_code_without_field_v49", "tests/test_document.py::ArticleTests::test_section_field_v49", "tests/test_document.py::ArticleTests::test_section_nd_field_v49", "tests/test_document.py::ArticleTests::test_section_without_field_v49", "tests/test_document.py::ArticleTests::test_start_page", "tests/test_document.py::ArticleTests::test_start_page_loaded_crazy_legacy_way_1", "tests/test_document.py::ArticleTests::test_start_page_loaded_crazy_legacy_way_2", "tests/test_document.py::ArticleTests::test_start_page_loaded_through_xml", "tests/test_document.py::ArticleTests::test_subject_areas", "tests/test_document.py::ArticleTests::test_thesis_degree", "tests/test_document.py::ArticleTests::test_thesis_organization", "tests/test_document.py::ArticleTests::test_thesis_organization_and_division", "tests/test_document.py::ArticleTests::test_thesis_organization_without_name", "tests/test_document.py::ArticleTests::test_translated_abstracts", "tests/test_document.py::ArticleTests::test_translated_abstracts_without_v83", "tests/test_document.py::ArticleTests::test_translated_abtracts_iso639_2", 
"tests/test_document.py::ArticleTests::test_translated_htmls_field_body", "tests/test_document.py::ArticleTests::test_translated_section_field_v49", "tests/test_document.py::ArticleTests::test_translated_titles", "tests/test_document.py::ArticleTests::test_translated_titles_iso639_2", "tests/test_document.py::ArticleTests::test_translated_titles_without_v12", "tests/test_document.py::ArticleTests::test_update_date", "tests/test_document.py::ArticleTests::test_update_date_1", "tests/test_document.py::ArticleTests::test_update_date_2", "tests/test_document.py::ArticleTests::test_update_date_3", "tests/test_document.py::ArticleTests::test_whitwout_acceptance_date", "tests/test_document.py::ArticleTests::test_whitwout_ahead_publication_date", "tests/test_document.py::ArticleTests::test_whitwout_receive_date", "tests/test_document.py::ArticleTests::test_whitwout_review_date", "tests/test_document.py::ArticleTests::test_without_affiliations", "tests/test_document.py::ArticleTests::test_without_authors", "tests/test_document.py::ArticleTests::test_without_citations", "tests/test_document.py::ArticleTests::test_without_collection_acronym", "tests/test_document.py::ArticleTests::test_without_corporative_authors", "tests/test_document.py::ArticleTests::test_without_document_type", "tests/test_document.py::ArticleTests::test_without_doi", "tests/test_document.py::ArticleTests::test_without_e_location", "tests/test_document.py::ArticleTests::test_without_html_url", "tests/test_document.py::ArticleTests::test_without_issue_url", "tests/test_document.py::ArticleTests::test_without_journal_abbreviated_title", "tests/test_document.py::ArticleTests::test_without_journal_acronym", "tests/test_document.py::ArticleTests::test_without_journal_title", "tests/test_document.py::ArticleTests::test_without_keywords", "tests/test_document.py::ArticleTests::test_without_last_page", "tests/test_document.py::ArticleTests::test_without_normalized_affiliations", "tests/test_document.py::ArticleTests::test_without_order", "tests/test_document.py::ArticleTests::test_without_original_abstract", "tests/test_document.py::ArticleTests::test_without_original_title", "tests/test_document.py::ArticleTests::test_without_pages", "tests/test_document.py::ArticleTests::test_without_pdf_url", "tests/test_document.py::ArticleTests::test_without_processing_date", "tests/test_document.py::ArticleTests::test_without_project_name", "tests/test_document.py::ArticleTests::test_without_project_sponsor", "tests/test_document.py::ArticleTests::test_without_publication_contract", "tests/test_document.py::ArticleTests::test_without_publication_date", "tests/test_document.py::ArticleTests::test_without_publisher_id", "tests/test_document.py::ArticleTests::test_without_publisher_loc", "tests/test_document.py::ArticleTests::test_without_publisher_name", "tests/test_document.py::ArticleTests::test_without_scielo_domain", "tests/test_document.py::ArticleTests::test_without_scielo_domain_article_v69", "tests/test_document.py::ArticleTests::test_without_scielo_domain_article_v69_and_with_title_v690", "tests/test_document.py::ArticleTests::test_without_scielo_domain_title_v690", "tests/test_document.py::ArticleTests::test_without_start_page", "tests/test_document.py::ArticleTests::test_without_subject_areas", "tests/test_document.py::ArticleTests::test_without_thesis_degree", "tests/test_document.py::ArticleTests::test_without_thesis_organization", "tests/test_document.py::ArticleTests::test_without_wos_citation_indexes", 
"tests/test_document.py::ArticleTests::test_without_wos_subject_areas", "tests/test_document.py::ArticleTests::test_wos_citation_indexes", "tests/test_document.py::ArticleTests::test_wos_subject_areas", "tests/test_document.py::CitationTest::test_a_link_access_date", "tests/test_document.py::CitationTest::test_analytic_institution_for_a_article_citation", "tests/test_document.py::CitationTest::test_analytic_institution_for_a_book_citation", "tests/test_document.py::CitationTest::test_article_title", "tests/test_document.py::CitationTest::test_article_without_title", "tests/test_document.py::CitationTest::test_authors_article", "tests/test_document.py::CitationTest::test_authors_book", "tests/test_document.py::CitationTest::test_authors_link", "tests/test_document.py::CitationTest::test_authors_thesis", "tests/test_document.py::CitationTest::test_book_chapter_title", "tests/test_document.py::CitationTest::test_book_edition", "tests/test_document.py::CitationTest::test_book_volume", "tests/test_document.py::CitationTest::test_book_without_chapter_title", "tests/test_document.py::CitationTest::test_citation_sample_congress", "tests/test_document.py::CitationTest::test_citation_sample_link", "tests/test_document.py::CitationTest::test_citation_sample_link_without_comment", "tests/test_document.py::CitationTest::test_conference_edition", "tests/test_document.py::CitationTest::test_conference_name", "tests/test_document.py::CitationTest::test_conference_sponsor", "tests/test_document.py::CitationTest::test_conference_without_name", "tests/test_document.py::CitationTest::test_conference_without_sponsor", "tests/test_document.py::CitationTest::test_date", "tests/test_document.py::CitationTest::test_doi", "tests/test_document.py::CitationTest::test_editor", "tests/test_document.py::CitationTest::test_elocation_14", "tests/test_document.py::CitationTest::test_elocation_514", "tests/test_document.py::CitationTest::test_end_page_14", "tests/test_document.py::CitationTest::test_end_page_514", "tests/test_document.py::CitationTest::test_end_page_withdout_data", "tests/test_document.py::CitationTest::test_first_author_article", "tests/test_document.py::CitationTest::test_first_author_book", "tests/test_document.py::CitationTest::test_first_author_link", "tests/test_document.py::CitationTest::test_first_author_thesis", "tests/test_document.py::CitationTest::test_first_author_without_monographic_authors", "tests/test_document.py::CitationTest::test_first_author_without_monographic_authors_but_not_a_book_citation", "tests/test_document.py::CitationTest::test_index_number", "tests/test_document.py::CitationTest::test_institutions_all_fields", "tests/test_document.py::CitationTest::test_institutions_v11", "tests/test_document.py::CitationTest::test_institutions_v17", "tests/test_document.py::CitationTest::test_institutions_v29", "tests/test_document.py::CitationTest::test_institutions_v50", "tests/test_document.py::CitationTest::test_institutions_v58", "tests/test_document.py::CitationTest::test_invalid_edition", "tests/test_document.py::CitationTest::test_isbn", "tests/test_document.py::CitationTest::test_isbn_but_not_a_book", "tests/test_document.py::CitationTest::test_issn", "tests/test_document.py::CitationTest::test_issn_but_not_an_article", "tests/test_document.py::CitationTest::test_issue_part", "tests/test_document.py::CitationTest::test_issue_title", "tests/test_document.py::CitationTest::test_journal_issue", "tests/test_document.py::CitationTest::test_journal_volume", 
"tests/test_document.py::CitationTest::test_link", "tests/test_document.py::CitationTest::test_link_title", "tests/test_document.py::CitationTest::test_link_without_title", "tests/test_document.py::CitationTest::test_monographic_authors", "tests/test_document.py::CitationTest::test_monographic_first_author", "tests/test_document.py::CitationTest::test_pages_14", "tests/test_document.py::CitationTest::test_pages_514", "tests/test_document.py::CitationTest::test_pages_withdout_data", "tests/test_document.py::CitationTest::test_publication_type_article", "tests/test_document.py::CitationTest::test_publication_type_book", "tests/test_document.py::CitationTest::test_publication_type_conference", "tests/test_document.py::CitationTest::test_publication_type_link", "tests/test_document.py::CitationTest::test_publication_type_thesis", "tests/test_document.py::CitationTest::test_publication_type_undefined", "tests/test_document.py::CitationTest::test_publisher", "tests/test_document.py::CitationTest::test_publisher_address", "tests/test_document.py::CitationTest::test_publisher_address_without_e", "tests/test_document.py::CitationTest::test_series_book", "tests/test_document.py::CitationTest::test_series_but_neither_journal_book_or_conference_citation", "tests/test_document.py::CitationTest::test_series_conference", "tests/test_document.py::CitationTest::test_series_journal", "tests/test_document.py::CitationTest::test_source_book_title", "tests/test_document.py::CitationTest::test_source_journal", "tests/test_document.py::CitationTest::test_source_journal_without_journal_title", "tests/test_document.py::CitationTest::test_sponsor", "tests/test_document.py::CitationTest::test_start_page_14", "tests/test_document.py::CitationTest::test_start_page_514", "tests/test_document.py::CitationTest::test_start_page_withdout_data", "tests/test_document.py::CitationTest::test_thesis_institution", "tests/test_document.py::CitationTest::test_thesis_title", "tests/test_document.py::CitationTest::test_thesis_without_title", "tests/test_document.py::CitationTest::test_title_when_article_citation", "tests/test_document.py::CitationTest::test_title_when_conference_citation", "tests/test_document.py::CitationTest::test_title_when_link_citation", "tests/test_document.py::CitationTest::test_title_when_thesis_citation", "tests/test_document.py::CitationTest::test_with_volume_but_not_a_journal_article_neither_a_book", "tests/test_document.py::CitationTest::test_without_analytic_institution", "tests/test_document.py::CitationTest::test_without_authors", "tests/test_document.py::CitationTest::test_without_date", "tests/test_document.py::CitationTest::test_without_doi", "tests/test_document.py::CitationTest::test_without_edition", "tests/test_document.py::CitationTest::test_without_editor", "tests/test_document.py::CitationTest::test_without_first_author", "tests/test_document.py::CitationTest::test_without_index_number", "tests/test_document.py::CitationTest::test_without_institutions", "tests/test_document.py::CitationTest::test_without_issue", "tests/test_document.py::CitationTest::test_without_issue_part", "tests/test_document.py::CitationTest::test_without_issue_title", "tests/test_document.py::CitationTest::test_without_link", "tests/test_document.py::CitationTest::test_without_monographic_authors", "tests/test_document.py::CitationTest::test_without_monographic_authors_but_not_a_book_citation", "tests/test_document.py::CitationTest::test_without_publisher", 
"tests/test_document.py::CitationTest::test_without_publisher_address", "tests/test_document.py::CitationTest::test_without_series", "tests/test_document.py::CitationTest::test_without_sponsor", "tests/test_document.py::CitationTest::test_without_thesis_institution", "tests/test_document.py::CitationTest::test_without_volume" ]
[]
BSD 2-Clause "Simplified" License
452
kako-nawao__ffconv-25
b93056b5cb13eb62cbc8c3aa0e21c45e908d153f
2016-02-26 13:29:15
b93056b5cb13eb62cbc8c3aa0e21c45e908d153f
diff --git a/DESCRIPTION.rst b/DESCRIPTION.rst index d1b41af..2649378 100644 --- a/DESCRIPTION.rst +++ b/DESCRIPTION.rst @@ -1,4 +1,4 @@ ffconv -======================= +====== Process media files to match profiles using ffmpeg. \ No newline at end of file diff --git a/ffconv/stream_processors.py b/ffconv/stream_processors.py index e81feac..145207b 100644 --- a/ffconv/stream_processors.py +++ b/ffconv/stream_processors.py @@ -108,9 +108,19 @@ class VideoProcessor(StreamProcessor): # Set input reference frames value self.refs = int(stream['refs']) - # Set target values for frames, profile, level, preset and quality - height = int(stream.get('height', 720)) - self.max_refs = profile[self.media_type]['max_refs'].get(height, 5) + # Assert height is included in stream + if 'height' not in stream: + raise KeyError("Height not specified in video stream.") + + # Get height and set target for ref frames (default is 4) + self.max_refs = 4 + height = int(stream['height']) + for h, f in sorted(profile[self.media_type]['max_refs'].items()): + if height <= h: + self.max_refs = f + break + + # Set target values for profile, level, preset and quality self.target_profile = profile[self.media_type]['profile'] self.target_level = profile[self.media_type]['level'] self.target_preset = profile[self.media_type]['preset'] diff --git a/setup.py b/setup.py index 20c088d..e4ec3cc 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ setup( name='ffconv', # https://packaging.python.org/en/latest/single_source_version.html - version='0.0.3', + version='0.1.0', description='Process media files with ffmpeg', long_description=long_description,
Height-to-Reference Frames matching error Sometimes the video height does not match the standard values exactly (720, 1080, etc.), and in those cases the stream processor decides incorrectly: a video stream with a height of 704 is limited to 4 reference frames when it should be allowed 8. When deciding the maximum number of ref frames, we should *compare heights* instead of matching them exactly.
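For illustration, here is a standalone sketch of the height comparison that the patch above introduces; the `max_refs` table values are illustrative rather than ffconv's real profile, and the fallback of 4 follows the patch.

```python
def max_ref_frames(height, max_refs_by_height, default=4):
    # Use the smallest profile height that is >= the stream height instead
    # of requiring an exact match; fall back above all known sizes.
    for h, refs in sorted(max_refs_by_height.items()):
        if height <= h:
            return refs
    return default


table = {480: 16, 720: 8, 1080: 4}   # illustrative values only
print(max_ref_frames(704, table))    # 8  (704 is treated like 720)
print(max_ref_frames(1080, table))   # 4
print(max_ref_frames(2160, table))   # 4  (falls back to the default)
```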
kako-nawao/ffconv
diff --git a/tests/test_files.py b/tests/test_files.py index e332188..e4f8612 100644 --- a/tests/test_files.py +++ b/tests/test_files.py @@ -108,7 +108,7 @@ class FileProcessorTest(TestCase): self.assertEqual(res, []) # Process 1 video only - streams = [{'codec_type': 'video', 'codec_name': 'h264', 'index': 0, 'refs': 1}] + streams = [{'codec_type': 'video', 'codec_name': 'h264', 'index': 0, 'refs': 1, 'height': 720}] res = processor.process_streams(streams) self.assertEqual(len(res), 1) self.assertEqual(VideoProcessor.process.call_count, 1) @@ -117,7 +117,7 @@ class FileProcessorTest(TestCase): VideoProcessor.process.reset_mock() # Process 1 video, 2 audio, 2 subs - streams = [{'codec_type': 'video', 'codec_name': 'h264', 'index': 0, 'refs': 4}, + streams = [{'codec_type': 'video', 'codec_name': 'h264', 'index': 0, 'refs': 4, 'height': 720}, {'codec_type': 'audio', 'codec_name': 'aac', 'index': 0, 'channels': 2}, {'codec_type': 'audio', 'codec_name': 'aac', 'index': 0, 'channels': 6}, {'codec_type': 'subtitle', 'codec_name': 'srt', 'index': 0}, @@ -143,7 +143,7 @@ class FileProcessorTest(TestCase): self.assertEqual(res, []) # Process 1 video, 2 audio, 2 subs - streams = [{'codec_type': 'video', 'codec_name': 'h264', 'index': 0, 'refs': 4}, + streams = [{'codec_type': 'video', 'codec_name': 'h264', 'index': 0, 'refs': 4, 'height': 480}, {'codec_type': 'audio', 'codec_name': 'aac', 'index': 0, 'channels': 2}, {'codec_type': 'audio', 'codec_name': 'aac', 'index': 0, 'channels': 6}, {'codec_type': 'subtitle', 'codec_name': 'srt', 'index': 0}, @@ -220,7 +220,7 @@ class FileProcessorTest(TestCase): @patch('ffconv.stream_processors.execute_cmd', MagicMock()) @patch('ffconv.file_processor.execute_cmd', MagicMock()) @patch('ffconv.file_processor.FileProcessor.probe', MagicMock(return_value=[ - {'index': 0, 'codec_type': 'video', 'codec_name': 'h264', 'refs': 4}, + {'index': 0, 'codec_type': 'video', 'codec_name': 'h264', 'refs': 4, 'height': 720}, {'index': 1, 'codec_type': 'audio', 'codec_name': 'aac', 'channels': 6, 'tags': {'LANGUAGE': 'eng'}}, {'index': 2, 'codec_type': 'subtitle', 'codec_name': 'ass', 'tags': {'LANGUAGE': 'spa'}}, {'index': 3, 'codec_type': 'subtitle', 'codec_name': 'srt', 'tags': {'LANGUAGE': 'por'}}, diff --git a/tests/test_streams.py b/tests/test_streams.py index a24c1bb..04584d8 100644 --- a/tests/test_streams.py +++ b/tests/test_streams.py @@ -64,6 +64,26 @@ class VideoProcessorTest(TestCase): processor.convert.reset_mock() processor.clean_up.reset_mock() + # Attempt to process 704:8, nothing to do + stream = {'index': 7, 'codec_type': 'video', 'codec_name': 'h264', + 'refs': 8, 'height': 704} + processor = VideoProcessor(input, stream, profile) + res = processor.process() + self.assertEqual(res, {'input': 'some-film.mkv', 'index': 7}) + self.assertFalse(processor.convert.called) + self.assertFalse(processor.clean_up.called) + + # Attempt process for 2160:8 refs, needs to convert (default ref is 4) + stream = {'index': 7, 'codec_type': 'video', 'codec_name': 'h264', + 'refs': 8, 'height': 2160} + processor = VideoProcessor(input, stream, profile) + res = processor.process() + self.assertEqual(res, {'input': 'video-7.mp4', 'index': 0}) + self.assertTrue(processor.convert.called) + self.assertTrue(processor.clean_up.called) + processor.convert.reset_mock() + processor.clean_up.reset_mock() + # Attempt to process xvid, turn to h264 stream = {'index': 7, 'codec_type': 'video', 'codec_name': 'xvid', 'refs': 1, 'height': 720}
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 2, "test_score": 0 }, "num_modified_files": 3 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
astroid==3.3.9 coverage==7.8.0 dill==0.3.9 exceptiongroup==1.2.2 -e git+https://github.com/kako-nawao/ffconv.git@b93056b5cb13eb62cbc8c3aa0e21c45e908d153f#egg=ffconv iniconfig==2.1.0 isort==6.0.1 mccabe==0.7.0 nose2==0.15.1 packaging==24.2 platformdirs==4.3.7 pluggy==1.5.0 pylint==3.3.6 pytest==8.3.5 tomli==2.2.1 tomlkit==0.13.2 typing_extensions==4.13.0
name: ffconv channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - astroid==3.3.9 - coverage==7.8.0 - dill==0.3.9 - exceptiongroup==1.2.2 - iniconfig==2.1.0 - isort==6.0.1 - mccabe==0.7.0 - nose2==0.15.1 - packaging==24.2 - platformdirs==4.3.7 - pluggy==1.5.0 - pylint==3.3.6 - pytest==8.3.5 - tomli==2.2.1 - tomlkit==0.13.2 - typing-extensions==4.13.0 prefix: /opt/conda/envs/ffconv
[ "tests/test_streams.py::VideoProcessorTest::test_process" ]
[]
[ "tests/test_files.py::ExecuteCommandTest::test_errors", "tests/test_files.py::FileProcessorTest::test_clean_up", "tests/test_files.py::FileProcessorTest::test_init", "tests/test_files.py::FileProcessorTest::test_merge", "tests/test_files.py::FileProcessorTest::test_probe", "tests/test_files.py::FileProcessorTest::test_process", "tests/test_files.py::FileProcessorTest::test_process_streams_error", "tests/test_files.py::FileProcessorTest::test_process_streams_success", "tests/test_files.py::FileProcessorTest::test_replace_original", "tests/test_streams.py::VideoProcessorTest::test_convert", "tests/test_streams.py::VideoProcessorTest::test_init", "tests/test_streams.py::AudioProcessorTest::test_convert", "tests/test_streams.py::AudioProcessorTest::test_init", "tests/test_streams.py::AudioProcessorTest::test_process", "tests/test_streams.py::SubtitleProcessorTest::test_clean_up", "tests/test_streams.py::SubtitleProcessorTest::test_convert", "tests/test_streams.py::SubtitleProcessorTest::test_init", "tests/test_streams.py::SubtitleProcessorTest::test_process" ]
[]
null
453
nose-devs__nose2-278
f4ab61b95a67aecf61abb161e4cf138bf8da6ce8
2016-02-28 09:32:12
b5eeeba68f2bad3e199af8903cdd0540b95f06cf
diff --git a/nose2/plugins/junitxml.py b/nose2/plugins/junitxml.py index e61a08b..05cfee6 100644 --- a/nose2/plugins/junitxml.py +++ b/nose2/plugins/junitxml.py @@ -164,11 +164,10 @@ class JUnitXmlReporter(events.Plugin): skipped.set('message', 'expected test failure') skipped.text = msg - system_err = ET.SubElement(testcase, 'system-err') - system_err.text = string_cleanup( + system_out = ET.SubElement(testcase, 'system-out') + system_out.text = string_cleanup( '\n'.join(event.metadata.get('logs', '')), - self.keep_restricted - ) + self.keep_restricted) def _check(self): if not os.path.exists(os.path.dirname(self.path)):
junit-xml always adds logs to system_err This is really strange, because logs are typically not emitted on stderr. In nosetests we did not get logs for successful tests, but now they are always reported under system_err. Would it be possible to output them in system_out instead? In the testOutcome handler, just replace system_err with this code ... ```python system_out = ET.SubElement(testcase, 'system-out') system_out.text = string_cleanup( '\n'.join(event.metadata.get('logs', '')), self.keep_restricted ) ```
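For reference, a small self-contained sketch of the element the suggested change produces, using only `xml.etree.ElementTree`; the test name and log line are illustrative.

```python
import xml.etree.ElementTree as ET

# Mirror the suggested handler change: captured log lines end up in a
# <system-out> child of the <testcase> element instead of <system-err>.
testcase = ET.Element('testcase', name='test_with_log')
system_out = ET.SubElement(testcase, 'system-out')
system_out.text = '\n'.join(['INFO:mypkg:log message'])  # illustrative log line

print(ET.tostring(testcase).decode())
# <testcase name="test_with_log"><system-out>INFO:mypkg:log message</system-out></testcase>
```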
nose-devs/nose2
diff --git a/nose2/tests/unit/test_junitxml.py b/nose2/tests/unit/test_junitxml.py index 706daa2..0bb96f0 100644 --- a/nose2/tests/unit/test_junitxml.py +++ b/nose2/tests/unit/test_junitxml.py @@ -145,7 +145,6 @@ class TestJunitXmlPlugin(TestCase): test(self.result) case = self.plugin.tree.find('testcase') error = case.find('error') - ending = six.u(' \uFFFD\uFFFD') assert error is not None self.assertRegex(error.text, self.EXPECTED_RE_SAFE) @@ -277,22 +276,22 @@ class TestJunitXmlPlugin(TestCase): self.assertEqual(inital_dir, os.path.dirname(os.path.realpath(self.plugin.path))) - def test_xml_contains_empty_system_err_without_logcapture(self): + def test_xml_contains_empty_system_out_without_logcapture(self): test = self.case('test_with_log') test(self.result) case = self.plugin.tree.find('testcase') - system_err = case.find('system-err') - assert system_err is not None - assert not system_err.text + system_out = case.find('system-out') + assert system_out is not None + assert not system_out.text - def test_xml_contains_log_message_in_system_err_with_logcapture(self): + def test_xml_contains_log_message_in_system_out_with_logcapture(self): self.logcapture_plugin = logcapture.LogCapture(session=self.session) self.logcapture_plugin.register() test = self.case('test_with_log') test(self.result) case = self.plugin.tree.find('testcase') - system_err = case.find('system-err') - assert system_err is not None - assert 'log message' in system_err.text - assert 'INFO' in system_err.text + system_out = case.find('system-out') + assert system_out is not None + assert 'log message' in system_out.text + assert 'INFO' in system_out.text
{ "commit_name": "head_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 0, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 1 }
0.6
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "nose2", "cov-core>=1.12", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.5", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 cov-core==1.15.0 coverage==6.2 importlib-metadata==4.8.3 iniconfig==1.1.1 -e git+https://github.com/nose-devs/nose2.git@f4ab61b95a67aecf61abb161e4cf138bf8da6ce8#egg=nose2 packaging==21.3 pluggy==1.0.0 py==1.11.0 pyparsing==3.1.4 pytest==7.0.1 six==1.17.0 tomli==1.2.3 typing_extensions==4.1.1 zipp==3.6.0
name: nose2 channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - cov-core==1.15.0 - coverage==6.2 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - six==1.17.0 - tomli==1.2.3 - typing-extensions==4.1.1 - zipp==3.6.0 prefix: /opt/conda/envs/nose2
[ "nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_xml_contains_empty_system_out_without_logcapture", "nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_xml_contains_log_message_in_system_out_with_logcapture" ]
[]
[ "nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_error_bad_xml", "nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_error_bad_xml_b", "nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_error_bad_xml_b_keep", "nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_error_bad_xml_keep", "nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_error_includes_traceback", "nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_failure_includes_traceback", "nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_generator_test_full_name_correct", "nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_generator_test_name_correct", "nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_params_test_full_name_correct", "nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_params_test_name_correct", "nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_skip_includes_skipped", "nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_success_added_to_xml", "nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_writes_xml_file_at_end", "nose2/tests/unit/test_junitxml.py::TestJunitXmlPlugin::test_xml_file_path_is_not_affected_by_chdir_in_test" ]
[]
BSD
454
scieloorg__xylose-108
9d72bccf95503133b6fe7ef55ec88f9cf9b50a71
2016-02-29 21:07:48
743d8ca8a32b6e6e82b1ed0fc97f7d240c85cba5
diff --git a/xylose/scielodocument.py b/xylose/scielodocument.py index 8f9703d..8676c4a 100644 --- a/xylose/scielodocument.py +++ b/xylose/scielodocument.py @@ -866,6 +866,24 @@ class Journal(object): return missions + @property + def publisher_country(self): + """ + This method retrieves the publisher country of journal. + This method return a tuple: ('US', u'United States'), otherwise + return None. + """ + if 'v310' not in self.data: + return None + + country_code = self.data.get('v310', [{'_': None}])[0]['_'] + country_name = choices.ISO_3166.get(country_code, None) + + if not country_code or not country_name: + return None + + return (country_code, country_name) + @property def copyright(self): """
Add the ``publisher_country`` field to the Journal class So that we can register the publisher_country during the Site's initial processing, this attribute needs to be available in Xylose.
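A rough sketch of how the new property behaves, based only on the patch above: field v310 holds the ISO 3166 country code, which is resolved to a (code, name) tuple, and None is returned when the field is missing or the code cannot be resolved. The `ISO_3166` dictionary below is a tiny hypothetical subset standing in for `xylose.choices.ISO_3166`.

```python
# Hypothetical subset of xylose.choices.ISO_3166
ISO_3166 = {'BR': 'Brazil', 'US': 'United States'}


def publisher_country(data):
    """Mirror of the Journal.publisher_country property added by the patch."""
    if 'v310' not in data:
        return None
    country_code = data.get('v310', [{'_': None}])[0]['_']
    country_name = ISO_3166.get(country_code)
    if not country_code or not country_name:
        return None
    return (country_code, country_name)


print(publisher_country({'v310': [{'_': 'BR'}]}))   # ('BR', 'Brazil')
print(publisher_country({'v310': [{'_': 'BRX'}]}))  # None - code not in ISO_3166
print(publisher_country({}))                        # None - field missing
```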
scieloorg/xylose
diff --git a/tests/test_document.py b/tests/test_document.py index 282c34f..30996d5 100644 --- a/tests/test_document.py +++ b/tests/test_document.py @@ -1085,6 +1085,26 @@ class JournalTests(unittest.TestCase): self.assertIsNone(journal.mission) + def test_journal_publisher_country(self): + journal = self.journal + + expected = ('BR', 'Brazil') + + self.assertEqual(journal.publisher_country, expected) + + def test_journal_publisher_country_without_country(self): + journal = self.journal + + del(journal.data['v310']) + + self.assertIsNone(journal.publisher_country) + + def test_journal_publisher_country_not_findable_code(self): + self.fulldoc['title']['v310'] = [{"_": "BRX"}] + journal = Journal(self.fulldoc['title']) + + self.assertIsNone(journal.publisher_country) + def test_journal_copyright(self): journal = self.journal
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 1 }
1.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "pytest-mock" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements-test.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
coverage==7.8.0 exceptiongroup==1.2.2 iniconfig==2.1.0 mocker==1.1.1 nose==1.0.0 packaging==24.2 pluggy==1.5.0 pytest==8.3.5 pytest-cov==6.0.0 pytest-mock==3.14.0 tomli==2.2.1 -e git+https://github.com/scieloorg/xylose.git@9d72bccf95503133b6fe7ef55ec88f9cf9b50a71#egg=xylose
name: xylose channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - coverage==7.8.0 - exceptiongroup==1.2.2 - iniconfig==2.1.0 - mocker==1.1.1 - nose==1.0.0 - packaging==24.2 - pluggy==1.5.0 - pytest==8.3.5 - pytest-cov==6.0.0 - pytest-mock==3.14.0 - tomli==2.2.1 prefix: /opt/conda/envs/xylose
[ "tests/test_document.py::JournalTests::test_journal_publisher_country", "tests/test_document.py::JournalTests::test_journal_publisher_country_not_findable_code", "tests/test_document.py::JournalTests::test_journal_publisher_country_without_country" ]
[]
[ "tests/test_document.py::ToolsTests::test_get_date_wrong_day", "tests/test_document.py::ToolsTests::test_get_date_wrong_day_month", "tests/test_document.py::ToolsTests::test_get_date_wrong_day_month_not_int", "tests/test_document.py::ToolsTests::test_get_date_wrong_day_not_int", "tests/test_document.py::ToolsTests::test_get_date_wrong_month_not_int", "tests/test_document.py::ToolsTests::test_get_date_year", "tests/test_document.py::ToolsTests::test_get_date_year_day", "tests/test_document.py::ToolsTests::test_get_date_year_month", "tests/test_document.py::ToolsTests::test_get_date_year_month_day", "tests/test_document.py::ToolsTests::test_get_date_year_month_day_31", "tests/test_document.py::ToolsTests::test_get_language_iso639_1_defined", "tests/test_document.py::ToolsTests::test_get_language_iso639_1_undefined", "tests/test_document.py::ToolsTests::test_get_language_iso639_2_defined", "tests/test_document.py::ToolsTests::test_get_language_iso639_2_undefined", "tests/test_document.py::ToolsTests::test_get_language_without_iso_format", "tests/test_document.py::IssueTests::test_collection_acronym", "tests/test_document.py::IssueTests::test_is_ahead", "tests/test_document.py::IssueTests::test_is_ahead_1", "tests/test_document.py::IssueTests::test_issue", "tests/test_document.py::IssueTests::test_issue_label", "tests/test_document.py::IssueTests::test_issue_url", "tests/test_document.py::IssueTests::test_order", "tests/test_document.py::IssueTests::test_processing_date", "tests/test_document.py::IssueTests::test_processing_date_1", "tests/test_document.py::IssueTests::test_publication_date", "tests/test_document.py::IssueTests::test_supplement_number", "tests/test_document.py::IssueTests::test_supplement_volume", "tests/test_document.py::IssueTests::test_type_regular", "tests/test_document.py::IssueTests::test_type_supplement_1", "tests/test_document.py::IssueTests::test_type_supplement_2", "tests/test_document.py::IssueTests::test_volume", "tests/test_document.py::IssueTests::test_without_issue", "tests/test_document.py::IssueTests::test_without_processing_date", "tests/test_document.py::IssueTests::test_without_publication_date", "tests/test_document.py::IssueTests::test_without_suplement_number", "tests/test_document.py::IssueTests::test_without_supplement_volume", "tests/test_document.py::IssueTests::test_without_volume", "tests/test_document.py::JournalTests::test_any_issn_priority_electronic", "tests/test_document.py::JournalTests::test_any_issn_priority_electronic_without_electronic", "tests/test_document.py::JournalTests::test_any_issn_priority_print", "tests/test_document.py::JournalTests::test_any_issn_priority_print_without_print", "tests/test_document.py::JournalTests::test_cnn_code", "tests/test_document.py::JournalTests::test_collection_acronym", "tests/test_document.py::JournalTests::test_creation_date", "tests/test_document.py::JournalTests::test_ctrl_vocabulary", "tests/test_document.py::JournalTests::test_ctrl_vocabulary_out_of_choices", "tests/test_document.py::JournalTests::test_current_status", "tests/test_document.py::JournalTests::test_current_status_lots_of_changes_study_case_1", "tests/test_document.py::JournalTests::test_current_status_some_changes", "tests/test_document.py::JournalTests::test_current_without_v51", "tests/test_document.py::JournalTests::test_first_number", "tests/test_document.py::JournalTests::test_first_number_1", "tests/test_document.py::JournalTests::test_first_volume", "tests/test_document.py::JournalTests::test_first_volume_1", 
"tests/test_document.py::JournalTests::test_first_year", "tests/test_document.py::JournalTests::test_first_year_1", "tests/test_document.py::JournalTests::test_journal", "tests/test_document.py::JournalTests::test_journal_abbreviated_title", "tests/test_document.py::JournalTests::test_journal_acronym", "tests/test_document.py::JournalTests::test_journal_copyright", "tests/test_document.py::JournalTests::test_journal_copyright_without_copyright", "tests/test_document.py::JournalTests::test_journal_fulltitle", "tests/test_document.py::JournalTests::test_journal_fulltitle_without_subtitle", "tests/test_document.py::JournalTests::test_journal_fulltitle_without_title", "tests/test_document.py::JournalTests::test_journal_mission", "tests/test_document.py::JournalTests::test_journal_mission_without_language_key", "tests/test_document.py::JournalTests::test_journal_mission_without_mission", "tests/test_document.py::JournalTests::test_journal_mission_without_mission_text", "tests/test_document.py::JournalTests::test_journal_mission_without_mission_text_and_language", "tests/test_document.py::JournalTests::test_journal_subtitle", "tests/test_document.py::JournalTests::test_journal_title", "tests/test_document.py::JournalTests::test_journal_title_nlm", "tests/test_document.py::JournalTests::test_journal_url", "tests/test_document.py::JournalTests::test_journal_without_subtitle", "tests/test_document.py::JournalTests::test_languages", "tests/test_document.py::JournalTests::test_languages_without_v350", "tests/test_document.py::JournalTests::test_last_cnn_code_1", "tests/test_document.py::JournalTests::test_last_number", "tests/test_document.py::JournalTests::test_last_number_1", "tests/test_document.py::JournalTests::test_last_volume", "tests/test_document.py::JournalTests::test_last_volume_1", "tests/test_document.py::JournalTests::test_last_year", "tests/test_document.py::JournalTests::test_last_year_1", "tests/test_document.py::JournalTests::test_load_issn_with_v435", "tests/test_document.py::JournalTests::test_load_issn_with_v935_and_v35_ONLINE", "tests/test_document.py::JournalTests::test_load_issn_with_v935_and_v35_PRINT", "tests/test_document.py::JournalTests::test_load_issn_with_v935_equal_v400_and_v35_ONLINE", "tests/test_document.py::JournalTests::test_load_issn_with_v935_equal_v400_and_v35_PRINT", "tests/test_document.py::JournalTests::test_load_issn_with_v935_without_v35", "tests/test_document.py::JournalTests::test_load_issn_without_v935_and_v35_ONLINE", "tests/test_document.py::JournalTests::test_load_issn_without_v935_and_v35_PRINT", "tests/test_document.py::JournalTests::test_load_issn_without_v935_without_v35", "tests/test_document.py::JournalTests::test_periodicity", "tests/test_document.py::JournalTests::test_periodicity_in_months", "tests/test_document.py::JournalTests::test_periodicity_in_months_out_of_choices", "tests/test_document.py::JournalTests::test_periodicity_out_of_choices", "tests/test_document.py::JournalTests::test_permission_id", "tests/test_document.py::JournalTests::test_permission_t0", "tests/test_document.py::JournalTests::test_permission_t1", "tests/test_document.py::JournalTests::test_permission_t2", "tests/test_document.py::JournalTests::test_permission_t3", "tests/test_document.py::JournalTests::test_permission_t4", "tests/test_document.py::JournalTests::test_permission_text", "tests/test_document.py::JournalTests::test_permission_url", "tests/test_document.py::JournalTests::test_permission_without_v540", 
"tests/test_document.py::JournalTests::test_permission_without_v540_t", "tests/test_document.py::JournalTests::test_plevel", "tests/test_document.py::JournalTests::test_plevel_out_of_choices", "tests/test_document.py::JournalTests::test_publisher_loc", "tests/test_document.py::JournalTests::test_publisher_name", "tests/test_document.py::JournalTests::test_scielo_issn", "tests/test_document.py::JournalTests::test_secs_code", "tests/test_document.py::JournalTests::test_standard", "tests/test_document.py::JournalTests::test_standard_out_of_choices", "tests/test_document.py::JournalTests::test_status", "tests/test_document.py::JournalTests::test_status_lots_of_changes", "tests/test_document.py::JournalTests::test_status_lots_of_changes_study_case_1", "tests/test_document.py::JournalTests::test_status_lots_of_changes_with_reason", "tests/test_document.py::JournalTests::test_status_some_changes", "tests/test_document.py::JournalTests::test_status_without_v51", "tests/test_document.py::JournalTests::test_subject_areas", "tests/test_document.py::JournalTests::test_subject_descriptors", "tests/test_document.py::JournalTests::test_subject_index_coverage", "tests/test_document.py::JournalTests::test_submission_url", "tests/test_document.py::JournalTests::test_update_date", "tests/test_document.py::JournalTests::test_without_ctrl_vocabulary", "tests/test_document.py::JournalTests::test_without_index_coverage", "tests/test_document.py::JournalTests::test_without_journal_abbreviated_title", "tests/test_document.py::JournalTests::test_without_journal_acronym", "tests/test_document.py::JournalTests::test_without_journal_title", "tests/test_document.py::JournalTests::test_without_journal_title_nlm", "tests/test_document.py::JournalTests::test_without_journal_url", "tests/test_document.py::JournalTests::test_without_periodicity", "tests/test_document.py::JournalTests::test_without_periodicity_in_months", "tests/test_document.py::JournalTests::test_without_plevel", "tests/test_document.py::JournalTests::test_without_publisher_loc", "tests/test_document.py::JournalTests::test_without_publisher_name", "tests/test_document.py::JournalTests::test_without_scielo_domain", "tests/test_document.py::JournalTests::test_without_scielo_domain_title_v690", "tests/test_document.py::JournalTests::test_without_secs_code", "tests/test_document.py::JournalTests::test_without_standard", "tests/test_document.py::JournalTests::test_without_subject_areas", "tests/test_document.py::JournalTests::test_without_subject_descriptors", "tests/test_document.py::JournalTests::test_without_wos_citation_indexes", "tests/test_document.py::JournalTests::test_without_wos_subject_areas", "tests/test_document.py::JournalTests::test_wos_citation_indexes", "tests/test_document.py::JournalTests::test_wos_subject_areas", "tests/test_document.py::ArticleTests::test_acceptance_date", "tests/test_document.py::ArticleTests::test_affiliation_just_with_affiliation_name", "tests/test_document.py::ArticleTests::test_affiliation_with_country_iso_3166", "tests/test_document.py::ArticleTests::test_affiliation_without_affiliation_name", "tests/test_document.py::ArticleTests::test_affiliations", "tests/test_document.py::ArticleTests::test_ahead_publication_date", "tests/test_document.py::ArticleTests::test_article", "tests/test_document.py::ArticleTests::test_author_with_two_affiliations", "tests/test_document.py::ArticleTests::test_author_with_two_role", "tests/test_document.py::ArticleTests::test_author_without_affiliations", 
"tests/test_document.py::ArticleTests::test_author_without_surname_and_given_names", "tests/test_document.py::ArticleTests::test_authors", "tests/test_document.py::ArticleTests::test_collection_acronym", "tests/test_document.py::ArticleTests::test_collection_acronym_priorizing_collection", "tests/test_document.py::ArticleTests::test_collection_acronym_retrieving_v992", "tests/test_document.py::ArticleTests::test_collection_name_brazil", "tests/test_document.py::ArticleTests::test_collection_name_undefined", "tests/test_document.py::ArticleTests::test_corporative_authors", "tests/test_document.py::ArticleTests::test_creation_date", "tests/test_document.py::ArticleTests::test_creation_date_1", "tests/test_document.py::ArticleTests::test_creation_date_2", "tests/test_document.py::ArticleTests::test_data_model_version_html", "tests/test_document.py::ArticleTests::test_data_model_version_html_1", "tests/test_document.py::ArticleTests::test_data_model_version_xml", "tests/test_document.py::ArticleTests::test_document_type", "tests/test_document.py::ArticleTests::test_doi", "tests/test_document.py::ArticleTests::test_doi_clean_1", "tests/test_document.py::ArticleTests::test_doi_clean_2", "tests/test_document.py::ArticleTests::test_doi_v237", "tests/test_document.py::ArticleTests::test_e_location", "tests/test_document.py::ArticleTests::test_end_page_loaded_crazy_legacy_way_1", "tests/test_document.py::ArticleTests::test_end_page_loaded_crazy_legacy_way_2", "tests/test_document.py::ArticleTests::test_end_page_loaded_through_xml", "tests/test_document.py::ArticleTests::test_file_code", "tests/test_document.py::ArticleTests::test_file_code_crazy_slashs_1", "tests/test_document.py::ArticleTests::test_file_code_crazy_slashs_2", "tests/test_document.py::ArticleTests::test_first_author", "tests/test_document.py::ArticleTests::test_first_author_without_author", "tests/test_document.py::ArticleTests::test_fulltexts_field_fulltexts", "tests/test_document.py::ArticleTests::test_fulltexts_without_field_fulltexts", "tests/test_document.py::ArticleTests::test_html_url", "tests/test_document.py::ArticleTests::test_invalid_document_type", "tests/test_document.py::ArticleTests::test_issue_url", "tests/test_document.py::ArticleTests::test_journal_abbreviated_title", "tests/test_document.py::ArticleTests::test_journal_acronym", "tests/test_document.py::ArticleTests::test_journal_title", "tests/test_document.py::ArticleTests::test_keywords", "tests/test_document.py::ArticleTests::test_keywords_iso639_2", "tests/test_document.py::ArticleTests::test_keywords_with_undefined_language", "tests/test_document.py::ArticleTests::test_keywords_without_subfield_k", "tests/test_document.py::ArticleTests::test_keywords_without_subfield_l", "tests/test_document.py::ArticleTests::test_languages_field_fulltexts", "tests/test_document.py::ArticleTests::test_languages_field_v40", "tests/test_document.py::ArticleTests::test_last_page", "tests/test_document.py::ArticleTests::test_mixed_affiliations_1", "tests/test_document.py::ArticleTests::test_normalized_affiliations", "tests/test_document.py::ArticleTests::test_normalized_affiliations_undefined_ISO_3166_CODE", "tests/test_document.py::ArticleTests::test_normalized_affiliations_without_p", "tests/test_document.py::ArticleTests::test_order", "tests/test_document.py::ArticleTests::test_original_abstract_with_just_one_language_defined", "tests/test_document.py::ArticleTests::test_original_abstract_with_language_defined", 
"tests/test_document.py::ArticleTests::test_original_abstract_with_language_defined_but_different_of_the_article_original_language", "tests/test_document.py::ArticleTests::test_original_abstract_without_language_defined", "tests/test_document.py::ArticleTests::test_original_html_field_body", "tests/test_document.py::ArticleTests::test_original_language_invalid_iso639_2", "tests/test_document.py::ArticleTests::test_original_language_iso639_2", "tests/test_document.py::ArticleTests::test_original_language_original", "tests/test_document.py::ArticleTests::test_original_section_field_v49", "tests/test_document.py::ArticleTests::test_original_title_subfield_t", "tests/test_document.py::ArticleTests::test_original_title_with_just_one_language_defined", "tests/test_document.py::ArticleTests::test_original_title_with_language_defined", "tests/test_document.py::ArticleTests::test_original_title_with_language_defined_but_different_of_the_article_original_language", "tests/test_document.py::ArticleTests::test_original_title_without_language_defined", "tests/test_document.py::ArticleTests::test_pdf_url", "tests/test_document.py::ArticleTests::test_processing_date", "tests/test_document.py::ArticleTests::test_processing_date_1", "tests/test_document.py::ArticleTests::test_project_name", "tests/test_document.py::ArticleTests::test_project_sponsors", "tests/test_document.py::ArticleTests::test_publication_contract", "tests/test_document.py::ArticleTests::test_publication_date", "tests/test_document.py::ArticleTests::test_publisher_id", "tests/test_document.py::ArticleTests::test_publisher_loc", "tests/test_document.py::ArticleTests::test_publisher_name", "tests/test_document.py::ArticleTests::test_receive_date", "tests/test_document.py::ArticleTests::test_review_date", "tests/test_document.py::ArticleTests::test_secion_code_field_v49", "tests/test_document.py::ArticleTests::test_section_code_nd_field_v49", "tests/test_document.py::ArticleTests::test_section_code_without_field_v49", "tests/test_document.py::ArticleTests::test_section_field_v49", "tests/test_document.py::ArticleTests::test_section_nd_field_v49", "tests/test_document.py::ArticleTests::test_section_without_field_v49", "tests/test_document.py::ArticleTests::test_start_page", "tests/test_document.py::ArticleTests::test_start_page_loaded_crazy_legacy_way_1", "tests/test_document.py::ArticleTests::test_start_page_loaded_crazy_legacy_way_2", "tests/test_document.py::ArticleTests::test_start_page_loaded_through_xml", "tests/test_document.py::ArticleTests::test_subject_areas", "tests/test_document.py::ArticleTests::test_thesis_degree", "tests/test_document.py::ArticleTests::test_thesis_organization", "tests/test_document.py::ArticleTests::test_thesis_organization_and_division", "tests/test_document.py::ArticleTests::test_thesis_organization_without_name", "tests/test_document.py::ArticleTests::test_translated_abstracts", "tests/test_document.py::ArticleTests::test_translated_abstracts_without_v83", "tests/test_document.py::ArticleTests::test_translated_abtracts_iso639_2", "tests/test_document.py::ArticleTests::test_translated_htmls_field_body", "tests/test_document.py::ArticleTests::test_translated_section_field_v49", "tests/test_document.py::ArticleTests::test_translated_titles", "tests/test_document.py::ArticleTests::test_translated_titles_iso639_2", "tests/test_document.py::ArticleTests::test_translated_titles_without_v12", "tests/test_document.py::ArticleTests::test_update_date", "tests/test_document.py::ArticleTests::test_update_date_1", 
"tests/test_document.py::ArticleTests::test_update_date_2", "tests/test_document.py::ArticleTests::test_update_date_3", "tests/test_document.py::ArticleTests::test_whitwout_acceptance_date", "tests/test_document.py::ArticleTests::test_whitwout_ahead_publication_date", "tests/test_document.py::ArticleTests::test_whitwout_receive_date", "tests/test_document.py::ArticleTests::test_whitwout_review_date", "tests/test_document.py::ArticleTests::test_without_affiliations", "tests/test_document.py::ArticleTests::test_without_authors", "tests/test_document.py::ArticleTests::test_without_citations", "tests/test_document.py::ArticleTests::test_without_collection_acronym", "tests/test_document.py::ArticleTests::test_without_corporative_authors", "tests/test_document.py::ArticleTests::test_without_document_type", "tests/test_document.py::ArticleTests::test_without_doi", "tests/test_document.py::ArticleTests::test_without_e_location", "tests/test_document.py::ArticleTests::test_without_html_url", "tests/test_document.py::ArticleTests::test_without_issue_url", "tests/test_document.py::ArticleTests::test_without_journal_abbreviated_title", "tests/test_document.py::ArticleTests::test_without_journal_acronym", "tests/test_document.py::ArticleTests::test_without_journal_title", "tests/test_document.py::ArticleTests::test_without_keywords", "tests/test_document.py::ArticleTests::test_without_last_page", "tests/test_document.py::ArticleTests::test_without_normalized_affiliations", "tests/test_document.py::ArticleTests::test_without_order", "tests/test_document.py::ArticleTests::test_without_original_abstract", "tests/test_document.py::ArticleTests::test_without_original_title", "tests/test_document.py::ArticleTests::test_without_pages", "tests/test_document.py::ArticleTests::test_without_pdf_url", "tests/test_document.py::ArticleTests::test_without_processing_date", "tests/test_document.py::ArticleTests::test_without_project_name", "tests/test_document.py::ArticleTests::test_without_project_sponsor", "tests/test_document.py::ArticleTests::test_without_publication_contract", "tests/test_document.py::ArticleTests::test_without_publication_date", "tests/test_document.py::ArticleTests::test_without_publisher_id", "tests/test_document.py::ArticleTests::test_without_publisher_loc", "tests/test_document.py::ArticleTests::test_without_publisher_name", "tests/test_document.py::ArticleTests::test_without_scielo_domain", "tests/test_document.py::ArticleTests::test_without_scielo_domain_article_v69", "tests/test_document.py::ArticleTests::test_without_scielo_domain_article_v69_and_with_title_v690", "tests/test_document.py::ArticleTests::test_without_scielo_domain_title_v690", "tests/test_document.py::ArticleTests::test_without_start_page", "tests/test_document.py::ArticleTests::test_without_subject_areas", "tests/test_document.py::ArticleTests::test_without_thesis_degree", "tests/test_document.py::ArticleTests::test_without_thesis_organization", "tests/test_document.py::ArticleTests::test_without_wos_citation_indexes", "tests/test_document.py::ArticleTests::test_without_wos_subject_areas", "tests/test_document.py::ArticleTests::test_wos_citation_indexes", "tests/test_document.py::ArticleTests::test_wos_subject_areas", "tests/test_document.py::CitationTest::test_a_link_access_date", "tests/test_document.py::CitationTest::test_analytic_institution_for_a_article_citation", "tests/test_document.py::CitationTest::test_analytic_institution_for_a_book_citation", "tests/test_document.py::CitationTest::test_article_title", 
"tests/test_document.py::CitationTest::test_article_without_title", "tests/test_document.py::CitationTest::test_authors_article", "tests/test_document.py::CitationTest::test_authors_book", "tests/test_document.py::CitationTest::test_authors_link", "tests/test_document.py::CitationTest::test_authors_thesis", "tests/test_document.py::CitationTest::test_book_chapter_title", "tests/test_document.py::CitationTest::test_book_edition", "tests/test_document.py::CitationTest::test_book_volume", "tests/test_document.py::CitationTest::test_book_without_chapter_title", "tests/test_document.py::CitationTest::test_citation_sample_congress", "tests/test_document.py::CitationTest::test_citation_sample_link", "tests/test_document.py::CitationTest::test_citation_sample_link_without_comment", "tests/test_document.py::CitationTest::test_conference_edition", "tests/test_document.py::CitationTest::test_conference_name", "tests/test_document.py::CitationTest::test_conference_sponsor", "tests/test_document.py::CitationTest::test_conference_without_name", "tests/test_document.py::CitationTest::test_conference_without_sponsor", "tests/test_document.py::CitationTest::test_date", "tests/test_document.py::CitationTest::test_doi", "tests/test_document.py::CitationTest::test_editor", "tests/test_document.py::CitationTest::test_elocation_14", "tests/test_document.py::CitationTest::test_elocation_514", "tests/test_document.py::CitationTest::test_end_page_14", "tests/test_document.py::CitationTest::test_end_page_514", "tests/test_document.py::CitationTest::test_end_page_withdout_data", "tests/test_document.py::CitationTest::test_first_author_article", "tests/test_document.py::CitationTest::test_first_author_book", "tests/test_document.py::CitationTest::test_first_author_link", "tests/test_document.py::CitationTest::test_first_author_thesis", "tests/test_document.py::CitationTest::test_first_author_without_monographic_authors", "tests/test_document.py::CitationTest::test_first_author_without_monographic_authors_but_not_a_book_citation", "tests/test_document.py::CitationTest::test_index_number", "tests/test_document.py::CitationTest::test_institutions_all_fields", "tests/test_document.py::CitationTest::test_institutions_v11", "tests/test_document.py::CitationTest::test_institutions_v17", "tests/test_document.py::CitationTest::test_institutions_v29", "tests/test_document.py::CitationTest::test_institutions_v50", "tests/test_document.py::CitationTest::test_institutions_v58", "tests/test_document.py::CitationTest::test_invalid_edition", "tests/test_document.py::CitationTest::test_isbn", "tests/test_document.py::CitationTest::test_isbn_but_not_a_book", "tests/test_document.py::CitationTest::test_issn", "tests/test_document.py::CitationTest::test_issn_but_not_an_article", "tests/test_document.py::CitationTest::test_issue_part", "tests/test_document.py::CitationTest::test_issue_title", "tests/test_document.py::CitationTest::test_journal_issue", "tests/test_document.py::CitationTest::test_journal_volume", "tests/test_document.py::CitationTest::test_link", "tests/test_document.py::CitationTest::test_link_title", "tests/test_document.py::CitationTest::test_link_without_title", "tests/test_document.py::CitationTest::test_monographic_authors", "tests/test_document.py::CitationTest::test_monographic_first_author", "tests/test_document.py::CitationTest::test_pages_14", "tests/test_document.py::CitationTest::test_pages_514", "tests/test_document.py::CitationTest::test_pages_withdout_data", 
"tests/test_document.py::CitationTest::test_publication_type_article", "tests/test_document.py::CitationTest::test_publication_type_book", "tests/test_document.py::CitationTest::test_publication_type_conference", "tests/test_document.py::CitationTest::test_publication_type_link", "tests/test_document.py::CitationTest::test_publication_type_thesis", "tests/test_document.py::CitationTest::test_publication_type_undefined", "tests/test_document.py::CitationTest::test_publisher", "tests/test_document.py::CitationTest::test_publisher_address", "tests/test_document.py::CitationTest::test_publisher_address_without_e", "tests/test_document.py::CitationTest::test_series_book", "tests/test_document.py::CitationTest::test_series_but_neither_journal_book_or_conference_citation", "tests/test_document.py::CitationTest::test_series_conference", "tests/test_document.py::CitationTest::test_series_journal", "tests/test_document.py::CitationTest::test_source_book_title", "tests/test_document.py::CitationTest::test_source_journal", "tests/test_document.py::CitationTest::test_source_journal_without_journal_title", "tests/test_document.py::CitationTest::test_sponsor", "tests/test_document.py::CitationTest::test_start_page_14", "tests/test_document.py::CitationTest::test_start_page_514", "tests/test_document.py::CitationTest::test_start_page_withdout_data", "tests/test_document.py::CitationTest::test_thesis_institution", "tests/test_document.py::CitationTest::test_thesis_title", "tests/test_document.py::CitationTest::test_thesis_without_title", "tests/test_document.py::CitationTest::test_title_when_article_citation", "tests/test_document.py::CitationTest::test_title_when_conference_citation", "tests/test_document.py::CitationTest::test_title_when_link_citation", "tests/test_document.py::CitationTest::test_title_when_thesis_citation", "tests/test_document.py::CitationTest::test_with_volume_but_not_a_journal_article_neither_a_book", "tests/test_document.py::CitationTest::test_without_analytic_institution", "tests/test_document.py::CitationTest::test_without_authors", "tests/test_document.py::CitationTest::test_without_date", "tests/test_document.py::CitationTest::test_without_doi", "tests/test_document.py::CitationTest::test_without_edition", "tests/test_document.py::CitationTest::test_without_editor", "tests/test_document.py::CitationTest::test_without_first_author", "tests/test_document.py::CitationTest::test_without_index_number", "tests/test_document.py::CitationTest::test_without_institutions", "tests/test_document.py::CitationTest::test_without_issue", "tests/test_document.py::CitationTest::test_without_issue_part", "tests/test_document.py::CitationTest::test_without_issue_title", "tests/test_document.py::CitationTest::test_without_link", "tests/test_document.py::CitationTest::test_without_monographic_authors", "tests/test_document.py::CitationTest::test_without_monographic_authors_but_not_a_book_citation", "tests/test_document.py::CitationTest::test_without_publisher", "tests/test_document.py::CitationTest::test_without_publisher_address", "tests/test_document.py::CitationTest::test_without_series", "tests/test_document.py::CitationTest::test_without_sponsor", "tests/test_document.py::CitationTest::test_without_thesis_institution", "tests/test_document.py::CitationTest::test_without_volume" ]
[]
BSD 2-Clause "Simplified" License
455
scieloorg__xylose-118
743d8ca8a32b6e6e82b1ed0fc97f7d240c85cba5
2016-03-01 17:38:49
743d8ca8a32b6e6e82b1ed0fc97f7d240c85cba5
diff --git a/xylose/scielodocument.py b/xylose/scielodocument.py index 0a6fdb0..b177a8d 100644 --- a/xylose/scielodocument.py +++ b/xylose/scielodocument.py @@ -567,18 +567,6 @@ class Journal(object): if len(langs) > 0: return langs - @property - def abstract_languages(self): - """ - This method retrieves a list of possible languages that the journal - publishes the abstracts. - This method deals with the legacy fields (v360). - """ - if 'v360' in self.data: - langs = [i['_'] for i in self.data['v360'] if i['_'] in choices.ISO639_1_to_2.keys()] - if len(langs) > 0: - return langs - @property def collection_acronym(self): """ @@ -717,20 +705,49 @@ class Journal(object): This method retrieves the publisher name of the given article, if it exists. This method deals with the legacy fields (480). + + This method return a list: + + ["Associa\u00e7\u00e3o Brasileira de Limnologia", + "Sociedade Botânica do Brasil"] """ + if 'v480' not in self.data: + return None - return self.data.get('v480', [{'_': None}])[0]['_'] + return [publisher['_'] for publisher in self.data.get('v480') if '_' in publisher and publisher['_'] != ""] @property def publisher_loc(self): """ - This method retrieves the publisher localization of the given article, + This method retrieves the publisher localization of the given journal, if it exists. This method deals with the legacy fields (490). """ + warnings.warn("deprecated, use journal.publisher_city", DeprecationWarning) + return self.data.get('v490', [{'_': None}])[0]['_'] + @property + def publisher_city(self): + """ + This method retrieves the publisher localization of the given journal, + if it exists. + This method deals with the legacy fields (490). + """ + + return self.data.get('v490', [{'_': None}])[0]['_'] + + @property + def publisher_state(self): + """ + This method retrieves the publisher state of the given journal, + if it exists. + This method deals with the legacy fields (320). + """ + + return self.data.get('v320', [{'_': None}])[0]['_'] + @property def title(self): """ @@ -741,6 +758,24 @@ class Journal(object): return self.data.get('v100', [{'_': None}])[0]['_'] + @property + def publisher_country(self): + """ + This method retrieves the publisher country of journal. + This method return a tuple: ('US', u'United States'), otherwise + return None. + """ + if 'v310' not in self.data: + return None + + country_code = self.data.get('v310', [{'_': None}])[0]['_'] + country_name = choices.ISO_3166.get(country_code, None) + + if not country_code or not country_name: + return None + + return (country_code, country_name) + @property def subtitle(self): """ @@ -927,24 +962,6 @@ class Journal(object): return missions - @property - def publisher_country(self): - """ - This method retrieves the publisher country of journal. - This method return a tuple: ('US', u'United States'), otherwise - return None. - """ - if 'v310' not in self.data: - return None - - country_code = self.data.get('v310', [{'_': None}])[0]['_'] - country_name = choices.ISO_3166.get(country_code, None) - - if not country_code or not country_name: - return None - - return (country_code, country_name) - @property def copyrighter(self): """
Add the ``publisher_state`` field to the Journal class
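For context, the patch above does more than the one-line title suggests: it adds `publisher_state` (field v320), introduces `publisher_city` (v490) as the preferred name for the deprecated `publisher_loc`, and makes `publisher_name` (v480) return a list. Below is a rough standalone sketch of those accessors, using a hypothetical ISIS-style record shaped like the one exercised in the tests.

```python
# Hypothetical ISIS-style journal title record, matching the fields used in the tests
title_record = {
    'v320': [{'_': 'SP'}],                                    # publisher state
    'v480': [{'_': 'Associação Brasileira de Limnologia'}],   # publisher name(s)
    'v490': [{'_': 'Rio Claro'}],                             # publisher city
}


def publisher_state(data):
    return data.get('v320', [{'_': None}])[0]['_']


def publisher_city(data):
    return data.get('v490', [{'_': None}])[0]['_']


def publisher_name(data):
    # Returns a list of publisher names, skipping empty entries, as in the patch.
    if 'v480' not in data:
        return None
    return [p['_'] for p in data['v480'] if '_' in p and p['_'] != '']


print(publisher_state(title_record))  # 'SP'
print(publisher_city(title_record))   # 'Rio Claro'
print(publisher_name(title_record))   # ['Associação Brasileira de Limnologia']
```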
scieloorg/xylose
diff --git a/tests/test_document.py b/tests/test_document.py index 83a5588..e9cb6fb 100644 --- a/tests/test_document.py +++ b/tests/test_document.py @@ -435,18 +435,6 @@ class JournalTests(unittest.TestCase): self.assertEqual(journal.languages, None) - def test_abstract_languages(self): - journal = Journal(self.fulldoc['title']) - - self.assertEqual(sorted(journal.abstract_languages), [u'en', u'pt']) - - def test_abstract_languages_without_v350(self): - del(self.fulldoc['title']['v360']) - - journal = Journal(self.fulldoc['title']) - - self.assertEqual(journal.abstract_languages, None) - def test_current_without_v51(self): del(self.fulldoc['title']['v51']) @@ -1021,7 +1009,7 @@ class JournalTests(unittest.TestCase): def test_publisher_name(self): journal = self.journal - self.assertEqual(journal.publisher_name, u'Associação Brasileira de Limnologia') + self.assertEqual(journal.publisher_name, [u'Associação Brasileira de Limnologia']) def test_without_publisher_name(self): journal = self.journal @@ -1040,6 +1028,28 @@ class JournalTests(unittest.TestCase): del(journal.data['v490']) self.assertEqual(journal.publisher_loc, None) + def test_publisher_city(self): + journal = self.journal + + self.assertEqual(journal.publisher_city, u'Rio Claro') + + def test_without_publisher_city(self): + journal = self.journal + + del(journal.data['v490']) + self.assertEqual(journal.publisher_city, None) + + def test_publisher_state(self): + journal = self.journal + + self.assertEqual(journal.publisher_state, u'SP') + + def test_without_publisher_state(self): + journal = self.journal + + del(journal.data['v320']) + self.assertEqual(journal.publisher_state, None) + def test_journal_title(self): journal = self.journal @@ -1536,50 +1546,6 @@ class ArticleTests(unittest.TestCase): self.assertEqual(article.original_language(iso_format=None), u'en') - def test_publisher_name(self): - article = self.article - - self.assertEqual(article.journal.publisher_name, u'Associação Brasileira de Limnologia') - - def test_without_publisher_name(self): - article = self.article - - del(article.data['title']['v480']) - self.assertEqual(article.journal.publisher_name, None) - - def test_publisher_loc(self): - article = self.article - - self.assertEqual(article.journal.publisher_loc, u'Rio Claro') - - def test_without_publisher_loc(self): - article = self.article - - del(article.data['title']['v490']) - self.assertEqual(article.journal.publisher_loc, None) - - def test_journal_title(self): - article = self.article - - self.assertEqual(article.journal.title, u'Acta Limnologica Brasiliensia') - - def test_without_journal_title(self): - article = self.article - - del(article.data['title']['v100']) - self.assertEqual(article.journal.title, None) - - def test_journal_acronym(self): - article = self.article - - self.assertEqual(article.journal.acronym, u'alb') - - def test_without_journal_acronym(self): - article = self.article - - del(article.data['title']['v68']) - self.assertEqual(article.journal.acronym, None) - def test_publication_date(self): article = self.article @@ -1620,7 +1586,6 @@ class ArticleTests(unittest.TestCase): article.data['article']['v93'] = [{u'_': u'20120419'}] self.assertEqual(article.creation_date, '2012-04-19') - def test_creation_date_1(self): article = self.article @@ -1769,7 +1734,6 @@ class ArticleTests(unittest.TestCase): self.assertEqual(article.elocation, None) - def test_start_page_loaded_through_xml(self): article = self.article @@ -1877,7 +1841,6 @@ class ArticleTests(unittest.TestCase): 
self.assertEqual(article.doi, u'10.1590/S2179-975X2012005000004') - def test_doi_clean_1(self): article = self.article @@ -1885,7 +1848,6 @@ class ArticleTests(unittest.TestCase): self.assertEqual(article.doi, u'10.1590/S2179-975X2012005000004') - def test_doi_clean_2(self): article = self.article @@ -2030,7 +1992,6 @@ class ArticleTests(unittest.TestCase): self.assertEqual(article.original_abstract(iso_format=None), None) - def test_without_corporative_authors(self): article = self.article @@ -2689,7 +2650,6 @@ class ArticleTests(unittest.TestCase): self.assertEqual(article.translated_titles(iso_format=None), expected) - def test_translated_abstracts_without_v83(self): article = self.article
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 2, "test_score": 3 }, "num_modified_files": 1 }
1.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest" ], "pre_install": [], "python": "3.9", "reqs_path": [ "requirements-test.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work packaging @ file:///croot/packaging_1734472117206/work pluggy @ file:///croot/pluggy_1733169602837/work pytest @ file:///croot/pytest_1738938843180/work tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work -e git+https://github.com/scieloorg/xylose.git@743d8ca8a32b6e6e82b1ed0fc97f7d240c85cba5#egg=xylose
name: xylose channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - exceptiongroup=1.2.0=py39h06a4308_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - packaging=24.2=py39h06a4308_0 - pip=25.0=py39h06a4308_0 - pluggy=1.5.0=py39h06a4308_0 - pytest=8.3.4=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tomli=2.0.1=py39h06a4308_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 prefix: /opt/conda/envs/xylose
[ "tests/test_document.py::JournalTests::test_publisher_city", "tests/test_document.py::JournalTests::test_publisher_name", "tests/test_document.py::JournalTests::test_publisher_state", "tests/test_document.py::JournalTests::test_without_publisher_city", "tests/test_document.py::JournalTests::test_without_publisher_state" ]
[]
[ "tests/test_document.py::ToolsTests::test_get_date_wrong_day", "tests/test_document.py::ToolsTests::test_get_date_wrong_day_month", "tests/test_document.py::ToolsTests::test_get_date_wrong_day_month_not_int", "tests/test_document.py::ToolsTests::test_get_date_wrong_day_not_int", "tests/test_document.py::ToolsTests::test_get_date_wrong_month_not_int", "tests/test_document.py::ToolsTests::test_get_date_year", "tests/test_document.py::ToolsTests::test_get_date_year_day", "tests/test_document.py::ToolsTests::test_get_date_year_month", "tests/test_document.py::ToolsTests::test_get_date_year_month_day", "tests/test_document.py::ToolsTests::test_get_date_year_month_day_31", "tests/test_document.py::ToolsTests::test_get_language_iso639_1_defined", "tests/test_document.py::ToolsTests::test_get_language_iso639_1_undefined", "tests/test_document.py::ToolsTests::test_get_language_iso639_2_defined", "tests/test_document.py::ToolsTests::test_get_language_iso639_2_undefined", "tests/test_document.py::ToolsTests::test_get_language_without_iso_format", "tests/test_document.py::IssueTests::test_collection_acronym", "tests/test_document.py::IssueTests::test_is_ahead", "tests/test_document.py::IssueTests::test_is_ahead_1", "tests/test_document.py::IssueTests::test_issue", "tests/test_document.py::IssueTests::test_issue_label", "tests/test_document.py::IssueTests::test_issue_url", "tests/test_document.py::IssueTests::test_order", "tests/test_document.py::IssueTests::test_processing_date", "tests/test_document.py::IssueTests::test_processing_date_1", "tests/test_document.py::IssueTests::test_publication_date", "tests/test_document.py::IssueTests::test_supplement_number", "tests/test_document.py::IssueTests::test_supplement_volume", "tests/test_document.py::IssueTests::test_type_regular", "tests/test_document.py::IssueTests::test_type_supplement_1", "tests/test_document.py::IssueTests::test_type_supplement_2", "tests/test_document.py::IssueTests::test_volume", "tests/test_document.py::IssueTests::test_without_issue", "tests/test_document.py::IssueTests::test_without_processing_date", "tests/test_document.py::IssueTests::test_without_publication_date", "tests/test_document.py::IssueTests::test_without_suplement_number", "tests/test_document.py::IssueTests::test_without_supplement_volume", "tests/test_document.py::IssueTests::test_without_volume", "tests/test_document.py::JournalTests::test_any_issn_priority_electronic", "tests/test_document.py::JournalTests::test_any_issn_priority_electronic_without_electronic", "tests/test_document.py::JournalTests::test_any_issn_priority_print", "tests/test_document.py::JournalTests::test_any_issn_priority_print_without_print", "tests/test_document.py::JournalTests::test_cnn_code", "tests/test_document.py::JournalTests::test_collection_acronym", "tests/test_document.py::JournalTests::test_creation_date", "tests/test_document.py::JournalTests::test_ctrl_vocabulary", "tests/test_document.py::JournalTests::test_ctrl_vocabulary_out_of_choices", "tests/test_document.py::JournalTests::test_current_status", "tests/test_document.py::JournalTests::test_current_status_lots_of_changes_study_case_1", "tests/test_document.py::JournalTests::test_current_status_some_changes", "tests/test_document.py::JournalTests::test_current_without_v51", "tests/test_document.py::JournalTests::test_editor_address", "tests/test_document.py::JournalTests::test_editor_address_without_data", "tests/test_document.py::JournalTests::test_editor_email", 
"tests/test_document.py::JournalTests::test_editor_email_without_data", "tests/test_document.py::JournalTests::test_first_number", "tests/test_document.py::JournalTests::test_first_number_1", "tests/test_document.py::JournalTests::test_first_volume", "tests/test_document.py::JournalTests::test_first_volume_1", "tests/test_document.py::JournalTests::test_first_year", "tests/test_document.py::JournalTests::test_first_year_1", "tests/test_document.py::JournalTests::test_in_ahci", "tests/test_document.py::JournalTests::test_in_scie", "tests/test_document.py::JournalTests::test_in_ssci", "tests/test_document.py::JournalTests::test_journal", "tests/test_document.py::JournalTests::test_journal_abbreviated_title", "tests/test_document.py::JournalTests::test_journal_acronym", "tests/test_document.py::JournalTests::test_journal_copyrighter", "tests/test_document.py::JournalTests::test_journal_copyrighter_without_copyright", "tests/test_document.py::JournalTests::test_journal_fulltitle", "tests/test_document.py::JournalTests::test_journal_fulltitle_without_subtitle", "tests/test_document.py::JournalTests::test_journal_fulltitle_without_title", "tests/test_document.py::JournalTests::test_journal_mission", "tests/test_document.py::JournalTests::test_journal_mission_without_language_key", "tests/test_document.py::JournalTests::test_journal_mission_without_mission", "tests/test_document.py::JournalTests::test_journal_mission_without_mission_text", "tests/test_document.py::JournalTests::test_journal_mission_without_mission_text_and_language", "tests/test_document.py::JournalTests::test_journal_other_title_without_other_titles", "tests/test_document.py::JournalTests::test_journal_other_titles", "tests/test_document.py::JournalTests::test_journal_publisher_country", "tests/test_document.py::JournalTests::test_journal_publisher_country_not_findable_code", "tests/test_document.py::JournalTests::test_journal_publisher_country_without_country", "tests/test_document.py::JournalTests::test_journal_sponsors", "tests/test_document.py::JournalTests::test_journal_sponsors_with_empty_items", "tests/test_document.py::JournalTests::test_journal_sponsors_without_sponsors", "tests/test_document.py::JournalTests::test_journal_subtitle", "tests/test_document.py::JournalTests::test_journal_title", "tests/test_document.py::JournalTests::test_journal_title_nlm", "tests/test_document.py::JournalTests::test_journal_url", "tests/test_document.py::JournalTests::test_journal_without_subtitle", "tests/test_document.py::JournalTests::test_languages", "tests/test_document.py::JournalTests::test_languages_without_v350", "tests/test_document.py::JournalTests::test_last_cnn_code_1", "tests/test_document.py::JournalTests::test_last_number", "tests/test_document.py::JournalTests::test_last_number_1", "tests/test_document.py::JournalTests::test_last_volume", "tests/test_document.py::JournalTests::test_last_volume_1", "tests/test_document.py::JournalTests::test_last_year", "tests/test_document.py::JournalTests::test_last_year_1", "tests/test_document.py::JournalTests::test_load_issn_with_v435", "tests/test_document.py::JournalTests::test_load_issn_with_v935_and_v35_ONLINE", "tests/test_document.py::JournalTests::test_load_issn_with_v935_and_v35_PRINT", "tests/test_document.py::JournalTests::test_load_issn_with_v935_equal_v400_and_v35_ONLINE", "tests/test_document.py::JournalTests::test_load_issn_with_v935_equal_v400_and_v35_PRINT", "tests/test_document.py::JournalTests::test_load_issn_with_v935_without_v35", 
"tests/test_document.py::JournalTests::test_load_issn_without_v935_and_v35_ONLINE", "tests/test_document.py::JournalTests::test_load_issn_without_v935_and_v35_PRINT", "tests/test_document.py::JournalTests::test_load_issn_without_v935_without_v35", "tests/test_document.py::JournalTests::test_periodicity", "tests/test_document.py::JournalTests::test_periodicity_in_months", "tests/test_document.py::JournalTests::test_periodicity_in_months_out_of_choices", "tests/test_document.py::JournalTests::test_periodicity_out_of_choices", "tests/test_document.py::JournalTests::test_permission_id", "tests/test_document.py::JournalTests::test_permission_t0", "tests/test_document.py::JournalTests::test_permission_t1", "tests/test_document.py::JournalTests::test_permission_t2", "tests/test_document.py::JournalTests::test_permission_t3", "tests/test_document.py::JournalTests::test_permission_t4", "tests/test_document.py::JournalTests::test_permission_text", "tests/test_document.py::JournalTests::test_permission_url", "tests/test_document.py::JournalTests::test_permission_without_v540", "tests/test_document.py::JournalTests::test_permission_without_v540_t", "tests/test_document.py::JournalTests::test_plevel", "tests/test_document.py::JournalTests::test_plevel_out_of_choices", "tests/test_document.py::JournalTests::test_publisher_loc", "tests/test_document.py::JournalTests::test_scielo_issn", "tests/test_document.py::JournalTests::test_secs_code", "tests/test_document.py::JournalTests::test_standard", "tests/test_document.py::JournalTests::test_standard_out_of_choices", "tests/test_document.py::JournalTests::test_status", "tests/test_document.py::JournalTests::test_status_lots_of_changes", "tests/test_document.py::JournalTests::test_status_lots_of_changes_study_case_1", "tests/test_document.py::JournalTests::test_status_lots_of_changes_with_reason", "tests/test_document.py::JournalTests::test_status_some_changes", "tests/test_document.py::JournalTests::test_status_without_v51", "tests/test_document.py::JournalTests::test_subject_areas", "tests/test_document.py::JournalTests::test_subject_descriptors", "tests/test_document.py::JournalTests::test_subject_index_coverage", "tests/test_document.py::JournalTests::test_submission_url", "tests/test_document.py::JournalTests::test_update_date", "tests/test_document.py::JournalTests::test_without_ctrl_vocabulary", "tests/test_document.py::JournalTests::test_without_index_coverage", "tests/test_document.py::JournalTests::test_without_journal_abbreviated_title", "tests/test_document.py::JournalTests::test_without_journal_acronym", "tests/test_document.py::JournalTests::test_without_journal_title", "tests/test_document.py::JournalTests::test_without_journal_title_nlm", "tests/test_document.py::JournalTests::test_without_journal_url", "tests/test_document.py::JournalTests::test_without_periodicity", "tests/test_document.py::JournalTests::test_without_periodicity_in_months", "tests/test_document.py::JournalTests::test_without_plevel", "tests/test_document.py::JournalTests::test_without_publisher_loc", "tests/test_document.py::JournalTests::test_without_publisher_name", "tests/test_document.py::JournalTests::test_without_scielo_domain", "tests/test_document.py::JournalTests::test_without_scielo_domain_title_v690", "tests/test_document.py::JournalTests::test_without_secs_code", "tests/test_document.py::JournalTests::test_without_standard", "tests/test_document.py::JournalTests::test_without_subject_areas", 
"tests/test_document.py::JournalTests::test_without_subject_descriptors", "tests/test_document.py::JournalTests::test_without_wos_citation_indexes", "tests/test_document.py::JournalTests::test_without_wos_subject_areas", "tests/test_document.py::JournalTests::test_wos_citation_indexes", "tests/test_document.py::JournalTests::test_wos_subject_areas", "tests/test_document.py::ArticleTests::test_acceptance_date", "tests/test_document.py::ArticleTests::test_affiliation_just_with_affiliation_name", "tests/test_document.py::ArticleTests::test_affiliation_with_country_iso_3166", "tests/test_document.py::ArticleTests::test_affiliation_without_affiliation_name", "tests/test_document.py::ArticleTests::test_affiliations", "tests/test_document.py::ArticleTests::test_ahead_publication_date", "tests/test_document.py::ArticleTests::test_article", "tests/test_document.py::ArticleTests::test_author_with_two_affiliations", "tests/test_document.py::ArticleTests::test_author_with_two_role", "tests/test_document.py::ArticleTests::test_author_without_affiliations", "tests/test_document.py::ArticleTests::test_author_without_surname_and_given_names", "tests/test_document.py::ArticleTests::test_authors", "tests/test_document.py::ArticleTests::test_collection_acronym", "tests/test_document.py::ArticleTests::test_collection_acronym_priorizing_collection", "tests/test_document.py::ArticleTests::test_collection_acronym_retrieving_v992", "tests/test_document.py::ArticleTests::test_collection_name_brazil", "tests/test_document.py::ArticleTests::test_collection_name_undefined", "tests/test_document.py::ArticleTests::test_corporative_authors", "tests/test_document.py::ArticleTests::test_creation_date", "tests/test_document.py::ArticleTests::test_creation_date_1", "tests/test_document.py::ArticleTests::test_creation_date_2", "tests/test_document.py::ArticleTests::test_data_model_version_html", "tests/test_document.py::ArticleTests::test_data_model_version_html_1", "tests/test_document.py::ArticleTests::test_data_model_version_xml", "tests/test_document.py::ArticleTests::test_document_type", "tests/test_document.py::ArticleTests::test_doi", "tests/test_document.py::ArticleTests::test_doi_clean_1", "tests/test_document.py::ArticleTests::test_doi_clean_2", "tests/test_document.py::ArticleTests::test_doi_v237", "tests/test_document.py::ArticleTests::test_e_location", "tests/test_document.py::ArticleTests::test_end_page_loaded_crazy_legacy_way_1", "tests/test_document.py::ArticleTests::test_end_page_loaded_crazy_legacy_way_2", "tests/test_document.py::ArticleTests::test_end_page_loaded_through_xml", "tests/test_document.py::ArticleTests::test_file_code", "tests/test_document.py::ArticleTests::test_file_code_crazy_slashs_1", "tests/test_document.py::ArticleTests::test_file_code_crazy_slashs_2", "tests/test_document.py::ArticleTests::test_first_author", "tests/test_document.py::ArticleTests::test_first_author_without_author", "tests/test_document.py::ArticleTests::test_fulltexts_field_fulltexts", "tests/test_document.py::ArticleTests::test_fulltexts_without_field_fulltexts", "tests/test_document.py::ArticleTests::test_html_url", "tests/test_document.py::ArticleTests::test_invalid_document_type", "tests/test_document.py::ArticleTests::test_issue_url", "tests/test_document.py::ArticleTests::test_journal_abbreviated_title", "tests/test_document.py::ArticleTests::test_keywords", "tests/test_document.py::ArticleTests::test_keywords_iso639_2", "tests/test_document.py::ArticleTests::test_keywords_with_undefined_language", 
"tests/test_document.py::ArticleTests::test_keywords_without_subfield_k", "tests/test_document.py::ArticleTests::test_keywords_without_subfield_l", "tests/test_document.py::ArticleTests::test_languages_field_fulltexts", "tests/test_document.py::ArticleTests::test_languages_field_v40", "tests/test_document.py::ArticleTests::test_last_page", "tests/test_document.py::ArticleTests::test_mixed_affiliations_1", "tests/test_document.py::ArticleTests::test_normalized_affiliations", "tests/test_document.py::ArticleTests::test_normalized_affiliations_undefined_ISO_3166_CODE", "tests/test_document.py::ArticleTests::test_normalized_affiliations_without_p", "tests/test_document.py::ArticleTests::test_order", "tests/test_document.py::ArticleTests::test_original_abstract_with_just_one_language_defined", "tests/test_document.py::ArticleTests::test_original_abstract_with_language_defined", "tests/test_document.py::ArticleTests::test_original_abstract_with_language_defined_but_different_of_the_article_original_language", "tests/test_document.py::ArticleTests::test_original_abstract_without_language_defined", "tests/test_document.py::ArticleTests::test_original_html_field_body", "tests/test_document.py::ArticleTests::test_original_language_invalid_iso639_2", "tests/test_document.py::ArticleTests::test_original_language_iso639_2", "tests/test_document.py::ArticleTests::test_original_language_original", "tests/test_document.py::ArticleTests::test_original_section_field_v49", "tests/test_document.py::ArticleTests::test_original_title_subfield_t", "tests/test_document.py::ArticleTests::test_original_title_with_just_one_language_defined", "tests/test_document.py::ArticleTests::test_original_title_with_language_defined", "tests/test_document.py::ArticleTests::test_original_title_with_language_defined_but_different_of_the_article_original_language", "tests/test_document.py::ArticleTests::test_original_title_without_language_defined", "tests/test_document.py::ArticleTests::test_pdf_url", "tests/test_document.py::ArticleTests::test_processing_date", "tests/test_document.py::ArticleTests::test_processing_date_1", "tests/test_document.py::ArticleTests::test_project_name", "tests/test_document.py::ArticleTests::test_project_sponsors", "tests/test_document.py::ArticleTests::test_publication_contract", "tests/test_document.py::ArticleTests::test_publication_date", "tests/test_document.py::ArticleTests::test_publisher_id", "tests/test_document.py::ArticleTests::test_receive_date", "tests/test_document.py::ArticleTests::test_review_date", "tests/test_document.py::ArticleTests::test_secion_code_field_v49", "tests/test_document.py::ArticleTests::test_section_code_nd_field_v49", "tests/test_document.py::ArticleTests::test_section_code_without_field_v49", "tests/test_document.py::ArticleTests::test_section_field_v49", "tests/test_document.py::ArticleTests::test_section_nd_field_v49", "tests/test_document.py::ArticleTests::test_section_without_field_v49", "tests/test_document.py::ArticleTests::test_start_page", "tests/test_document.py::ArticleTests::test_start_page_loaded_crazy_legacy_way_1", "tests/test_document.py::ArticleTests::test_start_page_loaded_crazy_legacy_way_2", "tests/test_document.py::ArticleTests::test_start_page_loaded_through_xml", "tests/test_document.py::ArticleTests::test_subject_areas", "tests/test_document.py::ArticleTests::test_thesis_degree", "tests/test_document.py::ArticleTests::test_thesis_organization", "tests/test_document.py::ArticleTests::test_thesis_organization_and_division", 
"tests/test_document.py::ArticleTests::test_thesis_organization_without_name", "tests/test_document.py::ArticleTests::test_translated_abstracts", "tests/test_document.py::ArticleTests::test_translated_abstracts_without_v83", "tests/test_document.py::ArticleTests::test_translated_abtracts_iso639_2", "tests/test_document.py::ArticleTests::test_translated_htmls_field_body", "tests/test_document.py::ArticleTests::test_translated_section_field_v49", "tests/test_document.py::ArticleTests::test_translated_titles", "tests/test_document.py::ArticleTests::test_translated_titles_iso639_2", "tests/test_document.py::ArticleTests::test_translated_titles_without_v12", "tests/test_document.py::ArticleTests::test_update_date", "tests/test_document.py::ArticleTests::test_update_date_1", "tests/test_document.py::ArticleTests::test_update_date_2", "tests/test_document.py::ArticleTests::test_update_date_3", "tests/test_document.py::ArticleTests::test_whitwout_acceptance_date", "tests/test_document.py::ArticleTests::test_whitwout_ahead_publication_date", "tests/test_document.py::ArticleTests::test_whitwout_receive_date", "tests/test_document.py::ArticleTests::test_whitwout_review_date", "tests/test_document.py::ArticleTests::test_without_affiliations", "tests/test_document.py::ArticleTests::test_without_authors", "tests/test_document.py::ArticleTests::test_without_citations", "tests/test_document.py::ArticleTests::test_without_collection_acronym", "tests/test_document.py::ArticleTests::test_without_corporative_authors", "tests/test_document.py::ArticleTests::test_without_document_type", "tests/test_document.py::ArticleTests::test_without_doi", "tests/test_document.py::ArticleTests::test_without_e_location", "tests/test_document.py::ArticleTests::test_without_html_url", "tests/test_document.py::ArticleTests::test_without_issue_url", "tests/test_document.py::ArticleTests::test_without_journal_abbreviated_title", "tests/test_document.py::ArticleTests::test_without_keywords", "tests/test_document.py::ArticleTests::test_without_last_page", "tests/test_document.py::ArticleTests::test_without_normalized_affiliations", "tests/test_document.py::ArticleTests::test_without_order", "tests/test_document.py::ArticleTests::test_without_original_abstract", "tests/test_document.py::ArticleTests::test_without_original_title", "tests/test_document.py::ArticleTests::test_without_pages", "tests/test_document.py::ArticleTests::test_without_pdf_url", "tests/test_document.py::ArticleTests::test_without_processing_date", "tests/test_document.py::ArticleTests::test_without_project_name", "tests/test_document.py::ArticleTests::test_without_project_sponsor", "tests/test_document.py::ArticleTests::test_without_publication_contract", "tests/test_document.py::ArticleTests::test_without_publication_date", "tests/test_document.py::ArticleTests::test_without_publisher_id", "tests/test_document.py::ArticleTests::test_without_scielo_domain", "tests/test_document.py::ArticleTests::test_without_scielo_domain_article_v69", "tests/test_document.py::ArticleTests::test_without_scielo_domain_article_v69_and_with_title_v690", "tests/test_document.py::ArticleTests::test_without_scielo_domain_title_v690", "tests/test_document.py::ArticleTests::test_without_start_page", "tests/test_document.py::ArticleTests::test_without_subject_areas", "tests/test_document.py::ArticleTests::test_without_thesis_degree", "tests/test_document.py::ArticleTests::test_without_thesis_organization", "tests/test_document.py::ArticleTests::test_without_wos_citation_indexes", 
"tests/test_document.py::ArticleTests::test_without_wos_subject_areas", "tests/test_document.py::ArticleTests::test_wos_citation_indexes", "tests/test_document.py::ArticleTests::test_wos_subject_areas", "tests/test_document.py::CitationTest::test_a_link_access_date", "tests/test_document.py::CitationTest::test_analytic_institution_for_a_article_citation", "tests/test_document.py::CitationTest::test_analytic_institution_for_a_book_citation", "tests/test_document.py::CitationTest::test_article_title", "tests/test_document.py::CitationTest::test_article_without_title", "tests/test_document.py::CitationTest::test_authors_article", "tests/test_document.py::CitationTest::test_authors_book", "tests/test_document.py::CitationTest::test_authors_link", "tests/test_document.py::CitationTest::test_authors_thesis", "tests/test_document.py::CitationTest::test_book_chapter_title", "tests/test_document.py::CitationTest::test_book_edition", "tests/test_document.py::CitationTest::test_book_volume", "tests/test_document.py::CitationTest::test_book_without_chapter_title", "tests/test_document.py::CitationTest::test_citation_sample_congress", "tests/test_document.py::CitationTest::test_citation_sample_link", "tests/test_document.py::CitationTest::test_citation_sample_link_without_comment", "tests/test_document.py::CitationTest::test_conference_edition", "tests/test_document.py::CitationTest::test_conference_name", "tests/test_document.py::CitationTest::test_conference_sponsor", "tests/test_document.py::CitationTest::test_conference_without_name", "tests/test_document.py::CitationTest::test_conference_without_sponsor", "tests/test_document.py::CitationTest::test_date", "tests/test_document.py::CitationTest::test_doi", "tests/test_document.py::CitationTest::test_editor", "tests/test_document.py::CitationTest::test_elocation_14", "tests/test_document.py::CitationTest::test_elocation_514", "tests/test_document.py::CitationTest::test_end_page_14", "tests/test_document.py::CitationTest::test_end_page_514", "tests/test_document.py::CitationTest::test_end_page_withdout_data", "tests/test_document.py::CitationTest::test_first_author_article", "tests/test_document.py::CitationTest::test_first_author_book", "tests/test_document.py::CitationTest::test_first_author_link", "tests/test_document.py::CitationTest::test_first_author_thesis", "tests/test_document.py::CitationTest::test_first_author_without_monographic_authors", "tests/test_document.py::CitationTest::test_first_author_without_monographic_authors_but_not_a_book_citation", "tests/test_document.py::CitationTest::test_index_number", "tests/test_document.py::CitationTest::test_institutions_all_fields", "tests/test_document.py::CitationTest::test_institutions_v11", "tests/test_document.py::CitationTest::test_institutions_v17", "tests/test_document.py::CitationTest::test_institutions_v29", "tests/test_document.py::CitationTest::test_institutions_v50", "tests/test_document.py::CitationTest::test_institutions_v58", "tests/test_document.py::CitationTest::test_invalid_edition", "tests/test_document.py::CitationTest::test_isbn", "tests/test_document.py::CitationTest::test_isbn_but_not_a_book", "tests/test_document.py::CitationTest::test_issn", "tests/test_document.py::CitationTest::test_issn_but_not_an_article", "tests/test_document.py::CitationTest::test_issue_part", "tests/test_document.py::CitationTest::test_issue_title", "tests/test_document.py::CitationTest::test_journal_issue", "tests/test_document.py::CitationTest::test_journal_volume", 
"tests/test_document.py::CitationTest::test_link", "tests/test_document.py::CitationTest::test_link_title", "tests/test_document.py::CitationTest::test_link_without_title", "tests/test_document.py::CitationTest::test_monographic_authors", "tests/test_document.py::CitationTest::test_monographic_first_author", "tests/test_document.py::CitationTest::test_pages_14", "tests/test_document.py::CitationTest::test_pages_514", "tests/test_document.py::CitationTest::test_pages_withdout_data", "tests/test_document.py::CitationTest::test_publication_type_article", "tests/test_document.py::CitationTest::test_publication_type_book", "tests/test_document.py::CitationTest::test_publication_type_conference", "tests/test_document.py::CitationTest::test_publication_type_link", "tests/test_document.py::CitationTest::test_publication_type_thesis", "tests/test_document.py::CitationTest::test_publication_type_undefined", "tests/test_document.py::CitationTest::test_publisher", "tests/test_document.py::CitationTest::test_publisher_address", "tests/test_document.py::CitationTest::test_publisher_address_without_e", "tests/test_document.py::CitationTest::test_series_book", "tests/test_document.py::CitationTest::test_series_but_neither_journal_book_or_conference_citation", "tests/test_document.py::CitationTest::test_series_conference", "tests/test_document.py::CitationTest::test_series_journal", "tests/test_document.py::CitationTest::test_source_book_title", "tests/test_document.py::CitationTest::test_source_journal", "tests/test_document.py::CitationTest::test_source_journal_without_journal_title", "tests/test_document.py::CitationTest::test_sponsor", "tests/test_document.py::CitationTest::test_start_page_14", "tests/test_document.py::CitationTest::test_start_page_514", "tests/test_document.py::CitationTest::test_start_page_withdout_data", "tests/test_document.py::CitationTest::test_thesis_institution", "tests/test_document.py::CitationTest::test_thesis_title", "tests/test_document.py::CitationTest::test_thesis_without_title", "tests/test_document.py::CitationTest::test_title_when_article_citation", "tests/test_document.py::CitationTest::test_title_when_conference_citation", "tests/test_document.py::CitationTest::test_title_when_link_citation", "tests/test_document.py::CitationTest::test_title_when_thesis_citation", "tests/test_document.py::CitationTest::test_with_volume_but_not_a_journal_article_neither_a_book", "tests/test_document.py::CitationTest::test_without_analytic_institution", "tests/test_document.py::CitationTest::test_without_authors", "tests/test_document.py::CitationTest::test_without_date", "tests/test_document.py::CitationTest::test_without_doi", "tests/test_document.py::CitationTest::test_without_edition", "tests/test_document.py::CitationTest::test_without_editor", "tests/test_document.py::CitationTest::test_without_first_author", "tests/test_document.py::CitationTest::test_without_index_number", "tests/test_document.py::CitationTest::test_without_institutions", "tests/test_document.py::CitationTest::test_without_issue", "tests/test_document.py::CitationTest::test_without_issue_part", "tests/test_document.py::CitationTest::test_without_issue_title", "tests/test_document.py::CitationTest::test_without_link", "tests/test_document.py::CitationTest::test_without_monographic_authors", "tests/test_document.py::CitationTest::test_without_monographic_authors_but_not_a_book_citation", "tests/test_document.py::CitationTest::test_without_publisher", 
"tests/test_document.py::CitationTest::test_without_publisher_address", "tests/test_document.py::CitationTest::test_without_series", "tests/test_document.py::CitationTest::test_without_sponsor", "tests/test_document.py::CitationTest::test_without_thesis_institution", "tests/test_document.py::CitationTest::test_without_volume" ]
[]
BSD 2-Clause "Simplified" License
456
DataDog__datadogpy-118
c8bc9d6cce1caebea0be16366f2cd0c3efb47571
2016-03-01 18:46:54
ef81785f880925467b9eeccf5ebd5b226a05d32f
yannmh: @JohnLZeller can you take a pass on it ? JohnLZeller: :+1: looks good once conflicts are resolved.
diff --git a/datadog/dogstatsd/base.py b/datadog/dogstatsd/base.py index 7e0e11d..2f7725c 100644 --- a/datadog/dogstatsd/base.py +++ b/datadog/dogstatsd/base.py @@ -22,7 +22,7 @@ log = logging.getLogger('dogstatsd') class DogStatsd(object): OK, WARNING, CRITICAL, UNKNOWN = (0, 1, 2, 3) - def __init__(self, host='localhost', port=8125, max_buffer_size=50, + def __init__(self, host='localhost', port=8125, max_buffer_size=50, namespace=None, constant_tags=None, use_ms=False): """ Initialize a DogStatsd object. @@ -39,7 +39,10 @@ class DogStatsd(object): if sending metrics in batch :type max_buffer_size: integer - :param constant_tags: Tags to attach to every metric reported by this client + :param namepace: Namespace to prefix all metric names + :type namepace: string + + :param constant_tags: Tags to attach to all metrics :type constant_tags: list of strings :param use_ms: Report timed values in milliseconds instead of seconds (default False) @@ -58,6 +61,7 @@ class DogStatsd(object): if constant_tags is None: constant_tags = [] self.constant_tags = constant_tags + env_tags + self.namespace = namespace self.use_ms = use_ms def __enter__(self): @@ -222,24 +226,37 @@ class DogStatsd(object): self._report(metric, 's', value, tags, sample_rate) def _report(self, metric, metric_type, value, tags, sample_rate): + """ + Create a metric packet and send it. + + More information about the packets' format: http://docs.datadoghq.com/guides/dogstatsd/ + """ if sample_rate != 1 and random() > sample_rate: return - payload = [metric, ":", value, "|", metric_type] - if sample_rate != 1: - payload.extend(["|@", sample_rate]) + payload = [] - # Append all client level tags to every metric + # Resolve the full tag list if self.constant_tags: if tags: tags = tags + self.constant_tags else: tags = self.constant_tags + # Create/format the metric packet + if self.namespace: + payload.extend([self.namespace, "."]) + payload.extend([metric, ":", value, "|", metric_type]) + + if sample_rate != 1: + payload.extend(["|@", sample_rate]) + if tags: payload.extend(["|#", ",".join(tags)]) encoded = "".join(imap(str, payload)) + + # Send it self._send(encoded) def _send_to_server(self, packet): diff --git a/datadog/threadstats/base.py b/datadog/threadstats/base.py index 89bbfc9..40aed00 100644 --- a/datadog/threadstats/base.py +++ b/datadog/threadstats/base.py @@ -23,7 +23,7 @@ log = logging.getLogger('dd.datadogpy') class ThreadStats(object): - def __init__(self, constant_tags=None): + def __init__(self, namespace="", constant_tags=None): """ Initialize a dogstats object. @@ -33,13 +33,16 @@ class ThreadStats(object): :envvar DATADOG_TAGS: Tags to attach to every metric reported by ThreadStats client :type constant_tags: list of strings """ - # Don't collect until start is called. - self._disabled = True + # Parameters + self.namespace = namespace env_tags = [tag for tag in os.environ.get('DATADOG_TAGS', '').split(',') if tag] if constant_tags is None: constant_tags = [] self.constant_tags = constant_tags + env_tags + # State + self._disabled = True + def start(self, flush_interval=10, roll_up_interval=10, device=None, flush_in_thread=True, flush_in_greenlet=False, disabled=False): """ @@ -307,23 +310,31 @@ class ThreadStats(object): self._is_flush_in_progress = False def _get_aggregate_metrics(self, flush_time=None): + """ + Get, format and return the rolled up metrics from the aggregator. 
+ """ # Get rolled up metrics rolled_up_metrics = self._metric_aggregator.flush(flush_time) # FIXME: emit a dictionary from the aggregator metrics = [] for timestamp, value, name, tags, host in rolled_up_metrics: - # Append all client level tags to every metric metric_tags = tags + metric_name = name + # Append all client level tags to every metric if self.constant_tags: if tags: metric_tags = tags + self.constant_tags else: metric_tags = self.constant_tags + # Resolve the metric name + if self.namespace: + metric_name = self.namespace + "." + name + metric = { - 'metric': name, + 'metric': metric_name, 'points': [[timestamp, value]], 'type': MetricType.Gauge, 'host': host,
Prefix support Please, add prefix support like Java and C#: https://github.com/DataDog/dogstatsd-csharp-client/blob/master/src/StatsdClient/StatsdConfig.cs#L8 https://github.com/indeedeng/java-dogstatsd-client/blob/master/src/main/java/com/timgroup/statsd/NonBlockingStatsDClient.java#L120
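As a rough illustration of the requested feature (and of the `namespace` argument introduced in the patch field above), here is a minimal, hypothetical usage sketch; the prefix `myapp`, the metric name `gauge`, and the tag below are made-up values, not part of the dataset record:

```python
# Minimal sketch assuming the namespace patch above is applied.
# "myapp", "gauge", and the env:prod tag are illustrative values only.
from datadog.dogstatsd.base import DogStatsd

statsd = DogStatsd(namespace="myapp", constant_tags=["env:prod"])
statsd.gauge("gauge", 123.4)
# Per the patched _report(), the dogstatsd packet is prefixed with the
# namespace before tags are appended:
#   myapp.gauge:123.4|g|#env:prod
```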
DataDog/datadogpy
diff --git a/tests/unit/dogstatsd/test_statsd.py b/tests/unit/dogstatsd/test_statsd.py index dacf09d..8473b87 100644 --- a/tests/unit/dogstatsd/test_statsd.py +++ b/tests/unit/dogstatsd/test_statsd.py @@ -149,6 +149,14 @@ class TestDogStatsd(object): u'_sc|my_check.name|{0}|d:{1}|h:i-abcd1234|#key1:val1,key2:val2|m:{2}' .format(self.statsd.WARNING, now, u"♬ †øU \\n†øU ¥ºu|m\: T0µ ♪"), self.recv()) + def test_metric_namespace(self): + """ + Namespace prefixes all metric names. + """ + self.statsd.namespace = "foo" + self.statsd.gauge('gauge', 123.4) + t.assert_equal('foo.gauge:123.4|g', self.recv()) + # Test Client level contant tags def test_gauge_constant_tags(self): self.statsd.constant_tags=['bar:baz', 'foo'] diff --git a/tests/unit/threadstats/test_threadstats.py b/tests/unit/threadstats/test_threadstats.py index eaa0658..6f2e90b 100644 --- a/tests/unit/threadstats/test_threadstats.py +++ b/tests/unit/threadstats/test_threadstats.py @@ -2,18 +2,19 @@ Tests for the ThreadStats class, using HTTP mode """ -import os +# stdlib import logging +import os import random import time -import threading +import unittest +# 3p +from mock import patch import nose.tools as nt -from nose.plugins.skip import SkipTest +# datadog from datadog import ThreadStats -from datadog.api.exceptions import ApiNotInitialized - from tests.util.contextmanagers import preserve_environment_variable @@ -22,12 +23,10 @@ logger = logging.getLogger('dd.datadogpy') logger.setLevel(logging.ERROR) -# -# Test fixtures. -# - class MemoryReporter(object): - """ A reporting class that reports to memory for testing. """ + """ + A reporting class that reports to memory for testing. + """ def __init__(self): self.metrics = [] @@ -40,14 +39,20 @@ class MemoryReporter(object): self.events += events -# -# Unit tests. -# -class TestUnitThreadStats(object): - """ Unit tests for the dog stats api. """ +class TestUnitThreadStats(unittest.TestCase): + """ + Unit tests for ThreadStats. + """ + def setUp(self): + """ + Set a mocked reporter. + """ + self.reporter = MemoryReporter() def sort_metrics(self, metrics): - """ Sort metrics by timestamp of first point and then name """ + """ + Sort metrics by timestamp of first point and then name. + """ def sort(metric): tags = metric['tags'] or [] host = metric['host'] or '' @@ -55,6 +60,39 @@ class TestUnitThreadStats(object): metric['points'][0][1]) return sorted(metrics, key=sort) + def assertMetric(self, name=None, value=None, tags=None, count=None): + """ + Helper, to make assertions on metrics. + """ + matching_metrics = [] + + for metric in self.reporter.metrics: + if name and name != metric['metric']: + continue + if value and value != metric['points'][0][1]: + continue + if tags and tags != metric['tags']: + continue + matching_metrics.append(metric) + + if count: + self.assertEquals( + len(matching_metrics), count, + u"Candidate size assertion failure: expected {expected}, found {count}. " + u"Metric name={name}, value={value}, tags={tags}.".format( + expected=count, count=len(matching_metrics), + name=name, value=value, tags=tags + ) + ) + else: + self.assertTrue( + len(matching_metrics) > 0, + u"Candidate size assertion failure: no matching metric found. 
" + u"Metric name={name}, value={value}, tags={tags}.".format( + name=name, value=value, tags=tags + ) + ) + def test_timed_decorator(self): dog = ThreadStats() dog.start(roll_up_interval=1, flush_in_thread=False) @@ -393,51 +431,75 @@ class TestUnitThreadStats(object): nt.assert_equal(g3['points'][0][1], 20) def test_constant_tags(self): - dog = ThreadStats(constant_tags=['type:constant']) - dog.start(roll_up_interval=10, flush_in_thread=False) - reporter = dog.reporter = MemoryReporter() + """ + Constant tags are attached to all metrics. + """ + dog = ThreadStats(constant_tags=["type:constant"]) + dog.start(roll_up_interval=1, flush_in_thread=False) + dog.reporter = self.reporter # Post the same metric with different tags. - dog.gauge('gauge', 10, timestamp=100.0) - dog.gauge('gauge', 15, timestamp=100.0, tags=['env:production', 'db']) - dog.gauge('gauge', 20, timestamp=100.0, tags=['env:staging']) + dog.gauge("gauge", 10, timestamp=100.0) + dog.gauge("gauge", 15, timestamp=100.0, tags=["env:production", 'db']) + dog.gauge("gauge", 20, timestamp=100.0, tags=["env:staging"]) - dog.increment('counter', timestamp=100.0) - dog.increment('counter', timestamp=100.0, tags=['env:production', 'db']) - dog.increment('counter', timestamp=100.0, tags=['env:staging']) + dog.increment("counter", timestamp=100.0) + dog.increment("counter", timestamp=100.0, tags=["env:production", 'db']) + dog.increment("counter", timestamp=100.0, tags=["env:staging"]) dog.flush(200.0) - metrics = self.sort_metrics(reporter.metrics) - nt.assert_equal(len(metrics), 6) + # Assertions on all metrics + self.assertMetric(count=6) - [c1, c2, c3, g1, g2, g3] = metrics - (nt.assert_equal(c['metric'], 'counter') for c in [c1, c2, c3]) - nt.assert_equal(c1['tags'], ['env:production', 'db', 'type:constant']) - nt.assert_equal(c1['points'][0][1], 1) - nt.assert_equal(c2['tags'], ['env:staging', 'type:constant']) - nt.assert_equal(c2['points'][0][1], 1) - nt.assert_equal(c3['tags'], ['type:constant']) - nt.assert_equal(c3['points'][0][1], 1) + # Assertions on gauges + self.assertMetric(name='gauge', value=10, tags=["type:constant"], count=1) + self.assertMetric(name="gauge", value=15, tags=["env:production", "db", "type:constant"], count=1) # noqa + self.assertMetric(name="gauge", value=20, tags=["env:staging", "type:constant"], count=1) - (nt.assert_equal(c['metric'], 'gauge') for c in [g1, g2, g3]) - nt.assert_equal(g1['tags'], ['env:production', 'db', 'type:constant']) - nt.assert_equal(g1['points'][0][1], 15) - nt.assert_equal(g2['tags'], ['env:staging', 'type:constant']) - nt.assert_equal(g2['points'][0][1], 20) - nt.assert_equal(g3['tags'], ['type:constant']) - nt.assert_equal(g3['points'][0][1], 10) + # Assertions on counters + self.assertMetric(name="counter", value=1, tags=["type:constant"], count=1) + self.assertMetric(name="counter", value=1, tags=["env:production", "db", "type:constant"], count=1) # noqa + self.assertMetric(name="counter", value=1, tags=["env:staging", "type:constant"], count=1) # Ensure histograms work as well. @dog.timed('timed', tags=['version:1']) - def test(): + def do_nothing(): + """ + A function that does nothing, but being timed. 
+ """ pass - test() + + with patch("datadog.threadstats.base.time", return_value=300): + do_nothing() + dog.histogram('timed', 20, timestamp=300.0, tags=['db', 'version:2']) - reporter.metrics = [] - dog.flush(400) - for metric in reporter.metrics: - assert metric['tags'] # this is enough + + self.reporter.metrics = [] + dog.flush(400.0) + + # Histograms, and related metric types, produce 8 different metrics + self.assertMetric(tags=["version:1", "type:constant"], count=8) + self.assertMetric(tags=["db", "version:2", "type:constant"], count=8) + + def test_metric_namespace(self): + """ + Namespace prefixes all metric names. + """ + # Set up ThreadStats with a namespace + dog = ThreadStats(namespace="foo") + dog.start(roll_up_interval=1, flush_in_thread=False) + dog.reporter = self.reporter + + # Send a few metrics + dog.gauge("gauge", 20, timestamp=100.0) + dog.increment("counter", timestamp=100.0) + dog.flush(200.0) + + # Metric names are prefixed with the namespace + self.assertMetric(count=2) + self.assertMetric(name="foo.gauge", count=1) + self.assertMetric(name="foo.counter", count=1) def test_host(self): dog = ThreadStats()
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_hyperlinks", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 3, "test_score": 2 }, "num_modified_files": 2 }
0.10
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "nose", "six", "mock", "pytest" ], "pre_install": null, "python": "3.4", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 charset-normalizer==2.0.12 -e git+https://github.com/DataDog/datadogpy.git@c8bc9d6cce1caebea0be16366f2cd0c3efb47571#egg=datadog decorator==5.1.1 idna==3.10 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work mock==5.2.0 more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work nose==1.3.7 packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work py @ file:///opt/conda/conda-bld/py_1644396412707/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 requests==2.27.1 simplejson==3.20.1 six==1.17.0 toml @ file:///tmp/build/80754af9/toml_1616166611790/work typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work urllib3==1.26.20 zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: datadogpy channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - charset-normalizer==2.0.12 - decorator==5.1.1 - idna==3.10 - mock==5.2.0 - nose==1.3.7 - requests==2.27.1 - simplejson==3.20.1 - six==1.17.0 - urllib3==1.26.20 prefix: /opt/conda/envs/datadogpy
[ "tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_metric_namespace" ]
[ "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_set", "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_gauge", "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_counter", "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_histogram", "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_tagged_gauge", "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_tagged_counter", "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_tagged_histogram", "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_sample_rate", "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_tags_and_samples", "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_timing", "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_event", "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_event_constant_tags", "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_service_check", "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_metric_namespace", "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_gauge_constant_tags", "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_counter_constant_tag_with_metric_level_tags", "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_gauge_constant_tags_with_metric_level_tags_twice", "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_socket_error", "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_timed", "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_timed_in_ms", "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_timed_no_metric", "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_timed_context", "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_timed_context_exception", "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_timed_context_no_metric_exception", "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_batched" ]
[ "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_initialization", "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_context_manager", "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_batched_buffer_autoflush", "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_module_level_instance", "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_instantiating_does_not_connect", "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_accessing_socket_opens_socket", "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_accessing_socket_multiple_times_returns_same_socket", "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_tags_from_environment", "tests/unit/dogstatsd/test_statsd.py::TestDogStatsd::test_tags_from_environment_and_constant", "tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_constant_tags", "tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_counter", "tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_custom_host_and_device", "tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_default_host_and_device", "tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_disabled_mode", "tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_event", "tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_event_constant_tags", "tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_gauge", "tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_histogram", "tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_histogram_percentiles", "tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_host", "tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_stop", "tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_tags", "tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_tags_from_environment", "tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_tags_from_environment_and_constant", "tests/unit/threadstats/test_threadstats.py::TestUnitThreadStats::test_timed_decorator" ]
[]
BSD-3-Clause
457
falconry__falcon-723
ac03888ad750598175fa1591ef11ec8ae31b0dc0
2016-03-03 01:33:46
b78ffaac7c412d3b3d6cd3c70dd05024d79d2cce
kgriffs: Routing tree from the test: https://github.com/falconry/falcon/pull/723 jmvrbanac: Interesting. Seems to look good to me. kgriffs: Sorry, I had the wrong link for the routing tree. Updated. jmvrbanac: Outside of of @fxfitz's suggestion, :+1:
diff --git a/falcon/routing/compiled.py b/falcon/routing/compiled.py index 5f8f951..9177edb 100644 --- a/falcon/routing/compiled.py +++ b/falcon/routing/compiled.py @@ -95,7 +95,7 @@ class CompiledRouter(object): else: return None, None, None - def _compile_tree(self, nodes, indent=1, level=0): + def _compile_tree(self, nodes, indent=1, level=0, fast_return=True): """Generates Python code for a routing tree or subtree.""" def line(text, indent_offset=0): @@ -119,6 +119,18 @@ class CompiledRouter(object): nodes, key=lambda node: node.is_var + (node.is_var and not node.is_complex)) + # NOTE(kgriffs): Down to this branch in the tree, we can do a + # fast 'return None'. See if the nodes at this branch are + # all still simple, meaning there is only one possible path. + if fast_return: + if len(nodes) > 1: + # NOTE(kgriffs): There's the possibility of more than + # one path. + var_nodes = [node for node in nodes if node.is_var] + found_var_nodes = bool(var_nodes) + + fast_return = not found_var_nodes + for node in nodes: if node.is_var: if node.is_complex: @@ -162,10 +174,11 @@ class CompiledRouter(object): resource_idx = len(self._return_values) self._return_values.append(node) - self._compile_tree(node.children, indent, level + 1) + self._compile_tree(node.children, indent, level + 1, fast_return) if node.resource is None: - line('return None') + if fast_return: + line('return None') else: # NOTE(kgriffs): Make sure that we have consumed all of # the segments for the requested route; otherwise we could @@ -173,11 +186,12 @@ class CompiledRouter(object): line('if path_len == %d:' % (level + 1)) line('return return_values[%d]' % resource_idx, 1) - line('return None') + if fast_return: + line('return None') indent = level_indent - if not found_simple: + if not found_simple and fast_return: line('return None') def _compile(self):
Path segment in one route's URI template masks the field expression in another route The following test demonstrates the issue. The assertion fails since the resulting status code is 404, rather than 200. "/v2.0/thing" should route to `/{version}/thing` instead of `/v2.0`. ```py def test_string_vs_var(self): self.api.add_route('/v2.0', self.resource) self.simulate_request('/v2.0') self.api.add_route('/{version}/thing', testing.TestResource()) self.simulate_request('/v2.0/thing') self.assertEqual(self.srmock.status, falcon.HTTP_200) ``` ``` /{version}/foo/bar /v1.0 ```
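For illustration, a toy lookup (not falcon's actual compiled router, which generates Python code from the routing tree) shows why the patch above stops emitting an early `return None`: when a literal segment fails deeper in the tree, the search must still be able to fall back to a sibling variable segment. All names and the tree layout below are hypothetical:

```python
# Toy routing-tree lookup, illustrative only. A node maps literal children
# by segment name and may also carry one variable child such as '{version}'.
def find(node, segments, params):
    if not segments:
        return node.get("resource")
    head, rest = segments[0], segments[1:]
    literal = node.get("children", {}).get(head)
    if literal is not None:
        found = find(literal, rest, params)
        if found is not None:
            return found  # only stop here; otherwise fall through below
    var = node.get("var_child")
    if var is not None:
        params[var["field"]] = head
        return find(var, rest, params)
    return None  # a "fast return" is only safe when no variable sibling exists


tree = {
    "children": {"v2.0": {"resource": "v2.0 resource"}},
    "var_child": {
        "field": "version",
        "children": {"thing": {"resource": "thing resource"}},
    },
}
assert find(tree, ["v2.0"], {}) == "v2.0 resource"
# '/v2.0/thing' is no longer masked by the literal '/v2.0' route:
assert find(tree, ["v2.0", "thing"], {}) == "thing resource"
```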
falconry/falcon
diff --git a/tests/test_default_router.py b/tests/test_default_router.py index 9dc5ecd..84af78f 100644 --- a/tests/test_default_router.py +++ b/tests/test_default_router.py @@ -1,5 +1,6 @@ import ddt +from falcon.routing import DefaultRouter import falcon.testing as testing @@ -14,66 +15,115 @@ class ResourceWithId(object): resp.body = self.resource_id -def setup_routes(router_interface): - router_interface.add_route( - '/repos', {}, ResourceWithId(1)) - router_interface.add_route( - '/repos/{org}', {}, ResourceWithId(2)) - router_interface.add_route( - '/repos/{org}/{repo}', {}, ResourceWithId(3)) - router_interface.add_route( - '/repos/{org}/{repo}/commits', {}, ResourceWithId(4)) - router_interface.add_route( - '/repos/{org}/{repo}/compare/{usr0}:{branch0}...{usr1}:{branch1}', - {}, ResourceWithId(5)) - router_interface.add_route( - '/teams/{id}', {}, ResourceWithId(6)) - router_interface.add_route( - '/teams/{id}/members', {}, ResourceWithId(7)) - router_interface.add_route( - '/user/memberships', {}, ResourceWithId(8)) - router_interface.add_route( - '/emojis', {}, ResourceWithId(9)) - router_interface.add_route( - '/repos/{org}/{repo}/compare/{usr0}:{branch0}...{usr1}:{branch1}/full', - {}, ResourceWithId(10)) - router_interface.add_route( - '/repos/{org}/{repo}/compare/all', {}, ResourceWithId(11)) - - # NOTE(kgriffs): The ordering of these calls is significant; we - # need to test that the {id} field does not match the other routes, - # regardless of the order they are added. - router_interface.add_route( - '/emojis/signs/0', {}, ResourceWithId(12)) - router_interface.add_route( - '/emojis/signs/{id}', {}, ResourceWithId(13)) - router_interface.add_route( - '/emojis/signs/42', {}, ResourceWithId(14)) - router_interface.add_route( - '/emojis/signs/42/small', {}, ResourceWithId(14.1)) - router_interface.add_route( - '/emojis/signs/78/small', {}, ResourceWithId(14.1)) - - router_interface.add_route( - '/repos/{org}/{repo}/compare/{usr0}:{branch0}...{usr1}:{branch1}/part', - {}, ResourceWithId(15)) - router_interface.add_route( - '/repos/{org}/{repo}/compare/{usr0}:{branch0}', - {}, ResourceWithId(16)) - router_interface.add_route( - '/repos/{org}/{repo}/compare/{usr0}:{branch0}/full', - {}, ResourceWithId(17)) - - router_interface.add_route( - '/gists/{id}/raw', {}, ResourceWithId(18)) +class TestRegressionCases(testing.TestBase): + """Test specific repros reported by users of the framework.""" + + def before(self): + self.router = DefaultRouter() + + def test_versioned_url(self): + self.router.add_route('/{version}/messages', {}, ResourceWithId(2)) + + resource, method_map, params = self.router.find('/v2/messages') + self.assertEqual(resource.resource_id, 2) + + self.router.add_route('/v2', {}, ResourceWithId(1)) + + resource, method_map, params = self.router.find('/v2') + self.assertEqual(resource.resource_id, 1) + + resource, method_map, params = self.router.find('/v2/messages') + self.assertEqual(resource.resource_id, 2) + + resource, method_map, params = self.router.find('/v1/messages') + self.assertEqual(resource.resource_id, 2) + + resource, method_map, params = self.router.find('/v1') + self.assertIs(resource, None) + + def test_recipes(self): + self.router.add_route( + '/recipes/{activity}/{type_id}', {}, ResourceWithId(1)) + self.router.add_route( + '/recipes/baking', {}, ResourceWithId(2)) + + resource, method_map, params = self.router.find('/recipes/baking/4242') + self.assertEqual(resource.resource_id, 1) + + resource, method_map, params = 
self.router.find('/recipes/baking') + self.assertEqual(resource.resource_id, 2) + + resource, method_map, params = self.router.find('/recipes/grilling') + self.assertIs(resource, None) @ddt.ddt -class TestStandaloneRouter(testing.TestBase): +class TestComplexRouting(testing.TestBase): def before(self): - from falcon.routing import DefaultRouter self.router = DefaultRouter() - setup_routes(self.router) + + self.router.add_route( + '/repos', {}, ResourceWithId(1)) + self.router.add_route( + '/repos/{org}', {}, ResourceWithId(2)) + self.router.add_route( + '/repos/{org}/{repo}', {}, ResourceWithId(3)) + self.router.add_route( + '/repos/{org}/{repo}/commits', {}, ResourceWithId(4)) + self.router.add_route( + '/repos/{org}/{repo}/compare/{usr0}:{branch0}...{usr1}:{branch1}', + {}, ResourceWithId(5)) + + self.router.add_route( + '/teams/{id}', {}, ResourceWithId(6)) + self.router.add_route( + '/teams/{id}/members', {}, ResourceWithId(7)) + + self.router.add_route( + '/teams/default', {}, ResourceWithId(19)) + self.router.add_route( + '/teams/default/members/thing', {}, ResourceWithId(19)) + + self.router.add_route( + '/user/memberships', {}, ResourceWithId(8)) + self.router.add_route( + '/emojis', {}, ResourceWithId(9)) + self.router.add_route( + '/repos/{org}/{repo}/compare/{usr0}:{branch0}...{usr1}:{branch1}/full', + {}, ResourceWithId(10)) + self.router.add_route( + '/repos/{org}/{repo}/compare/all', {}, ResourceWithId(11)) + + # NOTE(kgriffs): The ordering of these calls is significant; we + # need to test that the {id} field does not match the other routes, + # regardless of the order they are added. + self.router.add_route( + '/emojis/signs/0', {}, ResourceWithId(12)) + self.router.add_route( + '/emojis/signs/{id}', {}, ResourceWithId(13)) + self.router.add_route( + '/emojis/signs/42', {}, ResourceWithId(14)) + self.router.add_route( + '/emojis/signs/42/small', {}, ResourceWithId(14.1)) + self.router.add_route( + '/emojis/signs/78/small', {}, ResourceWithId(22)) + + self.router.add_route( + '/repos/{org}/{repo}/compare/{usr0}:{branch0}...{usr1}:{branch1}/part', + {}, ResourceWithId(15)) + self.router.add_route( + '/repos/{org}/{repo}/compare/{usr0}:{branch0}', + {}, ResourceWithId(16)) + self.router.add_route( + '/repos/{org}/{repo}/compare/{usr0}:{branch0}/full', + {}, ResourceWithId(17)) + + self.router.add_route( + '/gists/{id}/{representation}', {}, ResourceWithId(21)) + self.router.add_route( + '/gists/{id}/raw', {}, ResourceWithId(18)) + self.router.add_route( + '/gists/first', {}, ResourceWithId(20)) @ddt.data( '/teams/{collision}', # simple vs simple @@ -103,20 +153,6 @@ class TestStandaloneRouter(testing.TestBase): resource, method_map, params = self.router.find('/emojis/signs/0') self.assertEqual(resource.resource_id, -1) - def test_missing(self): - resource, method_map, params = self.router.find('/this/does/not/exist') - self.assertIs(resource, None) - - resource, method_map, params = self.router.find('/user/bogus') - self.assertIs(resource, None) - - resource, method_map, params = self.router.find('/teams/1234/bogus') - self.assertIs(resource, None) - - resource, method_map, params = self.router.find( - '/repos/racker/falcon/compare/johndoe:master...janedoe:dev/bogus') - self.assertIs(resource, None) - def test_literal_segment(self): resource, method_map, params = self.router.find('/emojis/signs/0') self.assertEqual(resource.resource_id, 12) @@ -167,6 +203,54 @@ class TestStandaloneRouter(testing.TestBase): resource, method_map, params = self.router.find('/gists/42/raw') 
self.assertEqual(params, {'id': '42'}) + @ddt.data( + ('/teams/default', 19), + ('/teams/default/members', 7), + ('/teams/foo', 6), + ('/teams/foo/members', 7), + ('/gists/first', 20), + ('/gists/first/raw', 18), + ('/gists/first/pdf', 21), + ('/gists/1776/pdf', 21), + ('/emojis/signs/78', 13), + ('/emojis/signs/78/small', 22), + ) + @ddt.unpack + def test_literal_vs_variable(self, path, expected_id): + resource, method_map, params = self.router.find(path) + self.assertEqual(resource.resource_id, expected_id) + + @ddt.data( + # Misc. + '/this/does/not/exist', + '/user/bogus', + '/repos/racker/falcon/compare/johndoe:master...janedoe:dev/bogus', + + # Literal vs variable (teams) + '/teams', + '/teams/42/members/undefined', + '/teams/42/undefined', + '/teams/42/undefined/segments', + '/teams/default/members/undefined', + '/teams/default/members/thing/undefined', + '/teams/default/members/thing/undefined/segments', + '/teams/default/undefined', + '/teams/default/undefined/segments', + + # Literal vs variable (emojis) + '/emojis/signs', + '/emojis/signs/0/small', + '/emojis/signs/0/undefined', + '/emojis/signs/0/undefined/segments', + '/emojis/signs/20/small', + '/emojis/signs/20/undefined', + '/emojis/signs/42/undefined', + '/emojis/signs/78/undefined', + ) + def test_not_found(self, path): + resource, method_map, params = self.router.find(path) + self.assertIs(resource, None) + def test_subsegment_not_found(self): resource, method_map, params = self.router.find('/emojis/signs/0/x') self.assertIs(resource, None) @@ -195,7 +279,7 @@ class TestStandaloneRouter(testing.TestBase): 'usr0': 'johndoe', 'branch0': 'master', 'usr1': 'janedoe', - 'branch1': 'dev' + 'branch1': 'dev', }) @ddt.data(('', 16), ('/full', 17))
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 1 }
0.3
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "nose", "tox", "coveralls", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.5", "reqs_path": [ "tools/test-requires" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 charset-normalizer==2.0.12 coverage==6.2 coveralls==3.3.1 ddt==1.7.2 distlib==0.3.9 docopt==0.6.2 -e git+https://github.com/falconry/falcon.git@ac03888ad750598175fa1591ef11ec8ae31b0dc0#egg=falcon filelock==3.4.1 fixtures==4.0.1 idna==3.10 importlib-metadata==4.8.3 importlib-resources==5.4.0 iniconfig==1.1.1 nose==1.3.7 packaging==21.3 pbr==6.1.1 platformdirs==2.4.0 pluggy==1.0.0 py==1.11.0 pyparsing==3.1.4 pytest==7.0.1 python-mimeparse==1.6.0 PyYAML==6.0.1 requests==2.27.1 six==1.17.0 testtools==2.6.0 toml==0.10.2 tomli==1.2.3 tox==3.28.0 typing_extensions==4.1.1 urllib3==1.26.20 virtualenv==20.17.1 zipp==3.6.0
name: falcon channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - charset-normalizer==2.0.12 - coverage==6.2 - coveralls==3.3.1 - ddt==1.7.2 - distlib==0.3.9 - docopt==0.6.2 - filelock==3.4.1 - fixtures==4.0.1 - idna==3.10 - importlib-metadata==4.8.3 - importlib-resources==5.4.0 - iniconfig==1.1.1 - nose==1.3.7 - packaging==21.3 - pbr==6.1.1 - platformdirs==2.4.0 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - python-mimeparse==1.6.0 - pyyaml==6.0.1 - requests==2.27.1 - six==1.17.0 - testtools==2.6.0 - toml==0.10.2 - tomli==1.2.3 - tox==3.28.0 - typing-extensions==4.1.1 - urllib3==1.26.20 - virtualenv==20.17.1 - zipp==3.6.0 prefix: /opt/conda/envs/falcon
[ "tests/test_default_router.py::TestRegressionCases::test_recipes", "tests/test_default_router.py::TestRegressionCases::test_versioned_url", "tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_02____teams_default_members___7_", "tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_06____gists_first_raw___18_", "tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_07____gists_first_pdf___21_", "tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_09____emojis_signs_78___13_" ]
[]
[ "tests/test_default_router.py::TestComplexRouting::test_collision_1__teams__collision_", "tests/test_default_router.py::TestComplexRouting::test_collision_2__emojis_signs__id_too_", "tests/test_default_router.py::TestComplexRouting::test_collision_3__repos__org___repo__compare__complex___vs_____complex2___collision_", "tests/test_default_router.py::TestComplexRouting::test_complex_1______5_", "tests/test_default_router.py::TestComplexRouting::test_complex_2____full___10_", "tests/test_default_router.py::TestComplexRouting::test_complex_3____part___15_", "tests/test_default_router.py::TestComplexRouting::test_complex_alt_1______16_", "tests/test_default_router.py::TestComplexRouting::test_complex_alt_2____full___17_", "tests/test_default_router.py::TestComplexRouting::test_dead_segment_1__teams", "tests/test_default_router.py::TestComplexRouting::test_dead_segment_2__emojis_signs", "tests/test_default_router.py::TestComplexRouting::test_dead_segment_3__gists", "tests/test_default_router.py::TestComplexRouting::test_dead_segment_4__gists_42", "tests/test_default_router.py::TestComplexRouting::test_dump", "tests/test_default_router.py::TestComplexRouting::test_literal", "tests/test_default_router.py::TestComplexRouting::test_literal_segment", "tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_01____teams_default___19_", "tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_03____teams_foo___6_", "tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_04____teams_foo_members___7_", "tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_05____gists_first___20_", "tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_08____gists_1776_pdf___21_", "tests/test_default_router.py::TestComplexRouting::test_literal_vs_variable_10____emojis_signs_78_small___22_", "tests/test_default_router.py::TestComplexRouting::test_malformed_pattern", "tests/test_default_router.py::TestComplexRouting::test_multivar", "tests/test_default_router.py::TestComplexRouting::test_non_collision_1__repos__org___repo__compare__simple_vs_complex_", "tests/test_default_router.py::TestComplexRouting::test_non_collision_2__repos__complex___vs___simple_", "tests/test_default_router.py::TestComplexRouting::test_non_collision_3__repos__org___repo__compare__complex___vs_____complex2__full", "tests/test_default_router.py::TestComplexRouting::test_not_found_01__this_does_not_exist", "tests/test_default_router.py::TestComplexRouting::test_not_found_02__user_bogus", "tests/test_default_router.py::TestComplexRouting::test_not_found_03__repos_racker_falcon_compare_johndoe_master___janedoe_dev_bogus", "tests/test_default_router.py::TestComplexRouting::test_not_found_04__teams", "tests/test_default_router.py::TestComplexRouting::test_not_found_05__teams_42_members_undefined", "tests/test_default_router.py::TestComplexRouting::test_not_found_06__teams_42_undefined", "tests/test_default_router.py::TestComplexRouting::test_not_found_07__teams_42_undefined_segments", "tests/test_default_router.py::TestComplexRouting::test_not_found_08__teams_default_members_undefined", "tests/test_default_router.py::TestComplexRouting::test_not_found_09__teams_default_members_thing_undefined", "tests/test_default_router.py::TestComplexRouting::test_not_found_10__teams_default_members_thing_undefined_segments", "tests/test_default_router.py::TestComplexRouting::test_not_found_11__teams_default_undefined", 
"tests/test_default_router.py::TestComplexRouting::test_not_found_12__teams_default_undefined_segments", "tests/test_default_router.py::TestComplexRouting::test_not_found_13__emojis_signs", "tests/test_default_router.py::TestComplexRouting::test_not_found_14__emojis_signs_0_small", "tests/test_default_router.py::TestComplexRouting::test_not_found_15__emojis_signs_0_undefined", "tests/test_default_router.py::TestComplexRouting::test_not_found_16__emojis_signs_0_undefined_segments", "tests/test_default_router.py::TestComplexRouting::test_not_found_17__emojis_signs_20_small", "tests/test_default_router.py::TestComplexRouting::test_not_found_18__emojis_signs_20_undefined", "tests/test_default_router.py::TestComplexRouting::test_not_found_19__emojis_signs_42_undefined", "tests/test_default_router.py::TestComplexRouting::test_not_found_20__emojis_signs_78_undefined", "tests/test_default_router.py::TestComplexRouting::test_override", "tests/test_default_router.py::TestComplexRouting::test_subsegment_not_found", "tests/test_default_router.py::TestComplexRouting::test_variable" ]
[]
Apache License 2.0
458
dask__dask-1024
09bda79a79acc261f31a446425efc5dd2fb42b9a
2016-03-03 07:43:47
6dc9229362f2d3b1dfa466a8a63831c3c832b4be
diff --git a/dask/array/reductions.py b/dask/array/reductions.py index 3f05e1628..a29003285 100644 --- a/dask/array/reductions.py +++ b/dask/array/reductions.py @@ -1,12 +1,12 @@ from __future__ import absolute_import, division, print_function from functools import partial, wraps -from itertools import product +from itertools import product, repeat from math import factorial, log, ceil import operator import numpy as np -from toolz import compose, partition_all, merge, get +from toolz import compose, partition_all, merge, get, accumulate, pluck from . import chunk from .core import _concatenate2, Array, atop, sqrt, lol_tuples @@ -34,6 +34,22 @@ def reduction(x, chunk, aggregate, axis=None, keepdims=None, dtype=None, if dtype and 'dtype' in getargspec(aggregate).args: aggregate = partial(aggregate, dtype=dtype) + # Map chunk across all blocks + inds = tuple(range(x.ndim)) + tmp = atop(partial(chunk, axis=axis, keepdims=True), inds, x, inds) + tmp._chunks = tuple((1,)*len(c) if i in axis else c for (i, c) + in enumerate(tmp.chunks)) + + return _tree_reduce(tmp, aggregate, axis, keepdims, dtype, split_every, + combine) + + +def _tree_reduce(x, aggregate, axis, keepdims, dtype, split_every=None, + combine=None): + """Perform the tree reduction step of a reduction. + + Lower level, users should use ``reduction`` or ``arg_reduction`` directly. + """ # Normalize split_every split_every = split_every or _globals.get('split_every', 4) if isinstance(split_every, dict): @@ -44,24 +60,18 @@ def reduction(x, chunk, aggregate, axis=None, keepdims=None, dtype=None, else: split_every = dict((k, v) for (k, v) in enumerate(x.numblocks) if k in axis) - # Map chunk across all blocks - inds = tuple(range(x.ndim)) - tmp = atop(partial(chunk, axis=axis, keepdims=True), inds, x, inds) - tmp._chunks = tuple((1,)*len(c) if i in axis else c for (i, c) - in enumerate(tmp.chunks)) - # Reduce across intermediates depth = 1 - for i, n in enumerate(tmp.numblocks): + for i, n in enumerate(x.numblocks): if i in split_every and split_every[i] != 1: depth = int(builtins.max(depth, ceil(log(n, split_every[i])))) func = compose(partial(combine or aggregate, axis=axis, keepdims=True), partial(_concatenate2, axes=axis)) for i in range(depth - 1): - tmp = partial_reduce(func, tmp, split_every, True, None) + x = partial_reduce(func, x, split_every, True, None) func = compose(partial(aggregate, axis=axis, keepdims=keepdims), partial(_concatenate2, axes=axis)) - return partial_reduce(func, tmp, split_every, keepdims=keepdims, + return partial_reduce(func, x, split_every, keepdims=keepdims, dtype=dtype) @@ -403,71 +413,130 @@ def vnorm(a, ord=None, axis=None, dtype=None, keepdims=False, split_every=None): split_every=split_every)**(1./ord) -def _arg_combine(data, axis, argfunc): +def _arg_combine(data, axis, argfunc, keepdims=False): """Merge intermediate results from ``arg_*`` functions""" + axis = None if len(axis) == data.ndim or data.ndim == 1 else axis[0] vals = data['vals'] arg = data['arg'] - ns = data['n'] - args = argfunc(vals, axis=axis) - offsets = np.roll(np.cumsum(ns, axis=axis), 1, axis) - offsets[tuple(slice(None) if i != axis else 0 for i in range(ns.ndim))] = 0 - inds = list(reversed(np.meshgrid(*map(np.arange, args.shape), sparse=True))) - inds.insert(axis, args) - - arg = (arg + offsets)[tuple(inds)] - vals = vals[tuple(inds)] - n = ns.sum(axis=axis).take(0, 0) - return arg, vals, n - - -def arg_chunk(func, argfunc, x, axis=None, **kwargs): - axis = axis[0] if isinstance(axis, tuple) else axis - vals = func(x, 
axis=axis, keepdims=True) - arg = argfunc(x, axis=axis, keepdims=True) + if axis is None: + local_args = argfunc(vals, axis=axis, keepdims=keepdims) + vals = vals.ravel()[local_args] + arg = arg.ravel()[local_args] + else: + local_args = argfunc(vals, axis=axis) + inds = np.ogrid[tuple(map(slice, local_args.shape))] + inds.insert(axis, local_args) + vals = vals[inds] + arg = arg[inds] + if keepdims: + vals = np.expand_dims(vals, axis) + arg = np.expand_dims(arg, axis) + return arg, vals + + +def arg_chunk(func, argfunc, x, axis, offset_info): + arg_axis = None if len(axis) == x.ndim or x.ndim == 1 else axis[0] + vals = func(x, axis=arg_axis, keepdims=True) + arg = argfunc(x, axis=arg_axis, keepdims=True) + if arg_axis is None: + offset, total_shape = offset_info + ind = np.unravel_index(arg.ravel()[0], x.shape) + total_ind = tuple(o + i for (o, i) in zip(offset, ind)) + arg[:] = np.ravel_multi_index(total_ind, total_shape) + else: + arg += offset_info + result = np.empty(shape=vals.shape, dtype=[('vals', vals.dtype), - ('arg', arg.dtype), - ('n', 'i8')]) + ('arg', arg.dtype)]) result['vals'] = vals result['arg'] = arg - result['n'] = x.shape[axis] return result def arg_combine(func, argfunc, data, axis=None, **kwargs): - axis = axis[0] if isinstance(axis, tuple) else axis - arg, vals, n = _arg_combine(data, axis, argfunc) - shape = tuple(s if i != axis else 1 for (i, s) in enumerate(data.shape)) - result = np.empty(shape=shape, dtype=[('vals', vals.dtype), - ('arg', arg.dtype), - ('n', 'i8')]) - result['vals'] = vals.reshape(shape) - result['arg'] = arg.reshape(shape) - result['n'] = n + arg, vals = _arg_combine(data, axis, argfunc, keepdims=True) + result = np.empty(shape=vals.shape, dtype=[('vals', vals.dtype), + ('arg', arg.dtype)]) + result['vals'] = vals + result['arg'] = arg return result def arg_agg(func, argfunc, data, axis=None, **kwargs): - axis = axis[0] if isinstance(axis, tuple) else axis - return _arg_combine(data, axis, argfunc)[0] + return _arg_combine(data, axis, argfunc, keepdims=False)[0] + + +def arg_reduction(x, chunk, combine, agg, axis=None, split_every=None): + """Generic function for argreduction. + + Parameters + ---------- + x : Array + chunk : callable + Partialed ``arg_chunk``. + combine : callable + Partialed ``arg_combine``. + agg : callable + Partialed ``arg_agg``. + axis : int, optional + split_every : int or dict, optional + """ + if axis is None: + axis = tuple(range(x.ndim)) + ravel = True + elif isinstance(axis, int): + if axis < 0: + axis += x.ndim + if axis < 0 or axis >= x.ndim: + raise ValueError("axis entry is out of bounds") + axis = (axis,) + ravel = x.ndim == 1 + else: + raise TypeError("axis must be either `None` or int, " + "got '{0}'".format(axis)) + + # Map chunk across all blocks + name = 'arg-reduce-chunk-{0}'.format(tokenize(chunk, axis)) + old = x.name + keys = list(product(*map(range, x.numblocks))) + offsets = list(product(*(accumulate(operator.add, bd[:-1], 0) + for bd in x.chunks))) + if ravel: + offset_info = zip(offsets, repeat(x.shape)) + else: + offset_info = pluck(axis[0], offsets) + + chunks = tuple((1,)*len(c) if i in axis else c for (i, c) + in enumerate(x.chunks)) + dsk = dict(((name,) + k, (chunk, (old,) + k, axis, off)) for (k, off) + in zip(keys, offset_info)) + tmp = Array(merge(dsk, x.dask), name, chunks) + return _tree_reduce(tmp, agg, axis, False, np.int64, split_every, combine) + +def make_arg_reduction(func, argfunc): + """Create a argreduction callable. 
-def arg_reduction(func, argfunc): + Parameters + ---------- + func : callable + The reduction (e.g. ``min``) + argfunc : callable + The argreduction (e.g. ``argmin``) + """ chunk = partial(arg_chunk, func, argfunc) - agg = partial(arg_agg, func, argfunc) combine = partial(arg_combine, func, argfunc) + agg = partial(arg_agg, func, argfunc) @wraps(argfunc) - def _(a, axis=None, split_every=None): - if axis < 0: - axis = a.ndim + axis - return reduction(a, chunk, agg, axis=axis, dtype='i8', - split_every=split_every, combine=combine) + def _(x, axis=None, split_every=None): + return arg_reduction(x, chunk, combine, agg, axis, split_every) return _ -argmin = arg_reduction(chunk.min, chunk.argmin) -argmax = arg_reduction(chunk.max, chunk.argmax) -nanargmin = arg_reduction(chunk.nanmin, chunk.nanargmin) -nanargmax = arg_reduction(chunk.nanmax, chunk.nanargmax) +argmin = make_arg_reduction(chunk.min, chunk.argmin) +argmax = make_arg_reduction(chunk.max, chunk.argmax) +nanargmin = make_arg_reduction(chunk.nanmin, chunk.nanargmin) +nanargmax = make_arg_reduction(chunk.nanmax, chunk.nanargmax) def cumreduction(func, binop, ident, x, axis, dtype=None):
dask.array.argmin fails on 3D input ``` In [29]: da.from_array(np.random.randn(2, 3, 4), chunks=(2, 3, 4)).argmin(axis=0).compute() --------------------------------------------------------------------------- IndexError Traceback (most recent call last) <ipython-input-29-b25ac117010d> in <module>() ----> 1 da.from_array(np.random.randn(2, 3, 4), chunks=(2, 3, 4)).argmin(axis=0).compute() /Users/shoyer/conda/envs/xarray-dev/lib/python3.5/site-packages/dask/base.py in compute(self, **kwargs) 35 36 def compute(self, **kwargs): ---> 37 return compute(self, **kwargs)[0] 38 39 @classmethod /Users/shoyer/conda/envs/xarray-dev/lib/python3.5/site-packages/dask/base.py in compute(*args, **kwargs) 108 for opt, val in groups.items()]) 109 keys = [var._keys() for var in variables] --> 110 results = get(dsk, keys, **kwargs) 111 112 results_iter = iter(results) /Users/shoyer/conda/envs/xarray-dev/lib/python3.5/site-packages/dask/threaded.py in get(dsk, result, cache, num_workers, **kwargs) 55 results = get_async(pool.apply_async, len(pool._pool), dsk, result, 56 cache=cache, queue=queue, get_id=_thread_get_id, ---> 57 **kwargs) 58 59 return results /Users/shoyer/conda/envs/xarray-dev/lib/python3.5/site-packages/dask/async.py in get_async(apply_async, num_workers, dsk, result, cache, queue, get_id, raise_on_exception, rerun_exceptions_locally, callbacks, **kwargs) 479 _execute_task(task, data) # Re-execute locally 480 else: --> 481 raise(remote_exception(res, tb)) 482 state['cache'][key] = res 483 finish_task(dsk, key, state, results, keyorder.get) IndexError: shape mismatch: indexing arrays could not be broadcast together with shapes (3,4) (4,1) (1,3) Traceback --------- File "/Users/shoyer/conda/envs/xarray-dev/lib/python3.5/site-packages/dask/async.py", line 264, in execute_task result = _execute_task(task, data) File "/Users/shoyer/conda/envs/xarray-dev/lib/python3.5/site-packages/dask/async.py", line 246, in _execute_task return func(*args2) File "/Users/shoyer/conda/envs/xarray-dev/lib/python3.5/site-packages/toolz/functoolz.py", line 381, in __call__ ret = f(ret) File "/Users/shoyer/conda/envs/xarray-dev/lib/python3.5/site-packages/dask/array/reductions.py", line 450, in arg_agg return _arg_combine(data, axis, argfunc)[0] File "/Users/shoyer/conda/envs/xarray-dev/lib/python3.5/site-packages/dask/array/reductions.py", line 416, in _arg_combine arg = (arg + offsets)[tuple(inds)] ``` This was reported in xarray: https://github.com/pydata/xarray/issues/759
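Illustrative sketch (not part of the dataset record above): the behaviour this issue expects once the arg-reduction rewrite in the patch is applied, assuming current numpy and dask are installed.

```python
import numpy as np
import dask.array as da

x = np.random.randn(2, 3, 4)
d = da.from_array(x, chunks=(2, 3, 4))

# np.argmin handles the 3-D input directly; after the fix the dask
# reduction should produce the same 2-D index array instead of raising
# the broadcast IndexError shown in the traceback above.
expected = np.argmin(x, axis=0)
result = d.argmin(axis=0).compute()
assert (result == expected).all()
```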
dask/dask
diff --git a/dask/array/tests/test_reductions.py b/dask/array/tests/test_reductions.py index 9b13a98dc..7b734416f 100644 --- a/dask/array/tests/test_reductions.py +++ b/dask/array/tests/test_reductions.py @@ -73,16 +73,6 @@ def test_reductions_1D(dtype): reduction_1d_test(da.nanmin, a, np.nanmin, x, False) reduction_1d_test(da.nanmax, a, np.nanmax, x, False) - assert eq(da.argmax(a, axis=0), np.argmax(x, axis=0)) - assert eq(da.argmin(a, axis=0), np.argmin(x, axis=0)) - assert eq(da.nanargmax(a, axis=0), np.nanargmax(x, axis=0)) - assert eq(da.nanargmin(a, axis=0), np.nanargmin(x, axis=0)) - - assert eq(da.argmax(a, axis=0, split_every=2), np.argmax(x, axis=0)) - assert eq(da.argmin(a, axis=0, split_every=2), np.argmin(x, axis=0)) - assert eq(da.nanargmax(a, axis=0, split_every=2), np.nanargmax(x, axis=0)) - assert eq(da.nanargmin(a, axis=0, split_every=2), np.nanargmin(x, axis=0)) - def reduction_2d_test(da_func, darr, np_func, narr, use_dtype=True, split_every=True): @@ -144,23 +134,32 @@ def test_reductions_2D(dtype): reduction_2d_test(da.nanmin, a, np.nanmin, x, False) reduction_2d_test(da.nanmax, a, np.nanmax, x, False) - assert eq(da.argmax(a, axis=0), np.argmax(x, axis=0)) - assert eq(da.argmin(a, axis=0), np.argmin(x, axis=0)) - assert eq(da.nanargmax(a, axis=0), np.nanargmax(x, axis=0)) - assert eq(da.nanargmin(a, axis=0), np.nanargmin(x, axis=0)) - assert eq(da.argmax(a, axis=1), np.argmax(x, axis=1)) - assert eq(da.argmin(a, axis=1), np.argmin(x, axis=1)) - assert eq(da.nanargmax(a, axis=1), np.nanargmax(x, axis=1)) - assert eq(da.nanargmin(a, axis=1), np.nanargmin(x, axis=1)) - - assert eq(da.argmax(a, axis=0, split_every=2), np.argmax(x, axis=0)) - assert eq(da.argmin(a, axis=0, split_every=2), np.argmin(x, axis=0)) - assert eq(da.nanargmax(a, axis=0, split_every=2), np.nanargmax(x, axis=0)) - assert eq(da.nanargmin(a, axis=0, split_every=2), np.nanargmin(x, axis=0)) - assert eq(da.argmax(a, axis=1, split_every=2), np.argmax(x, axis=1)) - assert eq(da.argmin(a, axis=1, split_every=2), np.argmin(x, axis=1)) - assert eq(da.nanargmax(a, axis=1, split_every=2), np.nanargmax(x, axis=1)) - assert eq(da.nanargmin(a, axis=1, split_every=2), np.nanargmin(x, axis=1)) + [email protected](['dfunc', 'func'], + [(da.argmin, np.argmin), (da.argmax, np.argmax), + (da.nanargmin, np.nanargmin), (da.nanargmax, np.nanargmax)]) +def test_arg_reductions(dfunc, func): + x = np.random.random((10, 10, 10)) + a = da.from_array(x, chunks=(3, 4, 5)) + + assert eq(dfunc(a), func(x)) + assert eq(dfunc(a, 0), func(x, 0)) + assert eq(dfunc(a, 1), func(x, 1)) + assert eq(dfunc(a, 2), func(x, 2)) + with set_options(split_every=2): + assert eq(dfunc(a), func(x)) + assert eq(dfunc(a, 0), func(x, 0)) + assert eq(dfunc(a, 1), func(x, 1)) + assert eq(dfunc(a, 2), func(x, 2)) + + pytest.raises(ValueError, lambda: dfunc(a, 3)) + pytest.raises(TypeError, lambda: dfunc(a, (0, 1))) + + x2 = np.arange(10) + a2 = da.from_array(x2, chunks=3) + assert eq(dfunc(a2), func(x2)) + assert eq(dfunc(a2, 0), func(x2, 0)) + assert eq(dfunc(a2, 0, split_every=2), func(x2, 0)) def test_reductions_2D_nans():
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 1 }
1.8
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[complete]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "numpy>=1.16.0 pandas>=1.0.0 cloudpickle partd distributed s3fs toolz psutil pytables bokeh bcolz scipy h5py ipython", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y graphviz liblzma-dev" ], "python": "3.5", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
aiobotocore @ file:///opt/conda/conda-bld/aiobotocore_1643638228694/work aiohttp @ file:///tmp/build/80754af9/aiohttp_1632748060317/work aioitertools @ file:///tmp/build/80754af9/aioitertools_1607109665762/work async-timeout==3.0.1 attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work backcall @ file:///home/ktietz/src/ci/backcall_1611930011877/work bcolz==1.2.1 bokeh @ file:///tmp/build/80754af9/bokeh_1620710048147/work botocore @ file:///opt/conda/conda-bld/botocore_1642672735464/work brotlipy==0.7.0 certifi==2021.5.30 cffi @ file:///tmp/build/80754af9/cffi_1625814693874/work chardet @ file:///tmp/build/80754af9/chardet_1607706739153/work click==8.0.3 cloudpickle @ file:///tmp/build/80754af9/cloudpickle_1632508026186/work contextvars==2.4 cryptography @ file:///tmp/build/80754af9/cryptography_1635366128178/work cytoolz==0.11.0 -e git+https://github.com/dask/dask.git@09bda79a79acc261f31a446425efc5dd2fb42b9a#egg=dask decorator @ file:///opt/conda/conda-bld/decorator_1643638310831/work distributed @ file:///tmp/build/80754af9/distributed_1615054599257/work fsspec @ file:///opt/conda/conda-bld/fsspec_1642510437511/work h5py==2.10.0 HeapDict @ file:///Users/ktietz/demo/mc3/conda-bld/heapdict_1630598515714/work idna @ file:///tmp/build/80754af9/idna_1637925883363/work idna-ssl @ file:///tmp/build/80754af9/idna_ssl_1611752490495/work immutables @ file:///tmp/build/80754af9/immutables_1628888996840/work importlib-metadata==4.8.3 iniconfig==1.1.1 ipython @ file:///tmp/build/80754af9/ipython_1593447367857/work ipython-genutils @ file:///tmp/build/80754af9/ipython_genutils_1606773439826/work jedi @ file:///tmp/build/80754af9/jedi_1606932572482/work Jinja2 @ file:///opt/conda/conda-bld/jinja2_1647436528585/work jmespath @ file:///Users/ktietz/demo/mc3/conda-bld/jmespath_1630583964805/work locket==0.2.1 MarkupSafe @ file:///tmp/build/80754af9/markupsafe_1621528150516/work mock @ file:///tmp/build/80754af9/mock_1607622725907/work msgpack @ file:///tmp/build/80754af9/msgpack-python_1612287171716/work multidict @ file:///tmp/build/80754af9/multidict_1607367768400/work numexpr @ file:///tmp/build/80754af9/numexpr_1618853194344/work numpy @ file:///tmp/build/80754af9/numpy_and_numpy_base_1603483703303/work olefile @ file:///Users/ktietz/demo/mc3/conda-bld/olefile_1629805411829/work packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pandas==1.1.5 parso==0.7.0 partd @ file:///opt/conda/conda-bld/partd_1647245470509/work pexpect @ file:///tmp/build/80754af9/pexpect_1605563209008/work pickleshare @ file:///tmp/build/80754af9/pickleshare_1606932040724/work Pillow @ file:///tmp/build/80754af9/pillow_1625670622947/work pluggy==1.0.0 prompt-toolkit @ file:///tmp/build/80754af9/prompt-toolkit_1633440160888/work psutil @ file:///tmp/build/80754af9/psutil_1612297621795/work ptyprocess @ file:///tmp/build/80754af9/ptyprocess_1609355006118/work/dist/ptyprocess-0.7.0-py2.py3-none-any.whl py==1.11.0 pycparser @ file:///tmp/build/80754af9/pycparser_1636541352034/work Pygments @ file:///opt/conda/conda-bld/pygments_1644249106324/work pyOpenSSL @ file:///opt/conda/conda-bld/pyopenssl_1643788558760/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work PySocks @ file:///tmp/build/80754af9/pysocks_1605305763431/work pytest==7.0.1 python-dateutil @ file:///tmp/build/80754af9/python-dateutil_1626374649649/work pytz==2021.3 PyYAML==5.4.1 s3fs @ file:///opt/conda/conda-bld/s3fs_1643701468749/work scipy @ file:///tmp/build/80754af9/scipy_1597686635649/work six @ 
file:///tmp/build/80754af9/six_1644875935023/work sortedcontainers @ file:///tmp/build/80754af9/sortedcontainers_1623949099177/work tables==3.6.1 tblib @ file:///Users/ktietz/demo/mc3/conda-bld/tblib_1629402031467/work tomli==1.2.3 toolz @ file:///tmp/build/80754af9/toolz_1636545406491/work tornado @ file:///tmp/build/80754af9/tornado_1606942266872/work traitlets @ file:///tmp/build/80754af9/traitlets_1632746497744/work typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work urllib3 @ file:///opt/conda/conda-bld/urllib3_1643638302206/work wcwidth @ file:///Users/ktietz/demo/mc3/conda-bld/wcwidth_1629357192024/work wrapt==1.12.1 yarl @ file:///tmp/build/80754af9/yarl_1606939915466/work zict==2.0.0 zipp==3.6.0
name: dask channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - aiobotocore=2.1.0=pyhd3eb1b0_0 - aiohttp=3.7.4.post0=py36h7f8727e_2 - aioitertools=0.7.1=pyhd3eb1b0_0 - async-timeout=3.0.1=py36h06a4308_0 - attrs=21.4.0=pyhd3eb1b0_0 - backcall=0.2.0=pyhd3eb1b0_0 - bcolz=1.2.1=py36h04863e7_0 - blas=1.0=openblas - blosc=1.21.3=h6a678d5_0 - bokeh=2.3.2=py36h06a4308_0 - botocore=1.23.24=pyhd3eb1b0_0 - brotlipy=0.7.0=py36h27cfd23_1003 - bzip2=1.0.8=h5eee18b_6 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - cffi=1.14.6=py36h400218f_0 - chardet=4.0.0=py36h06a4308_1003 - click=8.0.3=pyhd3eb1b0_0 - cloudpickle=2.0.0=pyhd3eb1b0_0 - contextvars=2.4=py_0 - cryptography=35.0.0=py36hd23ed53_0 - cytoolz=0.11.0=py36h7b6447c_0 - decorator=5.1.1=pyhd3eb1b0_0 - distributed=2021.3.0=py36h06a4308_0 - freetype=2.12.1=h4a9f257_0 - fsspec=2022.1.0=pyhd3eb1b0_0 - giflib=5.2.2=h5eee18b_0 - h5py=2.10.0=py36h7918eee_0 - hdf5=1.10.4=hb1b8bf9_0 - heapdict=1.0.1=pyhd3eb1b0_0 - idna=3.3=pyhd3eb1b0_0 - idna_ssl=1.1.0=py36h06a4308_0 - immutables=0.16=py36h7f8727e_0 - ipython=7.16.1=py36h5ca1d4c_0 - ipython_genutils=0.2.0=pyhd3eb1b0_1 - jedi=0.17.2=py36h06a4308_1 - jinja2=3.0.3=pyhd3eb1b0_0 - jmespath=0.10.0=pyhd3eb1b0_0 - jpeg=9e=h5eee18b_3 - lcms2=2.16=hb9589c4_0 - ld_impl_linux-64=2.40=h12ee557_0 - lerc=4.0.0=h6a678d5_0 - libdeflate=1.22=h5eee18b_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgfortran-ng=7.5.0=ha8ba4b0_17 - libgfortran4=7.5.0=ha8ba4b0_17 - libgomp=11.2.0=h1234567_1 - libopenblas=0.3.18=hf726d26_0 - libpng=1.6.39=h5eee18b_0 - libstdcxx-ng=11.2.0=h1234567_1 - libtiff=4.5.1=hffd6297_1 - libwebp=1.2.4=h11a3e52_1 - libwebp-base=1.2.4=h5eee18b_1 - locket=0.2.1=py36h06a4308_1 - lz4-c=1.9.4=h6a678d5_1 - lzo=2.10=h7b6447c_2 - markupsafe=2.0.1=py36h27cfd23_0 - mock=4.0.3=pyhd3eb1b0_0 - msgpack-python=1.0.2=py36hff7bd54_1 - multidict=5.1.0=py36h27cfd23_2 - ncurses=6.4=h6a678d5_0 - numexpr=2.7.3=py36h4be448d_1 - numpy=1.19.2=py36h6163131_0 - numpy-base=1.19.2=py36h75fe3a5_0 - olefile=0.46=pyhd3eb1b0_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pandas=1.1.5=py36ha9443f7_0 - parso=0.7.0=py_0 - partd=1.2.0=pyhd3eb1b0_1 - pexpect=4.8.0=pyhd3eb1b0_3 - pickleshare=0.7.5=pyhd3eb1b0_1003 - pillow=8.3.1=py36h5aabda8_0 - pip=21.2.2=py36h06a4308_0 - prompt-toolkit=3.0.20=pyhd3eb1b0_0 - psutil=5.8.0=py36h27cfd23_1 - ptyprocess=0.7.0=pyhd3eb1b0_2 - pycparser=2.21=pyhd3eb1b0_0 - pygments=2.11.2=pyhd3eb1b0_0 - pyopenssl=22.0.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pysocks=1.7.1=py36h06a4308_0 - pytables=3.6.1=py36h71ec239_0 - python=3.6.13=h12debd9_1 - python-dateutil=2.8.2=pyhd3eb1b0_0 - pytz=2021.3=pyhd3eb1b0_0 - pyyaml=5.4.1=py36h27cfd23_1 - readline=8.2=h5eee18b_0 - s3fs=2022.1.0=pyhd3eb1b0_0 - scipy=1.5.2=py36habc2bb6_0 - setuptools=58.0.4=py36h06a4308_0 - six=1.16.0=pyhd3eb1b0_1 - sortedcontainers=2.4.0=pyhd3eb1b0_0 - sqlite=3.45.3=h5eee18b_0 - tblib=1.7.0=pyhd3eb1b0_0 - tk=8.6.14=h39e8969_0 - toolz=0.11.2=pyhd3eb1b0_0 - tornado=6.1=py36h27cfd23_0 - traitlets=4.3.3=py36h06a4308_0 - typing-extensions=4.1.1=hd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - urllib3=1.26.8=pyhd3eb1b0_0 - wcwidth=0.2.5=pyhd3eb1b0_0 - wheel=0.37.1=pyhd3eb1b0_0 - wrapt=1.12.1=py36h7b6447c_1 - xz=5.6.4=h5eee18b_1 - yaml=0.2.5=h7b6447c_0 - yarl=1.6.3=py36h27cfd23_0 - zict=2.0.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - zstd=1.5.6=hc292b87_0 - pip: - 
importlib-metadata==4.8.3 - iniconfig==1.1.1 - pluggy==1.0.0 - py==1.11.0 - pytest==7.0.1 - tomli==1.2.3 - zipp==3.6.0 prefix: /opt/conda/envs/dask
[ "dask/array/tests/test_reductions.py::test_arg_reductions[argmin-argmin]", "dask/array/tests/test_reductions.py::test_arg_reductions[argmax-argmax]", "dask/array/tests/test_reductions.py::test_arg_reductions[nanargmin-nanargmin]", "dask/array/tests/test_reductions.py::test_arg_reductions[nanargmax-nanargmax]" ]
[ "dask/array/tests/test_reductions.py::test_reductions_2D[f4]", "dask/array/tests/test_reductions.py::test_reductions_2D[i4]" ]
[ "dask/array/tests/test_reductions.py::test_reductions_1D[f4]", "dask/array/tests/test_reductions.py::test_reductions_1D[i4]", "dask/array/tests/test_reductions.py::test_reductions_2D_nans", "dask/array/tests/test_reductions.py::test_moment", "dask/array/tests/test_reductions.py::test_reductions_with_negative_axes", "dask/array/tests/test_reductions.py::test_nan", "dask/array/tests/test_reductions.py::test_0d_array", "dask/array/tests/test_reductions.py::test_reduction_on_scalar", "dask/array/tests/test_reductions.py::test_tree_reduce_depth", "dask/array/tests/test_reductions.py::test_tree_reduce_set_options" ]
[]
BSD 3-Clause "New" or "Revised" License
459
falconry__falcon-727
7bffb3342fea9fff5677c43e89e7e6eccfc388ed
2016-03-03 18:05:13
b78ffaac7c412d3b3d6cd3c70dd05024d79d2cce
diff --git a/falcon/request.py b/falcon/request.py index c7322b4..eac9b95 100644 --- a/falcon/request.py +++ b/falcon/request.py @@ -167,19 +167,13 @@ class Request(object): Note: If an HTML form is POSTed to the API using the - *application/x-www-form-urlencoded* media type, Falcon + *application/x-www-form-urlencoded* media type, and + the :py:attr:`~.RequestOptions.auto_parse_form_urlencoded` + option is set, the framework will consume `stream` in order to parse the parameters and merge them into the query string parameters. In this case, the stream will be left at EOF. - Note also that the character encoding for fields, before - percent-encoding non-ASCII bytes, is assumed to be - UTF-8. The special `_charset_` field is ignored if present. - - Falcon expects form-encoded request bodies to be - encoded according to the standard W3C algorithm (see - also http://goo.gl/6rlcux). - date (datetime): Value of the Date header, converted to a ``datetime`` instance. The header value is assumed to conform to RFC 1123. @@ -320,7 +314,8 @@ class Request(object): # PERF(kgriffs): Technically, we should spend a few more # cycles and parse the content type for real, but # this heuristic will work virtually all the time. - if (self.content_type is not None and + if (self.options.auto_parse_form_urlencoded and + self.content_type is not None and 'application/x-www-form-urlencoded' in self.content_type): self._parse_form_urlencoded() @@ -1159,11 +1154,28 @@ class RequestOptions(object): Attributes: keep_blank_qs_values (bool): Set to ``True`` in order to retain blank values in query string parameters (default ``False``). + auto_parse_form_urlencoded: Set to ``True`` in order to + automatically consume the request stream and merge the + results into the request's query string params when the + request's content type is + *application/x-www-form-urlencoded* (default ``False``). In + this case, the request's content stream will be left at EOF. + + Note: + The character encoding for fields, before + percent-encoding non-ASCII bytes, is assumed to be + UTF-8. The special `_charset_` field is ignored if present. + + Falcon expects form-encoded request bodies to be + encoded according to the standard W3C algorithm (see + also http://goo.gl/6rlcux). """ __slots__ = ( 'keep_blank_qs_values', + 'auto_parse_form_urlencoded', ) def __init__(self): self.keep_blank_qs_values = False + self.auto_parse_form_urlencoded = False
Add an option to RequestOptions to toggle parsing of form params Add an option to RequestOptions to enable/disable parsing form params as currently implemented. It would be disabled by default (breaking change, but easy to work around). This would mitigate the problems people have experienced re the request stream being automatically slurped when the POSTed content type is `application/x-www-form-urlencoded`. This would not preclude working on a longer-term strategy for handling forms in a manner distinct from query parameters. See also: - #418 - #493
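Hedged usage sketch (not part of the record): how the requested toggle is flipped on, using the `auto_parse_form_urlencoded` attribute name introduced in the patch above and exercised in the test patch below.

```python
import falcon

api = falcon.API()

# Off by default after this change; opt back in to the old behaviour of
# consuming form-encoded request bodies and merging the fields into the
# query-string parameters.
api.req_options.auto_parse_form_urlencoded = True
```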
falconry/falcon
diff --git a/tests/test_query_params.py b/tests/test_query_params.py index 50ed010..93a429f 100644 --- a/tests/test_query_params.py +++ b/tests/test_query_params.py @@ -473,6 +473,10 @@ class _TestQueryParams(testing.TestBase): class PostQueryParams(_TestQueryParams): + def before(self): + super(PostQueryParams, self).before() + self.api.req_options.auto_parse_form_urlencoded = True + def simulate_request(self, path, query_string, **kwargs): headers = {"Content-Type": "application/x-www-form-urlencoded"} super(PostQueryParams, self).simulate_request( @@ -484,10 +488,29 @@ class PostQueryParams(_TestQueryParams): self.simulate_request('/', query_string=query_string) req = self.resource.req - self.assertEqual(req.get_param('q'), None) + self.assertIs(req.get_param('q'), None) + + def test_explicitly_disable_auto_parse(self): + self.api.req_options.auto_parse_form_urlencoded = False + self.simulate_request('/', query_string='q=42') + + req = self.resource.req + self.assertIs(req.get_param('q'), None) class GetQueryParams(_TestQueryParams): def simulate_request(self, path, query_string, **kwargs): super(GetQueryParams, self).simulate_request( path, query_string=query_string, **kwargs) + + +class PostQueryParamsDefaultBehavior(testing.TestBase): + def test_dont_auto_parse_by_default(self): + self.resource = testing.TestResource() + self.api.add_route('/', self.resource) + + headers = {"Content-Type": "application/x-www-form-urlencoded"} + self.simulate_request('/', body='q=42', headers=headers) + + req = self.resource.req + self.assertIs(req.get_param('q'), None)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_issue_reference" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 1 }, "num_modified_files": 1 }
0.3
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "nose", "coverage", "ddt", "pyyaml", "requests", "testtools", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "tools/test-requires" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi==2025.1.31 charset-normalizer==3.4.1 coverage==7.8.0 ddt==1.7.2 exceptiongroup==1.2.2 -e git+https://github.com/falconry/falcon.git@7bffb3342fea9fff5677c43e89e7e6eccfc388ed#egg=falcon idna==3.10 iniconfig==2.1.0 nose==1.3.7 packaging==24.2 pluggy==1.5.0 pytest==8.3.5 python-mimeparse==2.0.0 PyYAML==6.0.2 requests==2.32.3 six==1.17.0 testtools==2.7.2 tomli==2.2.1 urllib3==2.3.0
name: falcon channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - certifi==2025.1.31 - charset-normalizer==3.4.1 - coverage==7.8.0 - ddt==1.7.2 - exceptiongroup==1.2.2 - idna==3.10 - iniconfig==2.1.0 - nose==1.3.7 - packaging==24.2 - pluggy==1.5.0 - pytest==8.3.5 - python-mimeparse==2.0.0 - pyyaml==6.0.2 - requests==2.32.3 - six==1.17.0 - testtools==2.7.2 - tomli==2.2.1 - urllib3==2.3.0 prefix: /opt/conda/envs/falcon
[ "tests/test_query_params.py::PostQueryParams::test_allowed_names", "tests/test_query_params.py::PostQueryParams::test_bad_percentage", "tests/test_query_params.py::PostQueryParams::test_blank", "tests/test_query_params.py::PostQueryParams::test_boolean", "tests/test_query_params.py::PostQueryParams::test_boolean_blank", "tests/test_query_params.py::PostQueryParams::test_explicitly_disable_auto_parse", "tests/test_query_params.py::PostQueryParams::test_get_date_invalid", "tests/test_query_params.py::PostQueryParams::test_get_date_missing_param", "tests/test_query_params.py::PostQueryParams::test_get_date_store", "tests/test_query_params.py::PostQueryParams::test_get_date_valid", "tests/test_query_params.py::PostQueryParams::test_get_date_valid_with_format", "tests/test_query_params.py::PostQueryParams::test_int", "tests/test_query_params.py::PostQueryParams::test_int_neg", "tests/test_query_params.py::PostQueryParams::test_list_transformer", "tests/test_query_params.py::PostQueryParams::test_list_type", "tests/test_query_params.py::PostQueryParams::test_list_type_blank", "tests/test_query_params.py::PostQueryParams::test_multiple_form_keys", "tests/test_query_params.py::PostQueryParams::test_multiple_form_keys_as_list", "tests/test_query_params.py::PostQueryParams::test_multiple_keys_as_bool", "tests/test_query_params.py::PostQueryParams::test_multiple_keys_as_int", "tests/test_query_params.py::PostQueryParams::test_non_ascii", "tests/test_query_params.py::PostQueryParams::test_none", "tests/test_query_params.py::PostQueryParams::test_param_property", "tests/test_query_params.py::PostQueryParams::test_percent_encoded", "tests/test_query_params.py::PostQueryParams::test_required_1_get_param", "tests/test_query_params.py::PostQueryParams::test_required_2_get_param_as_int", "tests/test_query_params.py::PostQueryParams::test_required_3_get_param_as_bool", "tests/test_query_params.py::PostQueryParams::test_required_4_get_param_as_list", "tests/test_query_params.py::PostQueryParams::test_simple", "tests/test_query_params.py::PostQueryParamsDefaultBehavior::test_dont_auto_parse_by_default" ]
[]
[ "tests/test_query_params.py::_TestQueryParams::test_allowed_names", "tests/test_query_params.py::_TestQueryParams::test_bad_percentage", "tests/test_query_params.py::_TestQueryParams::test_blank", "tests/test_query_params.py::_TestQueryParams::test_boolean", "tests/test_query_params.py::_TestQueryParams::test_boolean_blank", "tests/test_query_params.py::_TestQueryParams::test_get_date_invalid", "tests/test_query_params.py::_TestQueryParams::test_get_date_missing_param", "tests/test_query_params.py::_TestQueryParams::test_get_date_store", "tests/test_query_params.py::_TestQueryParams::test_get_date_valid", "tests/test_query_params.py::_TestQueryParams::test_get_date_valid_with_format", "tests/test_query_params.py::_TestQueryParams::test_int", "tests/test_query_params.py::_TestQueryParams::test_int_neg", "tests/test_query_params.py::_TestQueryParams::test_list_transformer", "tests/test_query_params.py::_TestQueryParams::test_list_type", "tests/test_query_params.py::_TestQueryParams::test_list_type_blank", "tests/test_query_params.py::_TestQueryParams::test_multiple_form_keys", "tests/test_query_params.py::_TestQueryParams::test_multiple_form_keys_as_list", "tests/test_query_params.py::_TestQueryParams::test_multiple_keys_as_bool", "tests/test_query_params.py::_TestQueryParams::test_multiple_keys_as_int", "tests/test_query_params.py::_TestQueryParams::test_none", "tests/test_query_params.py::_TestQueryParams::test_param_property", "tests/test_query_params.py::_TestQueryParams::test_percent_encoded", "tests/test_query_params.py::_TestQueryParams::test_required_1_get_param", "tests/test_query_params.py::_TestQueryParams::test_required_2_get_param_as_int", "tests/test_query_params.py::_TestQueryParams::test_required_3_get_param_as_bool", "tests/test_query_params.py::_TestQueryParams::test_required_4_get_param_as_list", "tests/test_query_params.py::_TestQueryParams::test_simple", "tests/test_query_params.py::GetQueryParams::test_allowed_names", "tests/test_query_params.py::GetQueryParams::test_bad_percentage", "tests/test_query_params.py::GetQueryParams::test_blank", "tests/test_query_params.py::GetQueryParams::test_boolean", "tests/test_query_params.py::GetQueryParams::test_boolean_blank", "tests/test_query_params.py::GetQueryParams::test_get_date_invalid", "tests/test_query_params.py::GetQueryParams::test_get_date_missing_param", "tests/test_query_params.py::GetQueryParams::test_get_date_store", "tests/test_query_params.py::GetQueryParams::test_get_date_valid", "tests/test_query_params.py::GetQueryParams::test_get_date_valid_with_format", "tests/test_query_params.py::GetQueryParams::test_int", "tests/test_query_params.py::GetQueryParams::test_int_neg", "tests/test_query_params.py::GetQueryParams::test_list_transformer", "tests/test_query_params.py::GetQueryParams::test_list_type", "tests/test_query_params.py::GetQueryParams::test_list_type_blank", "tests/test_query_params.py::GetQueryParams::test_multiple_form_keys", "tests/test_query_params.py::GetQueryParams::test_multiple_form_keys_as_list", "tests/test_query_params.py::GetQueryParams::test_multiple_keys_as_bool", "tests/test_query_params.py::GetQueryParams::test_multiple_keys_as_int", "tests/test_query_params.py::GetQueryParams::test_none", "tests/test_query_params.py::GetQueryParams::test_param_property", "tests/test_query_params.py::GetQueryParams::test_percent_encoded", "tests/test_query_params.py::GetQueryParams::test_required_1_get_param", "tests/test_query_params.py::GetQueryParams::test_required_2_get_param_as_int", 
"tests/test_query_params.py::GetQueryParams::test_required_3_get_param_as_bool", "tests/test_query_params.py::GetQueryParams::test_required_4_get_param_as_list", "tests/test_query_params.py::GetQueryParams::test_simple" ]
[]
Apache License 2.0
460
dask__dask-1028
6dc9229362f2d3b1dfa466a8a63831c3c832b4be
2016-03-03 21:56:25
6dc9229362f2d3b1dfa466a8a63831c3c832b4be
diff --git a/dask/array/reductions.py b/dask/array/reductions.py index a29003285..c0b12cd08 100644 --- a/dask/array/reductions.py +++ b/dask/array/reductions.py @@ -466,6 +466,13 @@ def arg_agg(func, argfunc, data, axis=None, **kwargs): return _arg_combine(data, axis, argfunc, keepdims=False)[0] +def nanarg_agg(func, argfunc, data, axis=None, **kwargs): + arg, vals = _arg_combine(data, axis, argfunc, keepdims=False) + if np.any(np.isnan(vals)): + raise ValueError("All NaN slice encountered") + return arg + + def arg_reduction(x, chunk, combine, agg, axis=None, split_every=None): """Generic function for argreduction. @@ -514,7 +521,7 @@ def arg_reduction(x, chunk, combine, agg, axis=None, split_every=None): return _tree_reduce(tmp, agg, axis, False, np.int64, split_every, combine) -def make_arg_reduction(func, argfunc): +def make_arg_reduction(func, argfunc, is_nan_func=False): """Create a argreduction callable. Parameters @@ -526,17 +533,34 @@ def make_arg_reduction(func, argfunc): """ chunk = partial(arg_chunk, func, argfunc) combine = partial(arg_combine, func, argfunc) - agg = partial(arg_agg, func, argfunc) + if is_nan_func: + agg = partial(nanarg_agg, func, argfunc) + else: + agg = partial(arg_agg, func, argfunc) @wraps(argfunc) def _(x, axis=None, split_every=None): return arg_reduction(x, chunk, combine, agg, axis, split_every) return _ +def _nanargmin(x, axis, **kwargs): + try: + return chunk.nanargmin(x, axis, **kwargs) + except ValueError: + return chunk.nanargmin(np.where(np.isnan(x), np.inf, x), axis, **kwargs) + + +def _nanargmax(x, axis, **kwargs): + try: + return chunk.nanargmax(x, axis, **kwargs) + except ValueError: + return chunk.nanargmax(np.where(np.isnan(x), -np.inf, x), axis, **kwargs) + + argmin = make_arg_reduction(chunk.min, chunk.argmin) argmax = make_arg_reduction(chunk.max, chunk.argmax) -nanargmin = make_arg_reduction(chunk.nanmin, chunk.nanargmin) -nanargmax = make_arg_reduction(chunk.nanmax, chunk.nanargmax) +nanargmin = make_arg_reduction(chunk.nanmin, _nanargmin, True) +nanargmax = make_arg_reduction(chunk.nanmax, _nanargmax, True) def cumreduction(func, binop, ident, x, axis, dtype=None):
da.nanargmax fails when it encounters an all-NaN slice in a chunk Follow up on #776 ``` In [1]: import numpy as np In [2]: import dask.array as da In [3]: x = np.array([[1.0, np.nan], [np.nan, 2.0]]) In [4]: da.nanmax(da.from_array(x, chunks=1), axis=1).compute() /Users/shoyer/miniconda/envs/dask-dev/lib/python2.7/site-packages/numpy/lib/nanfunctions.py:319: RuntimeWarning: All-NaN slice encountered warnings.warn("All-NaN slice encountered", RuntimeWarning) Out[4]: array([ 1., 2.]) In [5]: da.nanargmax(da.from_array(x, chunks=1), axis=1).compute() --------------------------------------------------------------------------- ValueError Traceback (most recent call last) <ipython-input-5-403b812e207c> in <module>() ----> 1 da.nanargmax(da.from_array(x, chunks=1), axis=1).compute() /Users/shoyer/dev/dask/dask/base.pyc in compute(self, **kwargs) 29 30 def compute(self, **kwargs): ---> 31 return compute(self, **kwargs)[0] 32 33 @classmethod /Users/shoyer/dev/dask/dask/base.pyc in compute(*args, **kwargs) 97 for opt, val in groups.items()]) 98 keys = [arg._keys() for arg in args] ---> 99 results = get(dsk, keys, **kwargs) 100 return tuple(a._finalize(a, r) for a, r in zip(args, results)) 101 /Users/shoyer/dev/dask/dask/threaded.pyc in get(dsk, result, cache, num_workers, **kwargs) 55 results = get_async(pool.apply_async, len(pool._pool), dsk, result, 56 cache=cache, queue=queue, get_id=_thread_get_id, ---> 57 **kwargs) 58 59 return results /Users/shoyer/dev/dask/dask/async.pyc in get_async(apply_async, num_workers, dsk, result, cache, queue, get_id, raise_on_exception, rerun_exceptions_locally, callbacks, **kwargs) 480 _execute_task(task, data) # Re-execute locally 481 else: --> 482 raise(remote_exception(res, tb)) 483 state['cache'][key] = res 484 finish_task(dsk, key, state, results, keyorder.get) ValueError: All-NaN slice encountered Traceback --------- File "dask/async.py", line 262, in execute_task result = _execute_task(task, data) File "dask/async.py", line 245, in _execute_task return func(*args2) File "dask/array/reductions.py", line 367, in argreduce return (func(x, axis=axis), argfunc(x, axis=axis)) File "dask/array/chunk.py", line 25, in keepdims_wrapped_callable r = a_callable(x, axis=axis, *args, **kwargs) File "/Users/shoyer/miniconda/envs/dask-dev/lib/python2.7/site-packages/numpy/lib/nanfunctions.py", line 420, in nanargmax raise ValueError("All-NaN slice encountered") ```
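Condensed sketch of the reported behaviour (illustrative only, not part of the record): with one-element chunks, some chunks are all-NaN even though no full row is.

```python
import numpy as np
import dask.array as da

x = np.array([[1.0, np.nan], [np.nan, 2.0]])
d = da.from_array(x, chunks=1)

# nanmax already tolerates the all-NaN chunks along each row...
print(da.nanmax(d, axis=1).compute())     # [1. 2.]

# ...and after the patch above nanargmax should match numpy instead of
# raising "All-NaN slice encountered" from the per-chunk nanargmax calls.
print(da.nanargmax(d, axis=1).compute())  # [0 1]
```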
dask/dask
diff --git a/dask/array/tests/test_reductions.py b/dask/array/tests/test_reductions.py index 7b734416f..2b1a08437 100644 --- a/dask/array/tests/test_reductions.py +++ b/dask/array/tests/test_reductions.py @@ -162,6 +162,26 @@ def test_arg_reductions(dfunc, func): assert eq(dfunc(a2, 0, split_every=2), func(x2, 0)) [email protected](['dfunc', 'func'], + [(da.nanargmin, np.nanargmin), (da.nanargmax, np.nanargmax)]) +def test_nanarg_reductions(dfunc, func): + x = np.random.random((10, 10, 10)) + x[5] = np.nan + a = da.from_array(x, chunks=(3, 4, 5)) + assert eq(dfunc(a), func(x)) + assert eq(dfunc(a, 0), func(x, 0)) + with pytest.raises(ValueError): + dfunc(a, 1).compute() + + with pytest.raises(ValueError): + dfunc(a, 2).compute() + + x[:] = np.nan + a = da.from_array(x, chunks=(3, 4, 5)) + with pytest.raises(ValueError): + dfunc(a).compute() + + def test_reductions_2D_nans(): # chunks are a mix of some/all/no NaNs x = np.full((4, 4), np.nan) @@ -189,17 +209,18 @@ def test_reductions_2D_nans(): reduction_2d_test(da.nanmin, a, np.nanmin, x, False, False) reduction_2d_test(da.nanmax, a, np.nanmax, x, False, False) - # TODO: fix these tests, which fail with this error from NumPy: - # ValueError("All-NaN slice encountered"), because some of the chunks - # (not all) have all NaN values. - # assert eq(da.argmax(a, axis=0), np.argmax(x, axis=0)) - # assert eq(da.argmin(a, axis=0), np.argmin(x, axis=0)) - # assert eq(da.nanargmax(a, axis=0), np.nanargmax(x, axis=0)) - # assert eq(da.nanargmin(a, axis=0), np.nanargmin(x, axis=0)) - # assert eq(da.argmax(a, axis=1), np.argmax(x, axis=1)) - # assert eq(da.argmin(a, axis=1), np.argmin(x, axis=1)) - # assert eq(da.nanargmax(a, axis=1), np.nanargmax(x, axis=1)) - # assert eq(da.nanargmin(a, axis=1), np.nanargmin(x, axis=1)) + assert eq(da.argmax(a), np.argmax(x)) + assert eq(da.argmin(a), np.argmin(x)) + assert eq(da.nanargmax(a), np.nanargmax(x)) + assert eq(da.nanargmin(a), np.nanargmin(x)) + assert eq(da.argmax(a, axis=0), np.argmax(x, axis=0)) + assert eq(da.argmin(a, axis=0), np.argmin(x, axis=0)) + assert eq(da.nanargmax(a, axis=0), np.nanargmax(x, axis=0)) + assert eq(da.nanargmin(a, axis=0), np.nanargmin(x, axis=0)) + assert eq(da.argmax(a, axis=1), np.argmax(x, axis=1)) + assert eq(da.argmin(a, axis=1), np.argmin(x, axis=1)) + assert eq(da.nanargmax(a, axis=1), np.nanargmax(x, axis=1)) + assert eq(da.nanargmin(a, axis=1), np.nanargmin(x, axis=1)) def test_moment():
{ "commit_name": "head_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 2 }, "num_modified_files": 1 }
1.8
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[complete]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "numpy>=1.16.0 pandas>=1.0.0 cloudpickle partd distributed s3fs toolz psutil pytables bokeh bcolz scipy h5py ipython", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y graphviz liblzma-dev" ], "python": "3.5", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
aiobotocore @ file:///opt/conda/conda-bld/aiobotocore_1643638228694/work aiohttp @ file:///tmp/build/80754af9/aiohttp_1632748060317/work aioitertools @ file:///tmp/build/80754af9/aioitertools_1607109665762/work async-timeout==3.0.1 attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work backcall @ file:///home/ktietz/src/ci/backcall_1611930011877/work bcolz==1.2.1 bokeh @ file:///tmp/build/80754af9/bokeh_1620710048147/work botocore @ file:///opt/conda/conda-bld/botocore_1642672735464/work brotlipy==0.7.0 certifi==2021.5.30 cffi @ file:///tmp/build/80754af9/cffi_1625814693874/work chardet @ file:///tmp/build/80754af9/chardet_1607706739153/work click==8.0.3 cloudpickle @ file:///tmp/build/80754af9/cloudpickle_1632508026186/work contextvars==2.4 cryptography @ file:///tmp/build/80754af9/cryptography_1635366128178/work cytoolz==0.11.0 -e git+https://github.com/dask/dask.git@6dc9229362f2d3b1dfa466a8a63831c3c832b4be#egg=dask decorator @ file:///opt/conda/conda-bld/decorator_1643638310831/work distributed @ file:///tmp/build/80754af9/distributed_1615054599257/work fsspec @ file:///opt/conda/conda-bld/fsspec_1642510437511/work h5py==2.10.0 HeapDict @ file:///Users/ktietz/demo/mc3/conda-bld/heapdict_1630598515714/work idna @ file:///tmp/build/80754af9/idna_1637925883363/work idna-ssl @ file:///tmp/build/80754af9/idna_ssl_1611752490495/work immutables @ file:///tmp/build/80754af9/immutables_1628888996840/work importlib-metadata==4.8.3 iniconfig==1.1.1 ipython @ file:///tmp/build/80754af9/ipython_1593447367857/work ipython-genutils @ file:///tmp/build/80754af9/ipython_genutils_1606773439826/work jedi @ file:///tmp/build/80754af9/jedi_1606932572482/work Jinja2 @ file:///opt/conda/conda-bld/jinja2_1647436528585/work jmespath @ file:///Users/ktietz/demo/mc3/conda-bld/jmespath_1630583964805/work locket==0.2.1 MarkupSafe @ file:///tmp/build/80754af9/markupsafe_1621528150516/work mock @ file:///tmp/build/80754af9/mock_1607622725907/work msgpack @ file:///tmp/build/80754af9/msgpack-python_1612287171716/work multidict @ file:///tmp/build/80754af9/multidict_1607367768400/work numexpr @ file:///tmp/build/80754af9/numexpr_1618853194344/work numpy @ file:///tmp/build/80754af9/numpy_and_numpy_base_1603483703303/work olefile @ file:///Users/ktietz/demo/mc3/conda-bld/olefile_1629805411829/work packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pandas==1.1.5 parso==0.7.0 partd @ file:///opt/conda/conda-bld/partd_1647245470509/work pexpect @ file:///tmp/build/80754af9/pexpect_1605563209008/work pickleshare @ file:///tmp/build/80754af9/pickleshare_1606932040724/work Pillow @ file:///tmp/build/80754af9/pillow_1625670622947/work pluggy==1.0.0 prompt-toolkit @ file:///tmp/build/80754af9/prompt-toolkit_1633440160888/work psutil @ file:///tmp/build/80754af9/psutil_1612297621795/work ptyprocess @ file:///tmp/build/80754af9/ptyprocess_1609355006118/work/dist/ptyprocess-0.7.0-py2.py3-none-any.whl py==1.11.0 pycparser @ file:///tmp/build/80754af9/pycparser_1636541352034/work Pygments @ file:///opt/conda/conda-bld/pygments_1644249106324/work pyOpenSSL @ file:///opt/conda/conda-bld/pyopenssl_1643788558760/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work PySocks @ file:///tmp/build/80754af9/pysocks_1605305763431/work pytest==7.0.1 python-dateutil @ file:///tmp/build/80754af9/python-dateutil_1626374649649/work pytz==2021.3 PyYAML==5.4.1 s3fs @ file:///opt/conda/conda-bld/s3fs_1643701468749/work scipy @ file:///tmp/build/80754af9/scipy_1597686635649/work six @ 
file:///tmp/build/80754af9/six_1644875935023/work sortedcontainers @ file:///tmp/build/80754af9/sortedcontainers_1623949099177/work tables==3.6.1 tblib @ file:///Users/ktietz/demo/mc3/conda-bld/tblib_1629402031467/work tomli==1.2.3 toolz @ file:///tmp/build/80754af9/toolz_1636545406491/work tornado @ file:///tmp/build/80754af9/tornado_1606942266872/work traitlets @ file:///tmp/build/80754af9/traitlets_1632746497744/work typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work urllib3 @ file:///opt/conda/conda-bld/urllib3_1643638302206/work wcwidth @ file:///Users/ktietz/demo/mc3/conda-bld/wcwidth_1629357192024/work wrapt==1.12.1 yarl @ file:///tmp/build/80754af9/yarl_1606939915466/work zict==2.0.0 zipp==3.6.0
name: dask channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - aiobotocore=2.1.0=pyhd3eb1b0_0 - aiohttp=3.7.4.post0=py36h7f8727e_2 - aioitertools=0.7.1=pyhd3eb1b0_0 - async-timeout=3.0.1=py36h06a4308_0 - attrs=21.4.0=pyhd3eb1b0_0 - backcall=0.2.0=pyhd3eb1b0_0 - bcolz=1.2.1=py36h04863e7_0 - blas=1.0=openblas - blosc=1.21.3=h6a678d5_0 - bokeh=2.3.2=py36h06a4308_0 - botocore=1.23.24=pyhd3eb1b0_0 - brotlipy=0.7.0=py36h27cfd23_1003 - bzip2=1.0.8=h5eee18b_6 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - cffi=1.14.6=py36h400218f_0 - chardet=4.0.0=py36h06a4308_1003 - click=8.0.3=pyhd3eb1b0_0 - cloudpickle=2.0.0=pyhd3eb1b0_0 - contextvars=2.4=py_0 - cryptography=35.0.0=py36hd23ed53_0 - cytoolz=0.11.0=py36h7b6447c_0 - decorator=5.1.1=pyhd3eb1b0_0 - distributed=2021.3.0=py36h06a4308_0 - freetype=2.12.1=h4a9f257_0 - fsspec=2022.1.0=pyhd3eb1b0_0 - giflib=5.2.2=h5eee18b_0 - h5py=2.10.0=py36h7918eee_0 - hdf5=1.10.4=hb1b8bf9_0 - heapdict=1.0.1=pyhd3eb1b0_0 - idna=3.3=pyhd3eb1b0_0 - idna_ssl=1.1.0=py36h06a4308_0 - immutables=0.16=py36h7f8727e_0 - ipython=7.16.1=py36h5ca1d4c_0 - ipython_genutils=0.2.0=pyhd3eb1b0_1 - jedi=0.17.2=py36h06a4308_1 - jinja2=3.0.3=pyhd3eb1b0_0 - jmespath=0.10.0=pyhd3eb1b0_0 - jpeg=9e=h5eee18b_3 - lcms2=2.16=hb9589c4_0 - ld_impl_linux-64=2.40=h12ee557_0 - lerc=4.0.0=h6a678d5_0 - libdeflate=1.22=h5eee18b_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgfortran-ng=7.5.0=ha8ba4b0_17 - libgfortran4=7.5.0=ha8ba4b0_17 - libgomp=11.2.0=h1234567_1 - libopenblas=0.3.18=hf726d26_0 - libpng=1.6.39=h5eee18b_0 - libstdcxx-ng=11.2.0=h1234567_1 - libtiff=4.5.1=hffd6297_1 - libwebp=1.2.4=h11a3e52_1 - libwebp-base=1.2.4=h5eee18b_1 - locket=0.2.1=py36h06a4308_1 - lz4-c=1.9.4=h6a678d5_1 - lzo=2.10=h7b6447c_2 - markupsafe=2.0.1=py36h27cfd23_0 - mock=4.0.3=pyhd3eb1b0_0 - msgpack-python=1.0.2=py36hff7bd54_1 - multidict=5.1.0=py36h27cfd23_2 - ncurses=6.4=h6a678d5_0 - numexpr=2.7.3=py36h4be448d_1 - numpy=1.19.2=py36h6163131_0 - numpy-base=1.19.2=py36h75fe3a5_0 - olefile=0.46=pyhd3eb1b0_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pandas=1.1.5=py36ha9443f7_0 - parso=0.7.0=py_0 - partd=1.2.0=pyhd3eb1b0_1 - pexpect=4.8.0=pyhd3eb1b0_3 - pickleshare=0.7.5=pyhd3eb1b0_1003 - pillow=8.3.1=py36h5aabda8_0 - pip=21.2.2=py36h06a4308_0 - prompt-toolkit=3.0.20=pyhd3eb1b0_0 - psutil=5.8.0=py36h27cfd23_1 - ptyprocess=0.7.0=pyhd3eb1b0_2 - pycparser=2.21=pyhd3eb1b0_0 - pygments=2.11.2=pyhd3eb1b0_0 - pyopenssl=22.0.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pysocks=1.7.1=py36h06a4308_0 - pytables=3.6.1=py36h71ec239_0 - python=3.6.13=h12debd9_1 - python-dateutil=2.8.2=pyhd3eb1b0_0 - pytz=2021.3=pyhd3eb1b0_0 - pyyaml=5.4.1=py36h27cfd23_1 - readline=8.2=h5eee18b_0 - s3fs=2022.1.0=pyhd3eb1b0_0 - scipy=1.5.2=py36habc2bb6_0 - setuptools=58.0.4=py36h06a4308_0 - six=1.16.0=pyhd3eb1b0_1 - sortedcontainers=2.4.0=pyhd3eb1b0_0 - sqlite=3.45.3=h5eee18b_0 - tblib=1.7.0=pyhd3eb1b0_0 - tk=8.6.14=h39e8969_0 - toolz=0.11.2=pyhd3eb1b0_0 - tornado=6.1=py36h27cfd23_0 - traitlets=4.3.3=py36h06a4308_0 - typing-extensions=4.1.1=hd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - urllib3=1.26.8=pyhd3eb1b0_0 - wcwidth=0.2.5=pyhd3eb1b0_0 - wheel=0.37.1=pyhd3eb1b0_0 - wrapt=1.12.1=py36h7b6447c_1 - xz=5.6.4=h5eee18b_1 - yaml=0.2.5=h7b6447c_0 - yarl=1.6.3=py36h27cfd23_0 - zict=2.0.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - zstd=1.5.6=hc292b87_0 - pip: - 
importlib-metadata==4.8.3 - iniconfig==1.1.1 - pluggy==1.0.0 - py==1.11.0 - pytest==7.0.1 - tomli==1.2.3 - zipp==3.6.0 prefix: /opt/conda/envs/dask
[ "dask/array/tests/test_reductions.py::test_arg_reductions[_nanargmin-nanargmin]", "dask/array/tests/test_reductions.py::test_arg_reductions[_nanargmax-nanargmax]", "dask/array/tests/test_reductions.py::test_nanarg_reductions[_nanargmin-nanargmin]", "dask/array/tests/test_reductions.py::test_nanarg_reductions[_nanargmax-nanargmax]", "dask/array/tests/test_reductions.py::test_reductions_2D_nans" ]
[ "dask/array/tests/test_reductions.py::test_reductions_2D[f4]", "dask/array/tests/test_reductions.py::test_reductions_2D[i4]" ]
[ "dask/array/tests/test_reductions.py::test_reductions_1D[f4]", "dask/array/tests/test_reductions.py::test_reductions_1D[i4]", "dask/array/tests/test_reductions.py::test_arg_reductions[argmin-argmin]", "dask/array/tests/test_reductions.py::test_arg_reductions[argmax-argmax]", "dask/array/tests/test_reductions.py::test_moment", "dask/array/tests/test_reductions.py::test_reductions_with_negative_axes", "dask/array/tests/test_reductions.py::test_nan", "dask/array/tests/test_reductions.py::test_0d_array", "dask/array/tests/test_reductions.py::test_reduction_on_scalar", "dask/array/tests/test_reductions.py::test_tree_reduce_depth", "dask/array/tests/test_reductions.py::test_tree_reduce_set_options" ]
[]
BSD 3-Clause "New" or "Revised" License
461
sympy__sympy-10741
7e2c5280fd12994b952d599b19d5e5be14629918
2016-03-05 23:05:19
8bb5814067cfa0348fb8b708848f35dba2b55ff4
diff --git a/sympy/core/basic.py b/sympy/core/basic.py index d8dec8588a..502a3adb8e 100644 --- a/sympy/core/basic.py +++ b/sympy/core/basic.py @@ -849,20 +849,15 @@ def subs(self, *args, **kwargs): sequence = list(sequence) for i in range(len(sequence)): - s = list(sequence[i]) - for j, si in enumerate(s): - try: - si = sympify(si, strict=True) - except SympifyError: - if type(si) is str: - si = Symbol(si) - else: - # if it can't be sympified, skip it - sequence[i] = None - break - s[j] = si - else: - sequence[i] = None if _aresame(*s) else tuple(s) + o, n = sequence[i] + so, sn = sympify(o), sympify(n) + if not isinstance(so, Basic): + if type(o) is str: + so = Symbol(o) + sequence[i] = (so, sn) + if _aresame(so, sn): + sequence[i] = None + continue sequence = list(filter(None, sequence)) if unordered: diff --git a/sympy/physics/vector/vector.py b/sympy/physics/vector/vector.py index 93dc3d24c3..e835c3999f 100644 --- a/sympy/physics/vector/vector.py +++ b/sympy/physics/vector/vector.py @@ -128,10 +128,7 @@ def __eq__(self, other): if other == 0: other = Vector(0) - try: - other = _check_vector(other) - except TypeError: - return False + other = _check_vector(other) if (self.args == []) and (other.args == []): return True elif (self.args == []) or (other.args == []): diff --git a/sympy/solvers/ode.py b/sympy/solvers/ode.py index aee9f8361b..4cff01f78e 100644 --- a/sympy/solvers/ode.py +++ b/sympy/solvers/ode.py @@ -1959,17 +1959,17 @@ def _sympify(eq): t = funcs[0].args[0] dictsol = dict() for sol in sols: - func = list(sol.atoms(AppliedUndef))[0] - if sol.rhs == func: - sol = sol.reversed - solved = sol.lhs == func and not sol.rhs.has(func) - if not solved: - rhs = solve(sol, func) - if not rhs: + sol_func = list(sol.atoms(AppliedUndef))[0] + if not (sol.lhs == sol_func and not sol.rhs.has(sol_func)) and not (\ + sol.rhs == sol_func and not sol.lhs.has(sol_func)): + solved = solve(sol, sol_func) + if not solved: raise NotImplementedError - else: - rhs = sol.rhs - dictsol[func] = rhs + dictsol[sol_func] = solved + if sol.lhs == sol_func: + dictsol[sol_func] = sol.rhs + if sol.rhs == sol_func: + dictsol[sol_func] = sol.lhs checkeq = [] for eq in eqs: for func in funcs: @@ -2243,13 +2243,15 @@ def checkodesol(ode, sol, func=None, order='auto', solve_for_func=True): order = ode_order(ode, func) solved = sol.lhs == func and not sol.rhs.has(func) if solve_for_func and not solved: - rhs = solve(sol, func) - if rhs: - eqs = [Eq(func, t) for t in rhs] - if len(rhs) == 1: - eqs = eqs[0] - return checkodesol(ode, eqs, order=order, - solve_for_func=False) + solved = solve(sol, func) + if solved: + if len(solved) == 1: + result = checkodesol(ode, Eq(func, solved[0]), + order=order, solve_for_func=False) + else: + result = checkodesol(ode, [Eq(func, t) for t in solved], + order=order, solve_for_func=False) + return result s = True testnum = 0 diff --git a/sympy/solvers/solveset.py b/sympy/solvers/solveset.py index 28ade30649..9f413e43cb 100644 --- a/sympy/solvers/solveset.py +++ b/sympy/solvers/solveset.py @@ -363,7 +363,7 @@ def _solve_as_rational(f, symbol, domain): return valid_solns - invalid_solns -def _solve_trig(f, symbol, domain): +def _solve_real_trig(f, symbol): """ Helper to solve trigonometric equations """ f = trigsimp(f) f_original = f @@ -379,9 +379,8 @@ def _solve_trig(f, symbol, domain): solns = solveset_complex(g, y) - solveset_complex(h, y) if isinstance(solns, FiniteSet): - result = Union(*[invert_complex(exp(I*symbol), s, symbol)[1] + return Union(*[invert_complex(exp(I*symbol), s, 
symbol)[1] for s in solns]) - return Intersection(result, domain) elif solns is S.EmptySet: return S.EmptySet else: @@ -575,7 +574,7 @@ def _solveset(f, symbol, domain, _check=False): result = Union(*[solver(m, symbol) for m in f.args]) elif _is_function_class_equation(TrigonometricFunction, f, symbol) or \ _is_function_class_equation(HyperbolicFunction, f, symbol): - result = _solve_trig(f, symbol, domain) + result = _solve_real_trig(f, symbol) elif f.is_Piecewise: dom = domain result = EmptySet()
Add a test for `Interval(..) in Interval(..) == False`
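Minimal sketch of the behaviour the requested test pins down (taken directly from the test patch below; illustrative, assuming sympy is installed).

```python
from sympy import Interval

# An Interval is a set of numbers, not an element of itself, so Python
# membership should evaluate to plain False rather than a symbolic result.
assert (Interval(0, 2) in Interval(0, 2)) is False
```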
sympy/sympy
diff --git a/sympy/core/tests/test_subs.py b/sympy/core/tests/test_subs.py index 3faf17b3a4..73238f0289 100644 --- a/sympy/core/tests/test_subs.py +++ b/sympy/core/tests/test_subs.py @@ -622,14 +622,14 @@ def test_mul2(): def test_noncommutative_subs(): x,y = symbols('x,y', commutative=False) - assert (x*y*x).subs([(x, x*y), (y, x)], simultaneous=True) == (x*y*x**2*y) + assert (x*y*x).subs([(x,x*y),(y,x)],simultaneous=True) == (x*y*x**2*y) def test_issue_2877(): f = Float(2.0) assert (x + f).subs({f: 2}) == x + 2 - def r(a, b, c): + def r(a,b,c): return factor(a*x**2 + b*x + c) e = r(5/6, 10, 5) assert nsimplify(e) == 5*x**2/6 + 10*x + 5 @@ -678,15 +678,3 @@ def test_RootOf_issue_10092(): eq = x**3 - 17*x**2 + 81*x - 118 r = RootOf(eq, 0) assert (x < r).subs(x, r) is S.false - - -def test_issue_8886(): - from sympy.physics.mechanics import ReferenceFrame as R - from sympy.abc import x - # if something can't be sympified we assume that it - # doesn't play well with SymPy and disallow the - # substitution - v = R('A').x - assert x.subs(x, v) == x - assert v.subs(v, x) == v - assert v.__eq__(x) is False diff --git a/sympy/functions/elementary/tests/test_hyperbolic.py b/sympy/functions/elementary/tests/test_hyperbolic.py index 46bf4efbc7..5341dd92d0 100644 --- a/sympy/functions/elementary/tests/test_hyperbolic.py +++ b/sympy/functions/elementary/tests/test_hyperbolic.py @@ -539,8 +539,8 @@ def test_asech(): assert asech(2/sqrt(2 + sqrt(2))) == acosh(sqrt(2 + sqrt(2))/2) assert asech(S(2)) == acosh(1/S(2)) - # asech(x) == I*acos(1/x) - # (Note: the exact formula is asech(x) == +/- I*acos(1/x)) + # asech(x) == I*acos(x) + # (Note: the exact formula is asech(x) == +/- I*acos(x)) assert asech(-sqrt(2)) == I*acos(-1/sqrt(2)) assert asech(-2/sqrt(3)) == I*acos(-sqrt(3)/2) assert asech(-S(2)) == I*acos(-S.Half) diff --git a/sympy/sets/tests/test_sets.py b/sympy/sets/tests/test_sets.py index fa9fedafaf..d112a25476 100644 --- a/sympy/sets/tests/test_sets.py +++ b/sympy/sets/tests/test_sets.py @@ -423,6 +423,8 @@ def test_contains(): assert Interval(0, 2, True, True).contains(0) is S.false assert Interval(0, 2, True, True).contains(2) is S.false + assert (Interval(0, 2) in Interval(0, 2)) is False + assert FiniteSet(1, 2, 3).contains(2) is S.true assert FiniteSet(1, 2, Symbol('x')).contains(Symbol('x')) is S.true diff --git a/sympy/solvers/tests/test_solveset.py b/sympy/solvers/tests/test_solveset.py index 83fc70b391..7af972fcdd 100644 --- a/sympy/solvers/tests/test_solveset.py +++ b/sympy/solvers/tests/test_solveset.py @@ -737,7 +737,9 @@ def test_solve_trig(): y, a = symbols('y,a') assert solveset(sin(y + a) - sin(y), a, domain=S.Reals) == \ - imageset(Lambda(n, 2*n*pi), S.Integers) + Union(imageset(Lambda(n, 2*n*pi), S.Integers), + imageset(Lambda(n, + -I*(I*(2*n*pi +arg(-exp(-2*I*y))) + 2*im(y))), S.Integers)) @XFAIL
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 4 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "mpmath>=0.19", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.7", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi @ file:///croot/certifi_1671487769961/work/certifi exceptiongroup==1.2.2 importlib-metadata==6.7.0 iniconfig==2.0.0 mpmath==1.3.0 packaging==24.0 pluggy==1.2.0 pytest==7.4.4 -e git+https://github.com/sympy/sympy.git@7e2c5280fd12994b952d599b19d5e5be14629918#egg=sympy tomli==2.0.1 typing_extensions==4.7.1 zipp==3.15.0
name: sympy channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2022.12.7=py37h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=22.3.1=py37h06a4308_0 - python=3.7.16=h7a1cb2a_0 - readline=8.2=h5eee18b_0 - setuptools=65.6.3=py37h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.38.4=py37h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - exceptiongroup==1.2.2 - importlib-metadata==6.7.0 - iniconfig==2.0.0 - mpmath==1.3.0 - packaging==24.0 - pluggy==1.2.0 - pytest==7.4.4 - tomli==2.0.1 - typing-extensions==4.7.1 - zipp==3.15.0 prefix: /opt/conda/envs/sympy
[ "sympy/solvers/tests/test_solveset.py::test_solve_trig" ]
[ "sympy/core/tests/test_subs.py::test_mul2", "sympy/sets/tests/test_sets.py::test_image_Intersection", "sympy/sets/tests/test_sets.py::test_union_boundary_of_joining_sets", "sympy/solvers/tests/test_solveset.py::test_solve_sqrt_fail", "sympy/solvers/tests/test_solveset.py::test_uselogcombine_1", "sympy/solvers/tests/test_solveset.py::test_uselogcombine_2", "sympy/solvers/tests/test_solveset.py::test_rewrite_trigh", "sympy/solvers/tests/test_solveset.py::test_solve_only_exp_2", "sympy/solvers/tests/test_solveset.py::test_solve_quintics", "sympy/solvers/tests/test_solveset.py::test_solve_trig_abs", "sympy/solvers/tests/test_solveset.py::test_solve_trig_simplified", "sympy/solvers/tests/test_solveset.py::test_solve_lambert", "sympy/solvers/tests/test_solveset.py::test_conditionset_equality", "sympy/solvers/tests/test_solveset.py::test_issue_failing_pow" ]
[ "sympy/core/tests/test_subs.py::test_subs", "sympy/core/tests/test_subs.py::test_subs_AccumBounds", "sympy/core/tests/test_subs.py::test_trigonometric", "sympy/core/tests/test_subs.py::test_powers", "sympy/core/tests/test_subs.py::test_logexppow", "sympy/core/tests/test_subs.py::test_bug", "sympy/core/tests/test_subs.py::test_subbug1", "sympy/core/tests/test_subs.py::test_subbug2", "sympy/core/tests/test_subs.py::test_dict_set", "sympy/core/tests/test_subs.py::test_dict_ambigous", "sympy/core/tests/test_subs.py::test_deriv_sub_bug3", "sympy/core/tests/test_subs.py::test_equality_subs1", "sympy/core/tests/test_subs.py::test_equality_subs2", "sympy/core/tests/test_subs.py::test_issue_3742", "sympy/core/tests/test_subs.py::test_subs_dict1", "sympy/core/tests/test_subs.py::test_mul", "sympy/core/tests/test_subs.py::test_subs_simple", "sympy/core/tests/test_subs.py::test_subs_constants", "sympy/core/tests/test_subs.py::test_subs_commutative", "sympy/core/tests/test_subs.py::test_subs_noncommutative", "sympy/core/tests/test_subs.py::test_subs_basic_funcs", "sympy/core/tests/test_subs.py::test_subs_wild", "sympy/core/tests/test_subs.py::test_subs_mixed", "sympy/core/tests/test_subs.py::test_division", "sympy/core/tests/test_subs.py::test_add", "sympy/core/tests/test_subs.py::test_subs_issue_4009", "sympy/core/tests/test_subs.py::test_functions_subs", "sympy/core/tests/test_subs.py::test_derivative_subs", "sympy/core/tests/test_subs.py::test_derivative_subs2", "sympy/core/tests/test_subs.py::test_derivative_subs3", "sympy/core/tests/test_subs.py::test_issue_5284", "sympy/core/tests/test_subs.py::test_subs_iter", "sympy/core/tests/test_subs.py::test_subs_dict", "sympy/core/tests/test_subs.py::test_no_arith_subs_on_floats", "sympy/core/tests/test_subs.py::test_issue_5651", "sympy/core/tests/test_subs.py::test_issue_6075", "sympy/core/tests/test_subs.py::test_issue_6079", "sympy/core/tests/test_subs.py::test_issue_4680", "sympy/core/tests/test_subs.py::test_issue_6158", "sympy/core/tests/test_subs.py::test_Function_subs", "sympy/core/tests/test_subs.py::test_simultaneous_subs", "sympy/core/tests/test_subs.py::test_issue_6419_6421", "sympy/core/tests/test_subs.py::test_issue_6559", "sympy/core/tests/test_subs.py::test_issue_5261", "sympy/core/tests/test_subs.py::test_issue_6923", "sympy/core/tests/test_subs.py::test_2arg_hack", "sympy/core/tests/test_subs.py::test_noncommutative_subs", "sympy/core/tests/test_subs.py::test_issue_2877", "sympy/core/tests/test_subs.py::test_issue_5910", "sympy/core/tests/test_subs.py::test_issue_5217", "sympy/core/tests/test_subs.py::test_pow_eval_subs_no_cache", "sympy/core/tests/test_subs.py::test_RootOf_issue_10092", "sympy/functions/elementary/tests/test_hyperbolic.py::test_sinh", "sympy/functions/elementary/tests/test_hyperbolic.py::test_sinh_series", "sympy/functions/elementary/tests/test_hyperbolic.py::test_cosh", "sympy/functions/elementary/tests/test_hyperbolic.py::test_cosh_series", "sympy/functions/elementary/tests/test_hyperbolic.py::test_tanh", "sympy/functions/elementary/tests/test_hyperbolic.py::test_tanh_series", "sympy/functions/elementary/tests/test_hyperbolic.py::test_coth", "sympy/functions/elementary/tests/test_hyperbolic.py::test_coth_series", "sympy/functions/elementary/tests/test_hyperbolic.py::test_csch", "sympy/functions/elementary/tests/test_hyperbolic.py::test_csch_series", "sympy/functions/elementary/tests/test_hyperbolic.py::test_sech", "sympy/functions/elementary/tests/test_hyperbolic.py::test_sech_series", 
"sympy/functions/elementary/tests/test_hyperbolic.py::test_asinh", "sympy/functions/elementary/tests/test_hyperbolic.py::test_asinh_rewrite", "sympy/functions/elementary/tests/test_hyperbolic.py::test_asinh_series", "sympy/functions/elementary/tests/test_hyperbolic.py::test_acosh", "sympy/functions/elementary/tests/test_hyperbolic.py::test_acosh_infinities", "sympy/functions/elementary/tests/test_hyperbolic.py::test_acosh_series", "sympy/functions/elementary/tests/test_hyperbolic.py::test_asech", "sympy/functions/elementary/tests/test_hyperbolic.py::test_asech_infinities", "sympy/functions/elementary/tests/test_hyperbolic.py::test_asech_series", "sympy/functions/elementary/tests/test_hyperbolic.py::test_asech_rewrite", "sympy/functions/elementary/tests/test_hyperbolic.py::test_atanh", "sympy/functions/elementary/tests/test_hyperbolic.py::test_atanh_series", "sympy/functions/elementary/tests/test_hyperbolic.py::test_atanh_infinities", "sympy/functions/elementary/tests/test_hyperbolic.py::test_acoth", "sympy/functions/elementary/tests/test_hyperbolic.py::test_acoth_series", "sympy/functions/elementary/tests/test_hyperbolic.py::test_inverses", "sympy/functions/elementary/tests/test_hyperbolic.py::test_leading_term", "sympy/functions/elementary/tests/test_hyperbolic.py::test_complex", "sympy/functions/elementary/tests/test_hyperbolic.py::test_complex_2899", "sympy/functions/elementary/tests/test_hyperbolic.py::test_simplifications", "sympy/functions/elementary/tests/test_hyperbolic.py::test_issue_4136", "sympy/functions/elementary/tests/test_hyperbolic.py::test_sinh_rewrite", "sympy/functions/elementary/tests/test_hyperbolic.py::test_cosh_rewrite", "sympy/functions/elementary/tests/test_hyperbolic.py::test_tanh_rewrite", "sympy/functions/elementary/tests/test_hyperbolic.py::test_coth_rewrite", "sympy/functions/elementary/tests/test_hyperbolic.py::test_csch_rewrite", "sympy/functions/elementary/tests/test_hyperbolic.py::test_sech_rewrite", "sympy/functions/elementary/tests/test_hyperbolic.py::test_derivs", "sympy/functions/elementary/tests/test_hyperbolic.py::test_sinh_expansion", "sympy/functions/elementary/tests/test_hyperbolic.py::test_cosh_expansion", "sympy/sets/tests/test_sets.py::test_interval_arguments", "sympy/sets/tests/test_sets.py::test_interval_symbolic_end_points", "sympy/sets/tests/test_sets.py::test_union", "sympy/sets/tests/test_sets.py::test_difference", "sympy/sets/tests/test_sets.py::test_Complement", "sympy/sets/tests/test_sets.py::test_complement", "sympy/sets/tests/test_sets.py::test_intersect", "sympy/sets/tests/test_sets.py::test_intersection", "sympy/sets/tests/test_sets.py::test_issue_9623", "sympy/sets/tests/test_sets.py::test_is_disjoint", "sympy/sets/tests/test_sets.py::test_ProductSet_of_single_arg_is_arg", "sympy/sets/tests/test_sets.py::test_interval_subs", "sympy/sets/tests/test_sets.py::test_interval_to_mpi", "sympy/sets/tests/test_sets.py::test_measure", "sympy/sets/tests/test_sets.py::test_is_subset", "sympy/sets/tests/test_sets.py::test_is_proper_subset", "sympy/sets/tests/test_sets.py::test_is_superset", "sympy/sets/tests/test_sets.py::test_is_proper_superset", "sympy/sets/tests/test_sets.py::test_contains", "sympy/sets/tests/test_sets.py::test_interval_symbolic", "sympy/sets/tests/test_sets.py::test_union_contains", "sympy/sets/tests/test_sets.py::test_is_number", "sympy/sets/tests/test_sets.py::test_Interval_is_left_unbounded", "sympy/sets/tests/test_sets.py::test_Interval_is_right_unbounded", "sympy/sets/tests/test_sets.py::test_Interval_as_relational", 
"sympy/sets/tests/test_sets.py::test_Finite_as_relational", "sympy/sets/tests/test_sets.py::test_Union_as_relational", "sympy/sets/tests/test_sets.py::test_Intersection_as_relational", "sympy/sets/tests/test_sets.py::test_EmptySet", "sympy/sets/tests/test_sets.py::test_finite_basic", "sympy/sets/tests/test_sets.py::test_powerset", "sympy/sets/tests/test_sets.py::test_product_basic", "sympy/sets/tests/test_sets.py::test_real", "sympy/sets/tests/test_sets.py::test_supinf", "sympy/sets/tests/test_sets.py::test_universalset", "sympy/sets/tests/test_sets.py::test_Union_of_ProductSets_shares", "sympy/sets/tests/test_sets.py::test_Interval_free_symbols", "sympy/sets/tests/test_sets.py::test_image_interval", "sympy/sets/tests/test_sets.py::test_image_piecewise", "sympy/sets/tests/test_sets.py::test_image_FiniteSet", "sympy/sets/tests/test_sets.py::test_image_Union", "sympy/sets/tests/test_sets.py::test_image_EmptySet", "sympy/sets/tests/test_sets.py::test_issue_5724_7680", "sympy/sets/tests/test_sets.py::test_boundary", "sympy/sets/tests/test_sets.py::test_boundary_Union", "sympy/sets/tests/test_sets.py::test_boundary_ProductSet", "sympy/sets/tests/test_sets.py::test_boundary_ProductSet_line", "sympy/sets/tests/test_sets.py::test_is_open", "sympy/sets/tests/test_sets.py::test_is_closed", "sympy/sets/tests/test_sets.py::test_closure", "sympy/sets/tests/test_sets.py::test_interior", "sympy/sets/tests/test_sets.py::test_issue_7841", "sympy/sets/tests/test_sets.py::test_Eq", "sympy/sets/tests/test_sets.py::test_SymmetricDifference", "sympy/sets/tests/test_sets.py::test_issue_9536", "sympy/sets/tests/test_sets.py::test_issue_9637", "sympy/sets/tests/test_sets.py::test_issue_9808", "sympy/sets/tests/test_sets.py::test_issue_9956", "sympy/sets/tests/test_sets.py::test_issue_Symbol_inter", "sympy/sets/tests/test_sets.py::test_issue_10113", "sympy/sets/tests/test_sets.py::test_issue_10248", "sympy/sets/tests/test_sets.py::test_issue_9447", "sympy/sets/tests/test_sets.py::test_issue_10337", "sympy/sets/tests/test_sets.py::test_issue_10326", "sympy/sets/tests/test_sets.py::test_issue_9706", "sympy/sets/tests/test_sets.py::test_issue_10285", "sympy/solvers/tests/test_solveset.py::test_invert_real", "sympy/solvers/tests/test_solveset.py::test_invert_complex", "sympy/solvers/tests/test_solveset.py::test_domain_check", "sympy/solvers/tests/test_solveset.py::test_is_function_class_equation", "sympy/solvers/tests/test_solveset.py::test_garbage_input", "sympy/solvers/tests/test_solveset.py::test_solve_mul", "sympy/solvers/tests/test_solveset.py::test_solve_invert", "sympy/solvers/tests/test_solveset.py::test_errorinverses", "sympy/solvers/tests/test_solveset.py::test_solve_polynomial", "sympy/solvers/tests/test_solveset.py::test_return_root_of", "sympy/solvers/tests/test_solveset.py::test__has_rational_power", "sympy/solvers/tests/test_solveset.py::test_solveset_sqrt_1", "sympy/solvers/tests/test_solveset.py::test_solveset_sqrt_2", "sympy/solvers/tests/test_solveset.py::test_solve_sqrt_3", "sympy/solvers/tests/test_solveset.py::test_solve_polynomial_symbolic_param", "sympy/solvers/tests/test_solveset.py::test_solve_rational", "sympy/solvers/tests/test_solveset.py::test_solveset_real_gen_is_pow", "sympy/solvers/tests/test_solveset.py::test_no_sol", "sympy/solvers/tests/test_solveset.py::test_sol_zero_real", "sympy/solvers/tests/test_solveset.py::test_no_sol_rational_extragenous", "sympy/solvers/tests/test_solveset.py::test_solve_polynomial_cv_1a", "sympy/solvers/tests/test_solveset.py::test_solveset_real_rational", 
"sympy/solvers/tests/test_solveset.py::test_solveset_real_log", "sympy/solvers/tests/test_solveset.py::test_poly_gens", "sympy/solvers/tests/test_solveset.py::test_solve_abs", "sympy/solvers/tests/test_solveset.py::test_real_imag_splitting", "sympy/solvers/tests/test_solveset.py::test_units", "sympy/solvers/tests/test_solveset.py::test_solve_only_exp_1", "sympy/solvers/tests/test_solveset.py::test_atan2", "sympy/solvers/tests/test_solveset.py::test_piecewise", "sympy/solvers/tests/test_solveset.py::test_solveset_complex_polynomial", "sympy/solvers/tests/test_solveset.py::test_sol_zero_complex", "sympy/solvers/tests/test_solveset.py::test_solveset_complex_rational", "sympy/solvers/tests/test_solveset.py::test_solveset_complex_exp", "sympy/solvers/tests/test_solveset.py::test_solve_complex_log", "sympy/solvers/tests/test_solveset.py::test_solve_complex_sqrt", "sympy/solvers/tests/test_solveset.py::test_solveset_complex_tan", "sympy/solvers/tests/test_solveset.py::test_solve_invalid_sol", "sympy/solvers/tests/test_solveset.py::test_solveset", "sympy/solvers/tests/test_solveset.py::test_conditionset", "sympy/solvers/tests/test_solveset.py::test_solveset_domain", "sympy/solvers/tests/test_solveset.py::test_improve_coverage", "sympy/solvers/tests/test_solveset.py::test_issue_9522", "sympy/solvers/tests/test_solveset.py::test_linear_eq_to_matrix", "sympy/solvers/tests/test_solveset.py::test_linsolve", "sympy/solvers/tests/test_solveset.py::test_issue_9556", "sympy/solvers/tests/test_solveset.py::test_issue_9611", "sympy/solvers/tests/test_solveset.py::test_issue_9557", "sympy/solvers/tests/test_solveset.py::test_issue_9778", "sympy/solvers/tests/test_solveset.py::test_issue_9849", "sympy/solvers/tests/test_solveset.py::test_issue_9953", "sympy/solvers/tests/test_solveset.py::test_issue_9913", "sympy/solvers/tests/test_solveset.py::test_issue_10397", "sympy/solvers/tests/test_solveset.py::test_simplification", "sympy/solvers/tests/test_solveset.py::test_issue_10555", "sympy/solvers/tests/test_solveset.py::test_issue_8715" ]
[]
BSD
462
sympy__sympy-10743
7e2c5280fd12994b952d599b19d5e5be14629918
2016-03-06 16:06:59
8bb5814067cfa0348fb8b708848f35dba2b55ff4
diff --git a/sympy/functions/special/zeta_functions.py b/sympy/functions/special/zeta_functions.py index 561c3e0cab..d5240777eb 100644 --- a/sympy/functions/special/zeta_functions.py +++ b/sympy/functions/special/zeta_functions.py @@ -433,10 +433,7 @@ def eval(cls, z, a_=None): elif z is S.Infinity: return S.One elif z is S.Zero: - if a.is_negative: - return S.Half - a - 1 - else: - return S.Half - a + return S.Half - a elif z is S.One: return S.ComplexInfinity elif z.is_Integer:
zeta(0, n) where n is negative is wrong
```Python
>>> n=sympy.Symbol("n", negative=True)
>>> sympy.zeta(0, n)
-n - 1/2
```
This should be `-n+1/2`
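As a quick sanity check (not part of the original report), the standard value of the Hurwitz zeta function at s = 0 follows from ζ(−m, a) = −B_{m+1}(a)/(m+1) with m = 0 and carries no dependence on the sign of a:

```latex
\zeta(0, a) = -B_1(a) = \tfrac{1}{2} - a
\quad\Longrightarrow\quad
\zeta(0, n) = \tfrac{1}{2} - n = -n + \tfrac{1}{2}
\qquad (\text{not } -n - \tfrac{1}{2}) \text{ for negative } n.
```

This matches the fix in the patch above, which simply drops the `a.is_negative` special case.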
sympy/sympy
diff --git a/sympy/functions/special/tests/test_zeta_functions.py b/sympy/functions/special/tests/test_zeta_functions.py index de2007fe16..6f246db8e2 100644 --- a/sympy/functions/special/tests/test_zeta_functions.py +++ b/sympy/functions/special/tests/test_zeta_functions.py @@ -6,6 +6,7 @@ x = Symbol('x') a = Symbol('a') +b = Symbol('b', negative=True) z = Symbol('z') s = Symbol('s') @@ -17,6 +18,7 @@ def test_zeta_eval(): assert zeta(0) == Rational(-1, 2) assert zeta(0, x) == Rational(1, 2) - x + assert zeta(0, b) == Rational(1, 2) - b assert zeta(1) == zoo assert zeta(1, 2) == zoo @@ -52,10 +54,10 @@ def test_zeta_eval(): assert zeta(-4, -8) == 8772 assert zeta(0, 1) == -Rational(1, 2) - assert zeta(0, -1) == Rational(1, 2) + assert zeta(0, -1) == Rational(3, 2) assert zeta(0, 2) == -Rational(3, 2) - assert zeta(0, -2) == Rational(3, 2) + assert zeta(0, -2) == Rational(5, 2) assert zeta( 3).evalf(20).epsilon_eq(Float("1.2020569031595942854", 20), 1e-19)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 1 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "mpmath>=0.19", "pytest", "pytest-cov" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.5", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 coverage==6.2 importlib-metadata==4.8.3 iniconfig==1.1.1 mpmath==1.3.0 packaging==21.3 pluggy==1.0.0 py==1.11.0 pyparsing==3.1.4 pytest==7.0.1 pytest-cov==4.0.0 -e git+https://github.com/sympy/sympy.git@7e2c5280fd12994b952d599b19d5e5be14629918#egg=sympy tomli==1.2.3 typing_extensions==4.1.1 zipp==3.6.0
name: sympy channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - coverage==6.2 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - mpmath==1.3.0 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-cov==4.0.0 - tomli==1.2.3 - typing-extensions==4.1.1 - zipp==3.6.0 prefix: /opt/conda/envs/sympy
[ "sympy/functions/special/tests/test_zeta_functions.py::test_zeta_eval" ]
[]
[ "sympy/functions/special/tests/test_zeta_functions.py::test_zeta_series", "sympy/functions/special/tests/test_zeta_functions.py::test_dirichlet_eta_eval", "sympy/functions/special/tests/test_zeta_functions.py::test_rewriting", "sympy/functions/special/tests/test_zeta_functions.py::test_derivatives", "sympy/functions/special/tests/test_zeta_functions.py::test_polylog_expansion", "sympy/functions/special/tests/test_zeta_functions.py::test_lerchphi_expansion", "sympy/functions/special/tests/test_zeta_functions.py::test_stieltjes", "sympy/functions/special/tests/test_zeta_functions.py::test_stieltjes_evalf", "sympy/functions/special/tests/test_zeta_functions.py::test_issue_10475" ]
[]
BSD
463
MITLibraries__slingshot-11
755a842371e63a1c70fde8568523b9b5db0d304e
2016-03-07 17:04:21
755a842371e63a1c70fde8568523b9b5db0d304e
diff --git a/slingshot/cli.py b/slingshot/cli.py index 55efcf6..047d98e 100644 --- a/slingshot/cli.py +++ b/slingshot/cli.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- from __future__ import absolute_import +from datetime import datetime import os import shutil @@ -58,4 +59,8 @@ def run(layers, store, url, namespace, username, password): submit(zf, url, auth) except Exception as e: shutil.rmtree(bag, ignore_errors=True) + click.echo("%sZ: %s failed with %r" % + (datetime.utcnow().isoformat(), data_layer, e)) raise e + click.echo("%sZ: %s uploaded" % (datetime.utcnow().isoformat(), + data_layer))
Add logging
MITLibraries/slingshot
diff --git a/tests/test_cli.py b/tests/test_cli.py index 1aff724..61eca94 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -59,3 +59,21 @@ def test_run_uses_authentication(runner, layers_dir): '--username', 'foo', '--password', 'bar']) assert m.request_history[0].headers['Authorization'] == \ 'Basic Zm9vOmJhcg==' + + +def test_run_logs_uploaded_layers_to_stdout(runner, layers_dir): + with requests_mock.Mocker() as m: + store = tempfile.mkdtemp() + m.post('http://localhost') + res = runner.invoke(main, ['run', layers_dir, store, + 'http://localhost']) + assert 'SDE_DATA_BD_A8GNS_2003.zip uploaded' in res.output + + +def test_run_logs_failed_layers_to_stdout(runner, layers_dir): + with requests_mock.Mocker() as m: + store = tempfile.mkdtemp() + m.post('http://localhost', status_code=500) + res = runner.invoke(main, ['run', layers_dir, store, + 'http://localhost']) + assert 'SDE_DATA_BD_A8GNS_2003.zip failed' in res.output
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 3, "test_score": 1 }, "num_modified_files": 1 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "mock", "requests_mock" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
bagit==1.5.4 certifi==2025.1.31 charset-normalizer==3.4.1 click==6.3 exceptiongroup==1.2.2 idna==3.10 iniconfig==2.1.0 mock==5.2.0 packaging==24.2 pluggy==1.5.0 pytest==8.3.5 requests==2.32.3 requests-mock==1.12.1 -e git+https://github.com/MITLibraries/slingshot.git@755a842371e63a1c70fde8568523b9b5db0d304e#egg=slingshot tomli==2.2.1 urllib3==2.3.0
name: slingshot channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - bagit==1.5.4 - certifi==2025.1.31 - charset-normalizer==3.4.1 - click==6.3 - exceptiongroup==1.2.2 - idna==3.10 - iniconfig==2.1.0 - mock==5.2.0 - packaging==24.2 - pluggy==1.5.0 - pytest==8.3.5 - requests==2.32.3 - requests-mock==1.12.1 - tomli==2.2.1 - urllib3==2.3.0 - wheel==0.26.0 prefix: /opt/conda/envs/slingshot
[ "tests/test_cli.py::test_run_logs_uploaded_layers_to_stdout", "tests/test_cli.py::test_run_logs_failed_layers_to_stdout" ]
[]
[ "tests/test_cli.py::test_run_submits_bags", "tests/test_cli.py::test_run_leaves_bag_on_success", "tests/test_cli.py::test_run_removes_bag_on_failure", "tests/test_cli.py::test_run_uses_supplied_namespace", "tests/test_cli.py::test_run_uses_authentication" ]
[]
Apache License 2.0
464
MITLibraries__slingshot-12
6dca99653150369b2b5b422292acd1e0b1fb131e
2016-03-07 18:19:18
6dca99653150369b2b5b422292acd1e0b1fb131e
diff --git a/slingshot/cli.py b/slingshot/cli.py index 047d98e..75ad0c9 100644 --- a/slingshot/cli.py +++ b/slingshot/cli.py @@ -28,7 +28,9 @@ def main(): @click.option('--username', help="Username for kepler submission.") @click.option('--password', help="Password for kepler submission. Omit for prompt.") -def run(layers, store, url, namespace, username, password): [email protected]('--fail-after', default=5, + help="Stop after number of consecutive failures. Default is 5.") +def run(layers, store, url, namespace, username, password, fail_after): """Create and upload bags to the specified endpoint. This script will create bags from all the layers in the LAYERS @@ -50,6 +52,7 @@ def run(layers, store, url, namespace, username, password): auth = username, password if not all(auth): auth = None + failures = 0 for data_layer in uploadable(layers, store): bag = prep_bag(os.path.join(layers, data_layer), store) try: @@ -57,10 +60,15 @@ def run(layers, store, url, namespace, username, password): bag_name = make_uuid(os.path.basename(bag), namespace) with temp_archive(bag, bag_name) as zf: submit(zf, url, auth) + click.echo("%sZ: %s uploaded" % (datetime.utcnow().isoformat(), + data_layer)) + failures = 0 except Exception as e: shutil.rmtree(bag, ignore_errors=True) + failures += 1 click.echo("%sZ: %s failed with %r" % (datetime.utcnow().isoformat(), data_layer, e)) - raise e - click.echo("%sZ: %s uploaded" % (datetime.utcnow().isoformat(), - data_layer)) + if failures >= fail_after: + click.echo("%sZ: Maximum number of consecutive failures (%d)" % + (datetime.utcnow().isoformat(), failures)) + raise e
Set script failure conditions
The script should not completely fail for one bad layer, but rather log it and continue. It should fail immediately in certain cases, such as 404 and 401. It should probably fail after some number of layers have failed consecutively, since that would likely indicate something more fundamental is wrong.
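One way to read that requested policy is sketched below, assuming a hypothetical `upload_layer` helper that raises `requests.HTTPError` on HTTP errors; all names here are placeholders, not slingshot's actual API.

```python
# Minimal sketch of the failure policy described above; `upload_layer`, `run`,
# MAX_CONSECUTIVE_FAILURES and FATAL_STATUS are illustrative placeholders.
import requests

MAX_CONSECUTIVE_FAILURES = 5
FATAL_STATUS = {401, 404}   # bad credentials / bad endpoint: stop immediately


def upload_layer(layer, url="http://localhost"):
    """Placeholder upload: POST the layer name and raise HTTPError on 4xx/5xx."""
    response = requests.post(url, data={"layer": layer})
    response.raise_for_status()


def run(layers):
    consecutive_failures = 0
    for layer in layers:
        try:
            upload_layer(layer)
            consecutive_failures = 0          # any success resets the streak
        except requests.HTTPError as exc:
            status = exc.response.status_code if exc.response is not None else None
            if status in FATAL_STATUS:
                raise                         # something fundamental is wrong
            consecutive_failures += 1
            print("%s failed with %r" % (layer, exc))
            if consecutive_failures >= MAX_CONSECUTIVE_FAILURES:
                raise                         # too many failures in a row
```

Resetting the counter on every success keeps occasional intermittent failures from aborting an otherwise healthy batch.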
MITLibraries/slingshot
diff --git a/tests/test_cli.py b/tests/test_cli.py index 61eca94..e32903d 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -77,3 +77,12 @@ def test_run_logs_failed_layers_to_stdout(runner, layers_dir): res = runner.invoke(main, ['run', layers_dir, store, 'http://localhost']) assert 'SDE_DATA_BD_A8GNS_2003.zip failed' in res.output + + +def test_run_fails_after_consecutive_failures(runner, layers_dir): + with requests_mock.Mocker() as m: + store = tempfile.mkdtemp() + m.post('http://localhost', status_code=500) + res = runner.invoke(main, ['run', layers_dir, store, + 'http://localhost', '--fail-after', 1]) + assert 'Maximum number of consecutive failures' in res.output
{ "commit_name": "head_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 0, "issue_text_score": 2, "test_score": 3 }, "num_modified_files": 1 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "mock", "requests_mock" ], "pre_install": null, "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
bagit==1.5.4 certifi==2025.1.31 charset-normalizer==3.4.1 click==6.3 exceptiongroup==1.2.2 idna==3.10 iniconfig==2.1.0 mock==5.2.0 packaging==24.2 pluggy==1.5.0 pytest==8.3.5 requests==2.32.3 requests-mock==1.12.1 -e git+https://github.com/MITLibraries/slingshot.git@6dca99653150369b2b5b422292acd1e0b1fb131e#egg=slingshot tomli==2.2.1 urllib3==2.3.0
name: slingshot channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - bagit==1.5.4 - certifi==2025.1.31 - charset-normalizer==3.4.1 - click==6.3 - exceptiongroup==1.2.2 - idna==3.10 - iniconfig==2.1.0 - mock==5.2.0 - packaging==24.2 - pluggy==1.5.0 - pytest==8.3.5 - requests==2.32.3 - requests-mock==1.12.1 - tomli==2.2.1 - urllib3==2.3.0 - wheel==0.26.0 prefix: /opt/conda/envs/slingshot
[ "tests/test_cli.py::test_run_fails_after_consecutive_failures" ]
[]
[ "tests/test_cli.py::test_run_submits_bags", "tests/test_cli.py::test_run_leaves_bag_on_success", "tests/test_cli.py::test_run_removes_bag_on_failure", "tests/test_cli.py::test_run_uses_supplied_namespace", "tests/test_cli.py::test_run_uses_authentication", "tests/test_cli.py::test_run_logs_uploaded_layers_to_stdout", "tests/test_cli.py::test_run_logs_failed_layers_to_stdout" ]
[]
Apache License 2.0
465
PumucklOnTheAir__TestFramework-132
8d1c52e7c2569834f12f7a0739748e7333ca0099
2016-03-07 18:38:58
8d1c52e7c2569834f12f7a0739748e7333ca0099
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 0000000..5989d64 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,13 @@ + +little text describing your pull request. + + +Following tickets are finished: +resolved #115 +resolved #116 + + +tested on Pi: +- [ ] test_R_Server_VLAN.py +- [ ] test_AP_*.py +- [ ] test_A_Server_2.py diff --git a/.travis.yml b/.travis.yml index e596f5f..7f48a93 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,6 +1,7 @@ language: python python: - "3.4" + - "3.5" cache: directories: @@ -33,6 +34,7 @@ script: - python -m pytest --cov-append --cov=./ framework_unittests/test_A_Server.py - python -m pytest --cov-append --cov=./ test_server_alive.py - python -m pytest --cov-append --cov=./ framework_unittests/test_A_IPC.py + - python -m pytest --cov-append --cov=./ framework_unittests/test_A_cli.py - python -m pytest --ff -n auto --cov-append --cov=./ framework_unittests/test_AP_*.py - "python -m pep8 --exclude=migrations --ignore=E501,E225,E126 */*.py *.py" diff --git a/cli.py b/cli.py index 7002ec1..634ae87 100755 --- a/cli.py +++ b/cli.py @@ -152,6 +152,22 @@ def create_parsers(): help="List of routers", nargs="+") parser_online.add_argument("-a", "--all", action="store_true", default=False, help="Apply to all routers") + # subparser for test set + parser_test_set = subparsers.add_parser("start", help="Start a test set") + parser_test_set.add_argument("-r", "--routers", metavar="Router ID", type=int, default=[], action="store", + help="", nargs="+") + parser_test_set.add_argument("-a", "--all", action="store_true", default=False, help="Apply to all routers") + parser_test_set.add_argument("-s", "--set", metavar="Test set", type=str, default=[], action="store", + help="Name of set") + + # subparser for test results + parser_test_result = subparsers.add_parser("results", help="Manage the test results") + parser_test_result.add_argument("-r", "--routers", metavar="Router ID", type=int, default=[], action="store", + help="", nargs="+") + parser_test_result.add_argument("-a", "--all", action="store_true", default=False, help="Apply to all routers") + parser_test_result.add_argument("-rm", "--remove", action="store_true", default=False, + help="Remove all results. 
Ignoring parameter -r.") + return parser @@ -249,6 +265,32 @@ def main(): online_all = args.all server_proxy.router_online(args.routers, online_all) + elif args.mode == "start": + """ + subparse: start + """ + if args.all: + router_id = -1 + else: + router_id = args.routers[0] + set_name = args.set + server_proxy.start_test_set(router_id, set_name) + + elif args.mode == "results": + """ + subparse: results + """ + + if args.remove: + removed = server_proxy.delete_test_results() + print("Removed all " + str(removed) + " results.") + else: + if args.all: + router_id = -1 + else: + router_id = args.routers[0] + util.print_test_results(server_proxy.get_test_results(router_id)) + else: logging.info("Check --help for help") diff --git a/config/configmanager.py b/config/configmanager.py index cc4985c..440d828 100644 --- a/config/configmanager.py +++ b/config/configmanager.py @@ -222,7 +222,7 @@ class ConfigManager: return None @staticmethod - def get_test_config() -> []: + def _get_test_config() -> []: """ Read the Test Config file @@ -232,29 +232,15 @@ class ConfigManager: return ConfigManager.read_file(path) @staticmethod - def get_test_dict() -> []: + def get_test_sets() -> []: """ Read the Test Config file :return: Dictionary with a specific output from the file """ - output = ConfigManager.get_test_config() + output = ConfigManager._get_test_config() return output - @staticmethod - def get_test_list() -> []: - """ - Read the Test Config file - - :return: List with a specific output from the file - """ - output = ConfigManager.get_test_config() - test_list = [] - for x in output: - for v in x.values(): - test_list.append(v) - return test_list - @staticmethod def get_firmware_config() -> []: """ diff --git a/firmware/firmware_handler.py b/firmware/firmware_handler.py index a28c6c8..e0c4dd0 100644 --- a/firmware/firmware_handler.py +++ b/firmware/firmware_handler.py @@ -138,6 +138,8 @@ class FirmwareHandler: hashs = [] non_parsed_firmwares = self._read_firmwares_from_manifest(release_model) for firmware in non_parsed_firmwares: + if firmware.find("---\n") != -1: # skip the end of the file + continue firmware_name = "gluon" + firmware.split("gluon")[1].split("-sysupgrade")[0] + "-" + \ FirmwareHandler.UPDATE_TYPE + "." 
+ firmware.split(".")[-1].replace("\n", "") hash_firmware = firmware.split(' ')[4] diff --git a/requirements.txt b/requirements.txt index 33608a8..595564a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,18 +1,17 @@ -pyflakes -pep8 -pyyaml -typing -paramiko -pyroute2 -pylint -recommonmark -sphinx -selenium -sphinx_rtd_theme -pytest-cov -pytest-cache -pytest-xdist -codecov -pycrypto -ecdsa -ipaddress \ No newline at end of file +pyflakes==1.0.0 +pep8==1.6.2 +PyYAML==3.11 +typing==3.5.0.1 +paramiko==1.16.0 +pyroute2==0.3.15 +pylint==1.5.3 +recommonmark==0.4.0 +Sphinx==1.3.3 +sphinx-rtd-theme==0.1.9 +selenium==2.48.0 +pytest-cov==2.2.0 +pytest-cache==1.0 +pytest-xdist==1.13.1 +pycrypto==2.6.1 +ecdsa==0.13 +ipaddress==1.0.16 \ No newline at end of file diff --git a/server/server.py b/server/server.py index d494619..9c018da 100755 --- a/server/server.py +++ b/server/server.py @@ -9,6 +9,7 @@ from log.loggersetup import LoggerSetup import logging from concurrent.futures import ThreadPoolExecutor from unittest.result import TestResult +import importlib from threading import Event, Semaphore from network.remote_system import RemoteSystem, RemoteSystemJob from unittest import defaultTestLoader @@ -45,7 +46,7 @@ class Server(ServerProxy): # runtime vars _routers = [] # all registered routers on the system - _reports = [] # all test reports + _test_results = [] # all test reports in form (router.id, str(test), TestResult) _stopped = False # marks if the server is still running _max_subprocesses = 0 # will be set at start. describes how many Processes are needed in the Pool @@ -56,6 +57,7 @@ class Server(ServerProxy): _task_pool = None # multiprocessing.pool.Pool for task execution _job_wait_executor = None # ThreadPoolExecutor for I/O handling on tasks _semaphore_task_management = Semaphore(1) + _test_sets = {} # Dict[List[str]] # NVAssistent _nv_assistent = None @@ -135,6 +137,7 @@ class Server(ServerProxy): def __load_configuration(cls): logging.debug("Load configuration") cls._routers = ConfigManager.get_router_manual_list() + cls._test_sets = ConfigManager.get_test_sets() @classmethod def stop(cls) -> None: @@ -247,31 +250,28 @@ class Server(ServerProxy): return cls.__start_task(remote_sys, remote_job) @classmethod - def start_test(cls, router_id: int, test_name: str) -> bool: + def start_test_set(cls, router_id: int, test_set_name: str) -> bool: """ Start an specific test on a router - :param router_id: The id of the router on which the test will run - :param test_name: The name of the test to execute + :param router_id: The id of the router on which the test will run. + If id is -1 the test will be executed on all routers. + :param test_set_name: The name of the test set to execute :return: True if test was successful added in the queue """ - router = cls.get_router_by_id(router_id) - if router is None: - logging.error("Router ID unknown") - return False - - # TODO: Testverwaltung - ermittlung des passenden Tests #36 - # cls.get_test_by_name - from firmware_tests.connection_test import ConnectionTest, VeryLongTest - if test_name == "ConnectionTest": - demo_test = ConnectionTest # Important: Param is a class and not an object - elif test_name == "VeryLongTest": - demo_test = VeryLongTest - else: - logging.error("Testname unknown") - return False - return cls.__start_task(router, demo_test) + for file_name in cls._test_sets[test_set_name]: + module = importlib.import_module("firmware_tests." 
+ file_name) + import inspect + + for name, obj in inspect.getmembers(module): + if inspect.isclass(obj) and issubclass(obj, FirmwareTest) and name != "FirmwareTest": + if router_id == -1: + for router in cls._routers: + cls.__start_task(router, obj) + else: + cls.__start_task(cls.get_router_by_id(router_id), obj) + return True @classmethod def __start_task(cls, remote_sys: RemoteSystem, job: Union[RemoteSystemJobClass, RemoteSystemJob]) -> bool: @@ -282,7 +282,7 @@ class Server(ServerProxy): :param remote_sys: the RemoteSystem :param job: the Job - :return: true if job directly started, false if + :return: true if job directly started, false if not """ assert(cls._pid == os.getpid()) # Check if it is the the same PID as the PID Process which started the ProcessPool @@ -390,7 +390,7 @@ class Server(ServerProxy): logging.debug("%sTest done " + str(test), LoggerSetup.get_log_deep(1)) logging.debug("%sFrom " + str(router), LoggerSetup.get_log_deep(2)) - cls._reports.append(result) + cls._test_results.append((router.id, str(test), result)) except Exception as e: # TODO #105 logging.error("%sTest raised an Exception: " + str(e), LoggerSetup.get_log_deep(1)) @@ -401,11 +401,11 @@ class Server(ServerProxy): # result.addError(None, (type(exception), exception, None)) # TODO exception handling for failed Tests - cls._reports.append(result) + cls._test_results.append((router.id, str(test), result)) finally: cls.set_running_task(router, None) - # logging.debug(str(cls._reports)) + # logging.debug(str(cls._test_results)) # start next test in the queue cls.__start_task(router, None) @@ -468,10 +468,6 @@ class Server(ServerProxy): :return: List is a copy of the original list. """ - # check if list is still valid - for router in cls._routers: - assert isinstance(router, Router) - return cls._routers.copy() @classmethod @@ -502,13 +498,33 @@ class Server(ServerProxy): return cls._running_tasks.copy() @classmethod - def get_reports(cls) -> []: + def get_test_results(cls, router_id: int = -1) -> [(int, str, TestResult)]: + """ + Returns the firmware test results for the router + + :param router_id: the specific router or all router if id = -1 + :return: List of results + """ + + if router_id == -1: + return cls._test_results + else: + results = [] + for result in cls._test_results: + if result[0] == router_id: + results.append(result) + return results + + @classmethod + def delete_test_results(cls) -> int: """ - Returns the test results. + Remove all test results - :return: List of reports + :return: Number of deleted results """ - return cls._reports + size_results = len(cls._test_results) + cls._test_results = [] + return size_results @classmethod def get_tests(cls) -> List[FirmwareTestClass]: diff --git a/server/serverproxy.py b/server/serverproxy.py index 102cb67..a072aa8 100644 --- a/server/serverproxy.py +++ b/server/serverproxy.py @@ -1,6 +1,7 @@ from abc import ABCMeta, abstractclassmethod from typing import List from router.router import Router +from unittest import TestResult class ServerProxy(metaclass=ABCMeta): @@ -11,12 +12,12 @@ class ServerProxy(metaclass=ABCMeta): the return value is given by copy and not by reference! 
""""" @abstractclassmethod - def start_test(self, router_id: int, test_name: str) -> bool: + def start_test_set(self, router_id: int, test_set_name: str) -> bool: """ Start an specific test on a router :param router_id: The id of the router on which the test will run - :param test_name: The name of the test to execute + :param test_set_name: The name of the test set to execute :return: True if test was successful added in the queue """ pass @@ -50,11 +51,21 @@ class ServerProxy(metaclass=ABCMeta): pass @abstractclassmethod - def get_reports(self) -> []: + def get_test_results(self, router_id: int = -1) -> [(int, str, TestResult)]: """ - Returns the test results. + Returns the firmware test results for the router - :return: List of reports + :param router_id: the specific router or all router if id = -1 + :return: List of results + """ + pass + + @abstractclassmethod + def delete_test_results(self) -> int: + """ + Remove all test results + + :return: Number of deleted results """ pass diff --git a/util/cli_util.py b/util/cli_util.py index abac537..ad95e15 100644 --- a/util/cli_util.py +++ b/util/cli_util.py @@ -1,4 +1,5 @@ import logging +from unittest import TestResult class CLIUtil: @@ -11,6 +12,7 @@ class CLIUtil: def print_dynamic_table(content, headers): """ prints a dynamically formatted table + :param content: list of lists of data :param headers: list of headers """ @@ -48,6 +50,7 @@ class CLIUtil: def print_status(self, routers, headers): """ prints the status of all routers + :param routers: list of routers :param headers: list of headers """ @@ -62,6 +65,7 @@ class CLIUtil: def print_header(): """ prints header for the command line + :return: """ print("\v\t" + OutputColors.bold + "Freifunk Testframework\v" + OutputColors.clear) @@ -70,6 +74,7 @@ class CLIUtil: def return_progressbar(router, tid, percentage): """ returns the visual progress of a test on a router + :param router: router name :param tid: ID of test :param percentage: progress of test in percent @@ -84,6 +89,7 @@ class CLIUtil: def print_list(content): """ prints a simple list(table) sorted by the first row and formatted + :param content: list of list (table) :return: """ @@ -107,6 +113,7 @@ class CLIUtil: def print_router(router_list): """ prints a detailed list of info on a router + :param router_list: list of info on router :return: """ @@ -114,6 +121,25 @@ class CLIUtil: for elem in router_list: print("{:<15}{:<20}".format(str(elem[0]) + ":", str(elem[1]))) + @staticmethod + def print_test_results(result_list: [(int, str, TestResult)]): + """ + Prints a the TestResult list + + :param result_list: + :return: + """ + headers = ["Router ID", "Test", "(S|F|E)"] + content = [] + print("------Testresults------") + for result in result_list: + content.append([str(result[0]), result[1], "(" + str(result[2].testsRun - len(result[2].failures) - + len(result[2].errors)) + + "|" + str(len(result[2].failures)) + + "|" + str(len(result[2].errors)) + ")"]) + + CLIUtil.print_dynamic_table(content, headers) + class OutputColors: green = '\033[92m'
(29) TestResult management
Goal: It should be possible to manage test results via the CLI.
Task: The CLI must be able to output TestResults and, where applicable, delete them. Metadata for the associated test can possibly already be loaded by the test management #36, either when listing the tests or directly after execution + storage. Before implementing, it should be decided whether the TestResults are obtained via the IPC, via the file system, or from an external DB. If applicable, the QA measure for persisting the TestResults could therefore also be done in this ticket, saving time overall.
Info: At runtime the test results are stored in the server in the list _reports. Data type: unittest.TestResult
Subtasks:
- [x] Output TestResults on the CLI
- [x] Delete TestResults via the CLI
New user story/tickets:
- [ ] Optional: persist TestResults
- [ ] Optional: metadata for the TestResult
PumucklOnTheAir/TestFramework
diff --git a/config/test_config.yaml b/config/test_config.yaml index b977360..3d8be05 100644 --- a/config/test_config.yaml +++ b/config/test_config.yaml @@ -1,3 +1,3 @@ --- # config for the Test-Files -- {Test_Name: Test1, Params: [Param1]} -- {Test_Name: Test2, Params: [Param1, Param2]} \ No newline at end of file +set_1: [demo_1] +set_2: [demo_2, demo_1] \ No newline at end of file diff --git a/firmware_tests/__init__.py b/firmware_tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/firmware_tests/connection_test.py b/firmware_tests/connection_test.py deleted file mode 100644 index e509601..0000000 --- a/firmware_tests/connection_test.py +++ /dev/null @@ -1,49 +0,0 @@ -from server.test import FirmwareTest -import os - - -class ConnectionTest(FirmwareTest): - """ - This is a demo test - only to test the functionality of the framework itself. - """"" - def test_self_router(self): - # print(str(self.router)) - assert self.remote_system.id == 0 - - def test_ping_static(self): - # print("connection test: " + str(getpid())) - # os.system("ip a") - response = os.system("ping -t 5 -c 1 " + "www.p8h.de") - assert response == 0 # not working because no normal eth0 stack available - # from subprocess import Popen, PIPE - # process = Popen(["ip", "a"], stdout=PIPE, stderr=PIPE) - # stdout, sterr = process.communicate() - # print(str(stdout.decode('utf-8'))) - # response = os.system("ping -c 1 " + "p8h.de") - # assert response == #0 - - def test_ping_router(self): - hostname = self.remote_system.ip - response = os.system("ping -t 5 -c 1 " + hostname) - print(hostname) - assert response == 0 - - -class VeryLongTest(FirmwareTest): - """ - This is a demo test - only to test the functionality of the framework itself and it is very short.. - """"" - def test_very_long_test(self): - lol = True - assert lol - assert not not lol - - def test_buzz1(self): - lol = True - assert lol - assert not not lol - - def test_foo2(self): - lol = True - assert lol - assert not not lol diff --git a/firmware_tests/demo_1.py b/firmware_tests/demo_1.py new file mode 100644 index 0000000..6f3a139 --- /dev/null +++ b/firmware_tests/demo_1.py @@ -0,0 +1,14 @@ +from server.test import FirmwareTest +import os + + +class ConnectionTest(FirmwareTest): + """ + This is a demo test - only to test the functionality of the framework itself. + """"" + def test_self_router(self): + assert self.remote_system.id == 0 + + def test_ping_local(self): + response = os.system("ping -t 5 -c 1 " + "localhost") + assert response == 0 diff --git a/firmware_tests/demo_2.py b/firmware_tests/demo_2.py new file mode 100644 index 0000000..a63e2b6 --- /dev/null +++ b/firmware_tests/demo_2.py @@ -0,0 +1,21 @@ +from server.test import FirmwareTest + + +class StupidTest(FirmwareTest): + """ + This is a demo test - only to test the functionality of the framework itself and it is very short.. 
+ """"" + def test_not_very_long_test(self): + lol = True + assert lol + assert not not lol + + def test_buzz1(self): + lol = True + assert lol + assert not not lol + + def test_foo2(self): + lol = True + assert lol + assert not not lol diff --git a/framework_unittests/configs/config_no_vlan/server_config.yaml b/framework_unittests/configs/config_no_vlan/server_config.yaml index 37a304b..c38a8d8 100644 --- a/framework_unittests/configs/config_no_vlan/server_config.yaml +++ b/framework_unittests/configs/config_no_vlan/server_config.yaml @@ -4,7 +4,5 @@ Server_Name: TestServer, # set the level for what you want to log: NOTSET: 0, DEBUG: 10, INFO: 20, WARNING: 30, ERROR: 40, CRITICAL: 50 Log_Level: 10, # set True or False -Vlan_On: False, -# set True or False to use output on every console -Use_Console_Output: False +Vlan_On: False } \ No newline at end of file diff --git a/framework_unittests/configs/config_no_vlan/test_config.yaml b/framework_unittests/configs/config_no_vlan/test_config.yaml index b977360..3d8be05 100644 --- a/framework_unittests/configs/config_no_vlan/test_config.yaml +++ b/framework_unittests/configs/config_no_vlan/test_config.yaml @@ -1,3 +1,3 @@ --- # config for the Test-Files -- {Test_Name: Test1, Params: [Param1]} -- {Test_Name: Test2, Params: [Param1, Param2]} \ No newline at end of file +set_1: [demo_1] +set_2: [demo_2, demo_1] \ No newline at end of file diff --git a/framework_unittests/test_AP_Yaml.py b/framework_unittests/test_AP_Yaml.py index 23863a2..f639ac8 100644 --- a/framework_unittests/test_AP_Yaml.py +++ b/framework_unittests/test_AP_Yaml.py @@ -79,31 +79,6 @@ class MyTestCase(unittest.TestCase): data = ConfigManager.get_server_property("Server_Name") self.assertEqual(data, "TestServer", "test_Yaml: Wrong size of the List") - # test tests - def test_config_test(self): - """ - Tests the test config - :return: Tests results - """ - data = ConfigManager.get_test_config() - self.assertEqual(len(data), 2, "test_Yaml: Wrong size of the List") - - def test_config_test_dict(self): - """ - Tests the test config - :return: Tests results - """ - data = ConfigManager.get_test_dict() - self.assertEqual(len(data), 2, "test_Yaml: Wrong size of the List") - - def test_config_test_list(self): - """ - Tests the test config - :return: Tests results - """ - data = ConfigManager.get_test_list() - self.assertEqual(len(data), 4, "test_Yaml: Wrong size of the List") - # firmware tests def test_firmware_property(self): """ diff --git a/framework_unittests/test_A_cli_util.py b/framework_unittests/test_AP_cli_util.py similarity index 100% rename from framework_unittests/test_A_cli_util.py rename to framework_unittests/test_AP_cli_util.py diff --git a/framework_unittests/test_A_IPC.py b/framework_unittests/test_A_IPC.py index 9f370dd..790d852 100644 --- a/framework_unittests/test_A_IPC.py +++ b/framework_unittests/test_A_IPC.py @@ -38,7 +38,7 @@ class TestIPC(TestCase): ipc_client.connect(False) server_proxy = ipc_client.get_server_proxy() - rep = server_proxy.get_reports() + rep = server_proxy.get_test_results() # print(rep) assert rep[1].text == "test" @@ -76,7 +76,7 @@ class DummyServer(ServerProxy): return ["lol"] @classmethod - def get_reports(cls) -> []: + def get_test_results(cls) -> []: d = DummyObject("test") return [id(d), d] @@ -127,3 +127,11 @@ class DummyServer(ServerProxy): @classmethod def get_routers_task_queue_size(cls, router_id: int) -> [str]: pass + + @classmethod + def start_test_set(cls, router_id: int, test_set_name: str) -> bool: + pass + + @classmethod + def 
delete_test_results(cls) -> int: + pass diff --git a/framework_unittests/test_A_Server_2.py b/framework_unittests/test_A_Server_2.py index 870fccf..e507cc8 100644 --- a/framework_unittests/test_A_Server_2.py +++ b/framework_unittests/test_A_Server_2.py @@ -11,7 +11,7 @@ import socket def block_until_server_is_online(): sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) print("wait", flush=True) - while not not sock.connect_ex(('localhost', 5000)): + while sock.connect_ex(('localhost', 5000)): time.sleep(3) print('.', end="", flush=True) sock.close() @@ -32,7 +32,7 @@ class ServerCore(object): for i in range(2): # do it two times to be sure routers = server_proxy.get_routers() for router in routers: - while not not server_proxy.get_routers_task_queue_size(router.id): + while server_proxy.get_routers_task_queue_size(router.id): time.sleep(2) print('.', end="", flush=True) @@ -47,7 +47,7 @@ class ServerCore(object): for i in range(2): # do it two times to be sure routers = self.server_proxy.get_routers() for router in routers: - while not not self.server_proxy.get_routers_task_queue_size(router.id): + while self.server_proxy.get_routers_task_queue_size(router.id): time.sleep(2) print('.', end="", flush=True) @@ -56,38 +56,54 @@ class ServerCore(object): assert len(routers) != 0 assert isinstance(routers[0], Router) - def test_little_self_check(self): - started = self.server_proxy.start_test(0, "ConnectionTest") + def test_test_set(self): + started = self.server_proxy.start_test_set(0, "set_2") assert started # wait until tests are done, assumes that exactly two tests are already finished - while not self.server_proxy.get_reports(): + while not len(self.server_proxy.get_test_results()) == 2: time.sleep(2) print('.', end="", flush=True) - reports = self.server_proxy.get_reports() - assert len(reports) != 0 - assert len(reports[-1].errors) == 0 # check last report + reports = self.server_proxy.get_test_results() + assert len(reports) == 2 + assert len(reports[-1][2].errors) == 0 # check last report + + started = self.server_proxy.start_test_set(0, "set_1") - def test_long_self_check(self): - started = self.server_proxy.start_test(0, "ConnectionTest") assert started - started2 = self.server_proxy.start_test(0, "VeryLongTest") - assert not started2 - if started and not started2: - while not len(self.server_proxy.get_reports()) == 3: - time.sleep(2) - print('.', end="", flush=True) + # wait until tests are done, assumes that exactly two tests are already finished + while not len(self.server_proxy.get_test_results()) == 3: + time.sleep(2) + print('.', end="", flush=True) + + reports = self.server_proxy.get_test_results() + assert len(reports) == 3 + assert len(reports[2][-1].errors) == 0 # check last report + + def test_test_results(self): + self.server_proxy.delete_test_results() + + started = self.server_proxy.start_test_set(0, "set_2") + assert started + + while not len(self.server_proxy.get_test_results()) == 2: + time.sleep(2) + print('.', end="", flush=True) - self.server_proxy.stop_all_tasks() + reports = self.server_proxy.get_test_results() - reports = self.server_proxy.get_reports() - assert reports[-1].wasSuccessful() # check last report + for report in reports: + assert report[0] == 0 + assert report[1] != "" + assert len(report[2].errors) == 0 - # def test_jobs(self): - # raise NotImplemented + removed_results = self.server_proxy.delete_test_results() + assert len(reports) == removed_results + time.sleep(0.5) + assert not len(self.server_proxy.get_test_results()) class 
ServerTestCase2(ServerCore, unittest.TestCase): diff --git a/framework_unittests/test_A_cli.py b/framework_unittests/test_A_cli.py index 2184b19..f00dceb 100644 --- a/framework_unittests/test_A_cli.py +++ b/framework_unittests/test_A_cli.py @@ -124,7 +124,7 @@ class TestCLItoServerConnection(unittest.TestCase): @staticmethod def serverStartWithParams(): base_dir = os.path.dirname(os.path.dirname(__file__)) # This is your Project Root - config_path = os.path.join(base_dir, 'tests/configs/config_no_vlan') # Join Project Root with config + config_path = os.path.join(base_dir, 'framework_unittests/configs/config_no_vlan') Server.start(config_path=config_path) def setUp(self): @@ -136,8 +136,49 @@ class TestCLItoServerConnection(unittest.TestCase): response = os.system(self.path_cli) assert response == 0 + def test_cli_start_test_set(self): + response = os.system(self.path_cli + " start -s set_1 -r 0") + assert response == 0 + + # assumes that there is only one test in the set + while self.server_proxy.get_routers_task_queue_size(0): + time.sleep(2) + print('.', end="", flush=True) + assert len(self.server_proxy.get_test_results()) + + response = os.system(self.path_cli + " start -s set_1 -a") + assert response == 0 + + routers = self.server_proxy.get_routers() + for router in routers: + while self.server_proxy.get_routers_task_queue_size(router.id): + time.sleep(2) + print('.', end="", flush=True) + assert len(self.server_proxy.get_test_results()) == len(routers) + 1 + + def test_cli_test_results(self): + assert not os.system(self.path_cli + " results -rm -a") + os.system(self.path_cli + " start -s set_1 -a") + + routers = self.server_proxy.get_routers() + for router in routers: + while self.server_proxy.get_routers_task_queue_size(router.id): + time.sleep(2) + print('.', end="", flush=True) + + response = os.system(self.path_cli + " results -r 0") + assert response == 0 + response = os.system(self.path_cli + " results -a") + assert response == 0 + + response = os.system(self.path_cli + " results -rm") + assert response == 0 + response = os.system(self.path_cli + " results -rm -a") + assert response == 0 + response = os.system(self.path_cli + " results -rm -r 0") + assert response == 0 + assert not len(self.server_proxy.get_test_results()) + def test_get_version(self): version = self.server_proxy.get_server_version() assert version == Server.VERSION - - # TODO compare Version with Version from Server.VERSION and ./cli version (exists?)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_issue_reference", "has_added_files", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 1, "test_score": 3 }, "num_modified_files": 8 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "pytest-cache", "pytest-xdist" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.16 astroid==3.3.9 attrs==25.3.0 babel==2.17.0 bcrypt==4.3.0 certifi==2025.1.31 cffi==1.17.1 charset-normalizer==3.4.1 codecov==2.1.13 commonmark==0.9.1 coverage==7.8.0 cryptography==44.0.2 dill==0.3.9 docutils==0.21.2 ecdsa==0.19.1 exceptiongroup==1.2.2 execnet==2.1.1 h11==0.14.0 idna==3.10 imagesize==1.4.1 importlib_metadata==8.6.1 iniconfig==2.1.0 ipaddress==1.0.23 isort==6.0.1 Jinja2==3.1.6 MarkupSafe==3.0.2 mccabe==0.7.0 outcome==1.3.0.post0 packaging==24.2 paramiko==3.5.1 pep8==1.7.1 platformdirs==4.3.7 pluggy==1.5.0 pycparser==2.22 pycrypto==2.6.1 pyflakes==3.3.2 Pygments==2.19.1 pylint==3.3.6 PyNaCl==1.5.0 pyroute2==0.8.1 PySocks==1.7.1 pytest==8.3.5 pytest-cache==1.0 pytest-cov==6.0.0 pytest-xdist==3.6.1 PyYAML==6.0.2 recommonmark==0.7.1 requests==2.32.3 selenium==4.30.0 six==1.17.0 sniffio==1.3.1 snowballstemmer==2.2.0 sortedcontainers==2.4.0 Sphinx==7.4.7 sphinx-rtd-theme==3.0.2 sphinxcontrib-applehelp==2.0.0 sphinxcontrib-devhelp==2.0.0 sphinxcontrib-htmlhelp==2.1.0 sphinxcontrib-jquery==4.1 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==2.0.0 sphinxcontrib-serializinghtml==2.0.0 -e git+https://github.com/PumucklOnTheAir/TestFramework.git@8d1c52e7c2569834f12f7a0739748e7333ca0099#egg=TestFramework tomli==2.2.1 tomlkit==0.13.2 trio==0.29.0 trio-websocket==0.12.2 typing==3.7.4.3 typing_extensions==4.13.0 urllib3==2.3.0 websocket-client==1.8.0 wsproto==1.2.0 zipp==3.21.0
name: TestFramework channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.16 - astroid==3.3.9 - attrs==25.3.0 - babel==2.17.0 - bcrypt==4.3.0 - certifi==2025.1.31 - cffi==1.17.1 - charset-normalizer==3.4.1 - codecov==2.1.13 - commonmark==0.9.1 - coverage==7.8.0 - cryptography==44.0.2 - dill==0.3.9 - docutils==0.21.2 - ecdsa==0.19.1 - exceptiongroup==1.2.2 - execnet==2.1.1 - h11==0.14.0 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==8.6.1 - iniconfig==2.1.0 - ipaddress==1.0.23 - isort==6.0.1 - jinja2==3.1.6 - markupsafe==3.0.2 - mccabe==0.7.0 - outcome==1.3.0.post0 - packaging==24.2 - paramiko==3.5.1 - pep8==1.7.1 - platformdirs==4.3.7 - pluggy==1.5.0 - pycparser==2.22 - pycrypto==2.6.1 - pyflakes==3.3.2 - pygments==2.19.1 - pylint==3.3.6 - pynacl==1.5.0 - pyroute2==0.8.1 - pysocks==1.7.1 - pytest==8.3.5 - pytest-cache==1.0 - pytest-cov==6.0.0 - pytest-xdist==3.6.1 - pyyaml==6.0.2 - recommonmark==0.7.1 - requests==2.32.3 - selenium==4.30.0 - six==1.17.0 - sniffio==1.3.1 - snowballstemmer==2.2.0 - sortedcontainers==2.4.0 - sphinx==7.4.7 - sphinx-rtd-theme==3.0.2 - sphinxcontrib-applehelp==2.0.0 - sphinxcontrib-devhelp==2.0.0 - sphinxcontrib-htmlhelp==2.1.0 - sphinxcontrib-jquery==4.1 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==2.0.0 - sphinxcontrib-serializinghtml==2.0.0 - tomli==2.2.1 - tomlkit==0.13.2 - trio==0.29.0 - trio-websocket==0.12.2 - typing==3.7.4.3 - typing-extensions==4.13.0 - urllib3==2.3.0 - websocket-client==1.8.0 - wsproto==1.2.0 - zipp==3.21.0 prefix: /opt/conda/envs/TestFramework
[ "framework_unittests/test_A_IPC.py::TestIPC::test_proxy_object" ]
[ "framework_unittests/test_A_Server_2.py::ServerTestCase2::test_test_set", "framework_unittests/test_A_cli.py::TestCLItoServerConnection::test_get_version", "firmware_tests/demo_1.py::ConnectionTest::test_ping_local", "firmware_tests/demo_1.py::ConnectionTest::test_self_router", "framework_unittests/test_A_Server_2.py::ServerTestCase2::test_get_routers", "framework_unittests/test_A_Server_2.py::ServerTestCase2::test_test_results", "framework_unittests/test_A_cli.py::TestCLItoServerConnection::test_cli_connected", "framework_unittests/test_A_cli.py::TestCLItoServerConnection::test_cli_start_test_set", "framework_unittests/test_A_cli.py::TestCLItoServerConnection::test_cli_test_results" ]
[ "firmware_tests/demo_2.py::StupidTest::test_buzz1", "firmware_tests/demo_2.py::StupidTest::test_foo2", "firmware_tests/demo_2.py::StupidTest::test_not_very_long_test", "framework_unittests/test_AP_Yaml.py::MyTestCase::test_config_router_auto", "framework_unittests/test_AP_Yaml.py::MyTestCase::test_config_router_auto_with_length", "framework_unittests/test_AP_Yaml.py::MyTestCase::test_config_router_manual", "framework_unittests/test_AP_Yaml.py::MyTestCase::test_config_server", "framework_unittests/test_AP_Yaml.py::MyTestCase::test_config_server_dict", "framework_unittests/test_AP_Yaml.py::MyTestCase::test_config_server_list", "framework_unittests/test_AP_Yaml.py::MyTestCase::test_config_server_prop", "framework_unittests/test_AP_Yaml.py::MyTestCase::test_firmware_config", "framework_unittests/test_AP_Yaml.py::MyTestCase::test_firmware_dict", "framework_unittests/test_AP_Yaml.py::MyTestCase::test_firmware_list", "framework_unittests/test_AP_Yaml.py::MyTestCase::test_firmware_property", "framework_unittests/test_AP_Yaml.py::MyTestCase::test_get_config_router_auto", "framework_unittests/test_AP_Yaml.py::MyTestCase::test_get_config_router_manual", "framework_unittests/test_AP_Yaml.py::MyTestCase::test_web_interface_config", "framework_unittests/test_AP_Yaml.py::MyTestCase::test_web_interface_dict", "framework_unittests/test_AP_Yaml.py::MyTestCase::test_web_interface_list", "framework_unittests/test_AP_cli_util.py::MyTestCase::test_create_util", "framework_unittests/test_A_IPC.py::TestIPC::test_exist_stop_event", "framework_unittests/test_A_cli.py::CLITestClass::test_no_args", "framework_unittests/test_A_cli.py::CLITestClass::test_reboot", "framework_unittests/test_A_cli.py::CLITestClass::test_status", "framework_unittests/test_A_cli.py::CLITestClass::test_sysupdate", "framework_unittests/test_A_cli.py::CLITestClass::test_sysupgrade", "framework_unittests/test_A_cli.py::CLITestClass::test_update_info", "framework_unittests/test_A_cli.py::CLITestClass::test_webconfig" ]
[]
null
466
sympy__sympy-10753
b23618db3be6f3af9f288002064a14b5aa4ea932
2016-03-07 20:40:57
8bb5814067cfa0348fb8b708848f35dba2b55ff4
diff --git a/sympy/core/basic.py b/sympy/core/basic.py index d8dec8588a..bc9fef1d79 100644 --- a/sympy/core/basic.py +++ b/sympy/core/basic.py @@ -1,5 +1,6 @@ """Base class for all the objects in SymPy""" from __future__ import print_function, division +from collections import Mapping from .assumptions import BasicMeta, ManagedProperties from .cache import cacheit @@ -833,7 +834,7 @@ def subs(self, *args, **kwargs): sequence = args[0] if isinstance(sequence, set): unordered = True - elif isinstance(sequence, (Dict, dict)): + elif isinstance(sequence, (Dict, Mapping)): unordered = True sequence = sequence.items() elif not iterable(sequence): diff --git a/sympy/functions/combinatorial/numbers.py b/sympy/functions/combinatorial/numbers.py index 75a2ae6bca..f4d2bc28ce 100644 --- a/sympy/functions/combinatorial/numbers.py +++ b/sympy/functions/combinatorial/numbers.py @@ -164,7 +164,6 @@ def _eval_rewrite_as_sqrt(self, n): # # #----------------------------------------------------------------------------# - class bernoulli(Function): r""" Bernoulli numbers / Bernoulli polynomials @@ -1003,6 +1002,7 @@ def _eval_is_prime(self): return (n - 8).is_zero + ####################################################################### ### ### Functions for enumerating partitions, permutations and combinations
Any dict-like object in expr.subs https://github.com/sympy/sympy/blob/master/sympy/core/basic.py#L836 Explicitly checks for instances of `Dict` or `dict`. Realistically this should be anything `dict`-like, for example a `collections.ChainMap` object. As `Dict` is your own wrapper, it is up to you how to handle its inheritance; however, I suggest that rather than, or in addition to, `dict`, you check for `collections.abc.Mapping`. All standard-library `dict`-like objects are instances of `collections.abc.Mapping`, so this should cover all cases within the stdlib. Currently I have to wrap my `ChainMap`s in a `dict()` call, which wastes computation time and memory, when duck typing should allow `OrderedDict` or `ChainMap` as drop-in replacements for `dict`.
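A minimal sketch of the requested behaviour, assuming Python 3 (for `ChainMap`) and a SymPy build that already carries the `Mapping` check from the patch above:

```python
from collections import ChainMap, OrderedDict
from sympy import symbols

x, y = symbols('x y')
expr = x + 2*y

# Any Mapping is accepted directly as a substitution table once the
# isinstance check covers collections.abc.Mapping:
print(expr.subs(ChainMap({x: 1}, {y: 2})))       # 5
print(expr.subs(OrderedDict([(x, 1), (y, 2)])))  # 5

# Without the fix the ChainMap first has to be copied, e.g. expr.subs(dict(chain_map)).
```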
sympy/sympy
diff --git a/sympy/core/tests/test_basic.py b/sympy/core/tests/test_basic.py index 148082694c..c2de60073f 100644 --- a/sympy/core/tests/test_basic.py +++ b/sympy/core/tests/test_basic.py @@ -1,6 +1,9 @@ """This tests sympy/core/basic.py with (ideally) no reference to subclasses of Basic or Atom.""" +import collections +import sys + from sympy.core.basic import Basic, Atom, preorder_traversal from sympy.core.singleton import S, Singleton from sympy.core.symbol import symbols @@ -64,6 +67,10 @@ def test_subs(): assert b21.subs([(b2, b1), (b1, b2)]) == Basic(b2, b2) assert b21.subs({b1: b2, b2: b1}) == Basic(b2, b2) + if sys.version_info >= (3, 3): + assert b21.subs(collections.ChainMap({b1: b2}, {b2: b1})) == Basic(b2, b2) + if sys.version_info >= (2, 7): + assert b21.subs(collections.OrderedDict([(b2, b1), (b1, b2)])) == Basic(b2, b2) raises(ValueError, lambda: b21.subs('bad arg')) raises(ValueError, lambda: b21.subs(b1, b2, b3))
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 2 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "mpmath>=0.19", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.7", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi @ file:///croot/certifi_1671487769961/work/certifi exceptiongroup==1.2.2 importlib-metadata==6.7.0 iniconfig==2.0.0 mpmath==1.3.0 packaging==24.0 pluggy==1.2.0 pytest==7.4.4 -e git+https://github.com/sympy/sympy.git@b23618db3be6f3af9f288002064a14b5aa4ea932#egg=sympy tomli==2.0.1 typing_extensions==4.7.1 zipp==3.15.0
name: sympy channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2022.12.7=py37h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=22.3.1=py37h06a4308_0 - python=3.7.16=h7a1cb2a_0 - readline=8.2=h5eee18b_0 - setuptools=65.6.3=py37h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.38.4=py37h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - exceptiongroup==1.2.2 - importlib-metadata==6.7.0 - iniconfig==2.0.0 - mpmath==1.3.0 - packaging==24.0 - pluggy==1.2.0 - pytest==7.4.4 - tomli==2.0.1 - typing-extensions==4.7.1 - zipp==3.15.0 prefix: /opt/conda/envs/sympy
[ "sympy/core/tests/test_basic.py::test_subs" ]
[]
[ "sympy/core/tests/test_basic.py::test_structure", "sympy/core/tests/test_basic.py::test_equality", "sympy/core/tests/test_basic.py::test_matches_basic", "sympy/core/tests/test_basic.py::test_has", "sympy/core/tests/test_basic.py::test_atoms", "sympy/core/tests/test_basic.py::test_free_symbols_empty", "sympy/core/tests/test_basic.py::test_doit", "sympy/core/tests/test_basic.py::test_S", "sympy/core/tests/test_basic.py::test_xreplace", "sympy/core/tests/test_basic.py::test_Singleton", "sympy/core/tests/test_basic.py::test_preorder_traversal", "sympy/core/tests/test_basic.py::test_sorted_args", "sympy/core/tests/test_basic.py::test_call", "sympy/core/tests/test_basic.py::test_literal_evalf_is_number_is_zero_is_comparable" ]
[]
BSD
467
juju-solutions__charms.reactive-58
1ae5c5b86dff4cecfb261ebccbca7780e3546fa3
2016-03-07 23:46:27
59b07bd9447d8a4cb027ea2515089216b8d20549
diff --git a/charms/reactive/__init__.py b/charms/reactive/__init__.py index 1cdcca0..15209ec 100644 --- a/charms/reactive/__init__.py +++ b/charms/reactive/__init__.py @@ -36,6 +36,7 @@ from .decorators import only_once # noqa from .decorators import when_file_changed # noqa from . import bus +from . import relations from charmhelpers.core import hookenv from charmhelpers.core import unitdata @@ -59,6 +60,9 @@ def main(relation_name=None): if 'JUJU_HOOK_NAME' not in os.environ: os.environ['JUJU_HOOK_NAME'] = os.path.basename(sys.argv[0]) + # update data to be backwards compatible after fix for issue 28 + relations._migrate_conversations() + def flush_kv(): if unitdata._KV: unitdata._KV.flush() diff --git a/charms/reactive/relations.py b/charms/reactive/relations.py index fe513ac..efdfcc4 100644 --- a/charms/reactive/relations.py +++ b/charms/reactive/relations.py @@ -22,6 +22,7 @@ from six import with_metaclass from charmhelpers.core import hookenv from charmhelpers.core import unitdata from charmhelpers.cli import cmdline +from charms.reactive.bus import get_states from charms.reactive.bus import get_state from charms.reactive.bus import set_state from charms.reactive.bus import remove_state @@ -377,17 +378,25 @@ class Conversation(object): Conversations use the idea of :class:`scope` to determine how units and services are grouped together. """ - def __init__(self, relation_name=None, units=None, scope=None): - self.relation_name = relation_name or hookenv.relation_type() - self.units = set(units or [hookenv.remote_unit()]) - self.scope = scope or hookenv.remote_unit() + def __init__(self, namespace, units, scope): + self.namespace = namespace + self.units = set(units) + self.scope = scope + + @classmethod + def _key(cls, namespace, scope): + return 'reactive.conversations.%s.%s' % (namespace, scope) @property def key(self): """ The key under which this conversation will be stored. """ - return 'reactive.conversations.%s.%s' % (self.relation_name, self.scope) + return self._key(self.namespace, self.scope) + + @property + def relation_name(self): + return self.namespace.split(':')[0] @property def relation_ids(self): @@ -395,12 +404,13 @@ class Conversation(object): The set of IDs of the specific relation instances that this conversation is communicating with. """ - relation_ids = [] - services = set(unit.split('/')[0] for unit in self.units) - for relation_id in hookenv.relation_ids(self.relation_name): - if hookenv.remote_service_name(relation_id) in services: - relation_ids.append(relation_id) - return relation_ids + if self.scope == scopes.GLOBAL: + # the namespace is the relation name and this conv speaks for all + # connected instances of that relation + return hookenv.relation_ids(self.namespace) + else: + # the namespace is the relation ID + return [self.namespace] @classmethod def join(cls, scope): @@ -414,14 +424,20 @@ class Conversation(object): :meth:`~charmhelpers.core.unitdata.Storage.flush` be called. 
""" relation_name = hookenv.relation_type() + relation_id = hookenv.relation_id() unit = hookenv.remote_unit() service = hookenv.remote_service_name() if scope is scopes.UNIT: scope = unit + namespace = relation_id elif scope is scopes.SERVICE: scope = service - key = 'reactive.conversations.%s.%s' % (relation_name, scope) - conversation = cls.deserialize(unitdata.kv().get(key, {'scope': scope})) + namespace = relation_id + else: + namespace = relation_name + key = cls._key(namespace, scope) + data = unitdata.kv().get(key, {'namespace': namespace, 'scope': scope, 'units': []}) + conversation = cls.deserialize(data) conversation.units.add(unit) unitdata.kv().set(key, cls.serialize(conversation)) return conversation @@ -454,8 +470,8 @@ class Conversation(object): Serialize a conversation instance for storage. """ return { - 'relation_name': conversation.relation_name, - 'units': list(conversation.units), + 'namespace': conversation.namespace, + 'units': sorted(conversation.units), 'scope': conversation.scope, } @@ -643,6 +659,48 @@ class Conversation(object): return unitdata.kv().get(key, default) +def _migrate_conversations(): + """ + Due to issue #28 (https://github.com/juju-solutions/charms.reactive/issues/28), + conversations needed to be updated to be namespaced per relation ID for SERVICE + and UNIT scope. To ensure backwards compatibility, this updates all convs in + the old format to the new. + + TODO: Remove in 2.0.0 + """ + for key, data in unitdata.kv().getrange('reactive.conversations.').items(): + if 'namespace' in data: + continue + relation_name = data.pop('relation_name') + if data['scope'] == scopes.GLOBAL: + data['namespace'] = relation_name + unitdata.kv().set(key, data) + else: + # split the conv based on the relation ID + new_keys = [] + for rel_id in hookenv.relation_ids(relation_name): + new_key = Conversation._key(rel_id, data['scope']) + new_units = set(hookenv.related_units(rel_id)) & set(data['units']) + if new_units: + unitdata.kv().set(new_key, { + 'namespace': rel_id, + 'scope': data['scope'], + 'units': sorted(new_units), + }) + new_keys.append(new_key) + unitdata.kv().unset(key) + # update the states pointing to the old conv key to point to the + # (potentially multiple) new key(s) + for state, value in get_states().items(): + if not value: + continue + if key not in value['conversations']: + continue + value['conversations'].remove(key) + value['conversations'].extend(new_keys) + set_state(state, value) + + @cmdline.subcommand() def relation_call(method, relation_name=None, state=None, *args): """Invoke a method on the class implementing a relation via the CLI""" diff --git a/tox.ini b/tox.ini index cabe5d9..04bcddf 100644 --- a/tox.ini +++ b/tox.ini @@ -19,8 +19,8 @@ commands = flake8 --ignore=E501 {toxinidir}/charms [testenv:py2] basepython = python2 -commands = nosetests +commands = nosetests {posargs} [testenv:py3] basepython = python3 -commands = nosetests +commands = nosetests {posargs}
charms.reactive.relations assumes the relation name is a key The RelationBase and Conversation model assumes that a relation name is a key, rather than using the relation id + unit name. This will cause a reactive charm to fail, depending on how the related service chooses to use it. For example, here is a metadata.yaml snippet from a client that stores its data in three separate silos: ```yaml requires: session: interface: pgsql public: interface: pgsql confidential: interface: pgsql ``` On the client side, everything is fine. However, on the server side we have this: ```yaml provides: db: interface: pgsql ``` If the client is related three times to the same server, which would be common for development and staging, then the reactive model fails, as the relation name cannot be used as a key to identify which of the three db interfaces is needed. The relation id must be used.
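An illustration-only sketch of the key collision described above; the key format is taken from `Conversation._key` in the patch, while the relation IDs and the unit name are made up for the example:

```python
# Server side of the pgsql interface: three relation instances (db:1, db:2, db:3)
# all share the relation *name* 'db', so a name-based key collapses them into one
# conversation record, while an ID-based namespace keeps them apart.
scope = 'client/0'                                           # hypothetical remote unit
name_key = 'reactive.conversations.%s.%s' % ('db', scope)    # identical for db:1, db:2, db:3
id_key = 'reactive.conversations.%s.%s' % ('db:2', scope)    # unique per relation ID
print(name_key)
print(id_key)
```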
juju-solutions/charms.reactive
diff --git a/tests/test_relations.py b/tests/test_relations.py index 197f210..c4977da 100644 --- a/tests/test_relations.py +++ b/tests/test_relations.py @@ -31,6 +31,7 @@ class TestAutoAccessors(unittest.TestCase): kv_p = mock.patch.object(relations.unitdata, 'kv') self.kv = kv_p.start() self.addCleanup(kv_p.stop) + self.kv.return_value.get.side_effect = lambda k, v=None: v def test_accessor_doc(self): self.assertEqual(DummyRelationSubclass.field_one.__doc__, 'Get the field-one, if available, or None.') @@ -151,6 +152,25 @@ class TestRelationBase(unittest.TestCase): rb.conversation.assert_called_once_with('scope') conv.remove_state.assert_called_once_with('state') + def test_is_state(self): + conv = mock.Mock(name='conv') + rb = relations.RelationBase('relname', 'unit') + rb.conversation = mock.Mock(return_value=conv) + rb.conversation.return_value.is_state.return_value = False + assert not rb.is_state('state', 'scope') + rb.conversation.assert_called_once_with('scope') + conv.is_state.assert_called_once_with('state') + rb.conversation.return_value.is_state.return_value = True + assert rb.is_state('state', 'scope') + + def test_toggle_state(self): + conv = mock.Mock(name='conv') + rb = relations.RelationBase('relname', 'unit') + rb.conversation = mock.Mock(return_value=conv) + rb.toggle_state('state', 'active', 'scope') + rb.conversation.assert_called_once_with('scope') + conv.toggle_state.assert_called_once_with('state', 'active') + def test_set_remote(self): conv = mock.Mock(name='conv') rb = relations.RelationBase('relname', 'unit') @@ -190,82 +210,86 @@ class TestConversation(unittest.TestCase): if not hasattr(cls, 'assertItemsEqual'): cls.assertItemsEqual = cls.assertCountEqual - @mock.patch.object(relations, 'hookenv') - def test_init(self, hookenv): - hookenv.relation_type.return_value = 'relation_type' - hookenv.remote_unit.return_value = 'remote_unit' - - c1 = relations.Conversation() - self.assertEqual(c1.relation_name, 'relation_type') - self.assertEqual(c1.units, set(['remote_unit'])) - self.assertEqual(c1.scope, 'remote_unit') - - c2 = relations.Conversation('rel', ['unit'], 'scope') - self.assertEqual(c2.relation_name, 'rel') - self.assertEqual(c2.units, set(['unit'])) - self.assertEqual(c2.scope, 'scope') - def test_key(self): c1 = relations.Conversation('rel', ['unit'], 'scope') self.assertEqual(c1.key, 'reactive.conversations.rel.scope') - @mock.patch.object(relations.hookenv, 'remote_service_name') @mock.patch.object(relations.hookenv, 'relation_ids') - def test_relation_ids(self, relation_ids, remote_service_name): - relation_ids.return_value = ['rel:1', 'rel:2', 'rel:3'] - remote_service_name.side_effect = ['foo', 'bar', 'foo'] - c1 = relations.Conversation('rel', ['foo/1', 'qux/1', 'foo/2'], 'scope') - self.assertEqual(c1.relation_ids, ['rel:1', 'rel:3']) - self.assertEqual(remote_service_name.call_args_list, [ - mock.call('rel:1'), - mock.call('rel:2'), - mock.call('rel:3'), - ]) + def test_relation_ids(self, relation_ids): + relation_ids.return_value = ['rel:1', 'rel:2'] + c1 = relations.Conversation('rel:0', [], 'scope') + self.assertEqual(c1.relation_ids, ['rel:0']) + assert not relation_ids.called + + c2 = relations.Conversation('rel', [], relations.scopes.GLOBAL) + self.assertEqual(c2.relation_ids, ['rel:1', 'rel:2']) relation_ids.assert_called_once_with('rel') @mock.patch.object(relations, 'unitdata') @mock.patch.object(relations, 'hookenv') def test_join(self, hookenv, unitdata): hookenv.relation_type.return_value = 'relation_type' + 
hookenv.relation_id.return_value = 'relation_type:0' hookenv.remote_unit.return_value = 'service/0' hookenv.remote_service_name.return_value = 'service' unitdata.kv().get.side_effect = [ - {'scope': 'scope'}, { - 'relation_name': 'relation_type', - 'units': {'service/1'}, + 'namespace': 'relation_type', + 'units': [], + 'scope': 'my-global', + }, + { + 'namespace': 'relation_type:0', + 'units': ['service/1'], 'scope': 'service', }, - {'scope': 'service/0'}, + { + 'namespace': 'relation_type:0', + 'units': [], + 'scope': 'service/0', + }, ] - conv = relations.Conversation.join('scope') - self.assertEqual(conv.relation_name, 'relation_type') + conv = relations.Conversation.join('my-global') + self.assertEqual(conv.namespace, 'relation_type') self.assertEqual(conv.units, {'service/0'}) - self.assertEqual(conv.scope, 'scope') - unitdata.kv().get.assert_called_with('reactive.conversations.relation_type.scope', {'scope': 'scope'}) - unitdata.kv().set.assert_called_with('reactive.conversations.relation_type.scope', { - 'relation_name': 'relation_type', + self.assertEqual(conv.scope, 'my-global') + unitdata.kv().get.assert_called_with('reactive.conversations.relation_type.my-global', { + 'namespace': 'relation_type', + 'scope': 'my-global', + 'units': [], + }) + unitdata.kv().set.assert_called_with('reactive.conversations.relation_type.my-global', { + 'namespace': 'relation_type', 'units': ['service/0'], - 'scope': 'scope', + 'scope': 'my-global', }) conv = relations.Conversation.join(relations.scopes.SERVICE) - self.assertEqual(conv.relation_name, 'relation_type') + self.assertEqual(conv.namespace, 'relation_type:0') self.assertEqual(conv.units, {'service/0', 'service/1'}) self.assertEqual(conv.scope, 'service') - unitdata.kv().get.assert_called_with('reactive.conversations.relation_type.service', {'scope': 'service'}) - self.assertEqual(unitdata.kv().set.call_args[0][0], 'reactive.conversations.relation_type.service') - self.assertEqual(unitdata.kv().set.call_args[0][1]['relation_name'], 'relation_type') - self.assertItemsEqual(unitdata.kv().set.call_args[0][1]['units'], ['service/0', 'service/1']) - self.assertEqual(unitdata.kv().set.call_args[0][1]['scope'], 'service') + unitdata.kv().get.assert_called_with('reactive.conversations.relation_type:0.service', { + 'namespace': 'relation_type:0', + 'scope': 'service', + 'units': [], + }) + unitdata.kv().set.assert_called_with('reactive.conversations.relation_type:0.service', { + 'namespace': 'relation_type:0', + 'units': ['service/0', 'service/1'], + 'scope': 'service', + }) conv = relations.Conversation.join(relations.scopes.UNIT) self.assertEqual(conv.relation_name, 'relation_type') self.assertEqual(conv.units, {'service/0'}) self.assertEqual(conv.scope, 'service/0') - unitdata.kv().get.assert_called_with('reactive.conversations.relation_type.service/0', {'scope': 'service/0'}) - unitdata.kv().set.assert_called_with('reactive.conversations.relation_type.service/0', { - 'relation_name': 'relation_type', + unitdata.kv().get.assert_called_with('reactive.conversations.relation_type:0.service/0', { + 'namespace': 'relation_type:0', + 'scope': 'service/0', + 'units': [], + }) + unitdata.kv().set.assert_called_with('reactive.conversations.relation_type:0.service/0', { + 'namespace': 'relation_type:0', 'units': ['service/0'], 'scope': 'service/0', }) @@ -278,7 +302,7 @@ class TestConversation(unittest.TestCase): conv.depart() self.assertEqual(conv.units, {'service/1'}, 'scope') unitdata.kv().set.assert_called_with(conv.key, { - 'relation_name': 'rel', + 
'namespace': 'rel', 'units': ['service/1'], 'scope': 'scope', }) @@ -293,16 +317,16 @@ class TestConversation(unittest.TestCase): @mock.patch.object(relations, 'unitdata') def test_load(self, unitdata): unitdata.kv().get.side_effect = [ - {'relation_name': 'rel1', 'units': ['service/0'], 'scope': 'scope'}, + {'namespace': 'rel:1', 'units': ['service/0'], 'scope': 'scope'}, None, - {'relation_name': 'rel2', 'units': ['service/1'], 'scope': 'service'}, + {'namespace': 'rel:2', 'units': ['service/1'], 'scope': 'service'}, ] convs = relations.Conversation.load(['key1', 'key2', 'key3']) self.assertEqual(len(convs), 2) - self.assertEqual(convs[0].relation_name, 'rel1') + self.assertEqual(convs[0].relation_name, 'rel') self.assertEqual(convs[0].units, {'service/0'}) self.assertEqual(convs[0].scope, 'scope') - self.assertEqual(convs[1].relation_name, 'rel2') + self.assertEqual(convs[1].relation_name, 'rel') self.assertEqual(convs[1].units, {'service/1'}) self.assertEqual(convs[1].scope, 'service') self.assertEqual(unitdata.kv().get.call_args_list, [ @@ -460,6 +484,91 @@ class TestConversation(unittest.TestCase): kv().get.assert_called_once_with('reactive.conversations.rel.scope.local-data.foo', 'default') +class TestMigrateConvs(unittest.TestCase): + @mock.patch.object(relations, 'set_state') + @mock.patch.object(relations, 'get_states') + @mock.patch.object(relations, 'hookenv') + @mock.patch.object(relations.unitdata, 'kv') + def test_migrate(self, kv, mhookenv, get_states, set_state): + kv().getrange.side_effect = [ + {'reactive.conversations.rel:0.service': { + 'namespace': 'rel:0', + }}, + {'reactive.conversations.rel.global': { + 'relation_name': 'rel', + 'scope': 'global', + 'units': ['service/0', 'service/1', 'service/3'], + }}, + {'reactive.conversations.rel.service': { + 'relation_name': 'rel', + 'scope': 'service', + 'units': ['service/0', 'service/1', 'service/3'], + }}, + {'reactive.conversations.rel.service/3': { + 'relation_name': 'rel', + 'scope': 'service/3', + 'units': ['service/3'], + }}, + ] + mhookenv.relation_ids.return_value = ['rel:1', 'rel:2'] + mhookenv.related_units.side_effect = [ + ['service/0', 'service/2'], ['service/3'], + ['service/0', 'service/2'], ['service/3'], + ] + get_states.side_effect = [ + { + 'rel.joined': {'conversations': ['reactive.conversations.rel.service']}, + 'foo': None, + }, + { + 'rel.joined': {'conversations': ['reactive.conversations.rel.service/3']}, + 'foo': {'conversations': []}, + }, + ] + relations._migrate_conversations() + assert not kv().set.called + + kv().set.reset_mock() + relations._migrate_conversations() + kv().set.assert_called_with('reactive.conversations.rel.global', { + 'namespace': 'rel', + 'scope': 'global', + 'units': ['service/0', 'service/1', 'service/3'], + }) + assert not kv().unset.called + assert not set_state.called + + kv().set.reset_mock() + relations._migrate_conversations() + kv().set.assert_any_call('reactive.conversations.rel:1.service', { + 'namespace': 'rel:1', + 'scope': 'service', + 'units': ['service/0'], + }) + kv().set.assert_called_with('reactive.conversations.rel:2.service', { + 'namespace': 'rel:2', + 'scope': 'service', + 'units': ['service/3'], + }) + kv().unset.assert_called_with('reactive.conversations.rel.service') + set_state.assert_called_with('rel.joined', {'conversations': [ + 'reactive.conversations.rel:1.service', + 'reactive.conversations.rel:2.service', + ]}) + + kv().set.reset_mock() + relations._migrate_conversations() + 
kv().set.assert_called_with('reactive.conversations.rel:2.service/3', { + 'namespace': 'rel:2', + 'scope': 'service/3', + 'units': ['service/3'], + }) + kv().unset.assert_called_with('reactive.conversations.rel.service/3') + set_state.assert_called_with('rel.joined', {'conversations': [ + 'reactive.conversations.rel:2.service/3', + ]}) + + class TestRelationCall(unittest.TestCase): def setUp(self): self.r1 = mock.Mock(name='r1')
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 3 }
0.4
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "coverage", "mock", "nose", "flake8", "ipython", "ipdb", "pytest" ], "pre_install": null, "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 backcall==0.2.0 certifi==2021.5.30 charmhelpers==1.2.1 -e git+https://github.com/juju-solutions/charms.reactive.git@1ae5c5b86dff4cecfb261ebccbca7780e3546fa3#egg=charms.reactive coverage==6.2 decorator==5.1.1 flake8==5.0.4 importlib-metadata==4.2.0 importlib-resources==5.4.0 iniconfig==1.1.1 ipdb==0.13.13 ipython==7.16.3 ipython-genutils==0.2.0 jedi==0.17.2 Jinja2==3.0.3 MarkupSafe==2.0.1 mccabe==0.7.0 mock==5.2.0 netaddr==0.10.1 nose==1.3.7 packaging==21.3 parso==0.7.1 pbr==6.1.1 pexpect==4.9.0 pickleshare==0.7.5 pluggy==1.0.0 prompt-toolkit==3.0.36 ptyprocess==0.7.0 py==1.11.0 pyaml==23.5.8 pycodestyle==2.9.1 pyflakes==2.5.0 Pygments==2.14.0 pyparsing==3.1.4 pytest==7.0.1 PyYAML==6.0.1 six==1.17.0 tomli==1.2.3 traitlets==4.3.3 typing_extensions==4.1.1 wcwidth==0.2.13 zipp==3.6.0
name: charms.reactive channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - backcall==0.2.0 - charmhelpers==1.2.1 - coverage==6.2 - decorator==5.1.1 - flake8==5.0.4 - importlib-metadata==4.2.0 - importlib-resources==5.4.0 - iniconfig==1.1.1 - ipdb==0.13.13 - ipython==7.16.3 - ipython-genutils==0.2.0 - jedi==0.17.2 - jinja2==3.0.3 - markupsafe==2.0.1 - mccabe==0.7.0 - mock==5.2.0 - netaddr==0.10.1 - nose==1.3.7 - packaging==21.3 - parso==0.7.1 - pbr==6.1.1 - pexpect==4.9.0 - pickleshare==0.7.5 - pluggy==1.0.0 - prompt-toolkit==3.0.36 - ptyprocess==0.7.0 - py==1.11.0 - pyaml==23.5.8 - pycodestyle==2.9.1 - pyflakes==2.5.0 - pygments==2.14.0 - pyparsing==3.1.4 - pytest==7.0.1 - pyyaml==6.0.1 - six==1.17.0 - tomli==1.2.3 - traitlets==4.3.3 - typing-extensions==4.1.1 - wcwidth==0.2.13 - zipp==3.6.0 prefix: /opt/conda/envs/charms.reactive
[ "tests/test_relations.py::TestConversation::test_depart", "tests/test_relations.py::TestConversation::test_join", "tests/test_relations.py::TestConversation::test_load", "tests/test_relations.py::TestConversation::test_relation_ids", "tests/test_relations.py::TestMigrateConvs::test_migrate" ]
[]
[ "tests/test_relations.py::TestAutoAccessors::test_accessor", "tests/test_relations.py::TestAutoAccessors::test_accessor_doc", "tests/test_relations.py::TestRelationBase::test_conversation", "tests/test_relations.py::TestRelationBase::test_find_impl", "tests/test_relations.py::TestRelationBase::test_find_subclass", "tests/test_relations.py::TestRelationBase::test_from_name", "tests/test_relations.py::TestRelationBase::test_from_state", "tests/test_relations.py::TestRelationBase::test_get_local", "tests/test_relations.py::TestRelationBase::test_get_remote", "tests/test_relations.py::TestRelationBase::test_is_state", "tests/test_relations.py::TestRelationBase::test_remove_state", "tests/test_relations.py::TestRelationBase::test_set_local", "tests/test_relations.py::TestRelationBase::test_set_remote", "tests/test_relations.py::TestRelationBase::test_set_state", "tests/test_relations.py::TestRelationBase::test_toggle_state", "tests/test_relations.py::TestConversation::test_get_local", "tests/test_relations.py::TestConversation::test_get_remote", "tests/test_relations.py::TestConversation::test_is_state", "tests/test_relations.py::TestConversation::test_key", "tests/test_relations.py::TestConversation::test_remove_state", "tests/test_relations.py::TestConversation::test_set_local", "tests/test_relations.py::TestConversation::test_set_remote", "tests/test_relations.py::TestConversation::test_set_state", "tests/test_relations.py::TestConversation::test_toggle_state", "tests/test_relations.py::TestRelationCall::test_call_conversations", "tests/test_relations.py::TestRelationCall::test_call_name", "tests/test_relations.py::TestRelationCall::test_call_state", "tests/test_relations.py::TestRelationCall::test_no_impl" ]
[]
Apache License 2.0
468
sympy__sympy-10762
325341a268a9e7aa07511245b964a3d727167150
2016-03-08 12:12:40
8bb5814067cfa0348fb8b708848f35dba2b55ff4
diff --git a/sympy/core/power.py b/sympy/core/power.py index 3eaa577d84..d329846b7b 100644 --- a/sympy/core/power.py +++ b/sympy/core/power.py @@ -1196,7 +1196,7 @@ def _eval_nseries(self, x, n, logx): dn = 0 terms = [1/prefactor] - for m in range(1, ceiling((n - dn)/l*cf)): + for m in range(1, ceiling((n - dn + 1)/l*cf)): new_term = terms[-1]*(-rest) if new_term.is_Pow: new_term = new_term._eval_expand_multinomial(
(1/(x**-2 + x**-3)).series(x, 0) gives wrong result `(1/(x**-2 + x**-3)).series(x, 0)` returns `x**3 - x**4 + O(x**6)`, which is incorrect. Wolfram Alpha gives `x**3 - x**4 + x**5 - x**6 + x**7 + O(x**8)`.
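A short reproduction sketch; the "after the fix" line is the value asserted in the accompanying test patch:

```python
from sympy import symbols, series

x = symbols('x')
print(series(1/(x**-2 + x**-3), x, 0))
# reported (buggy):  x**3 - x**4 + O(x**6)          -- the x**5 term is dropped
# after the fix:     x**3 - x**4 + x**5 + O(x**6)
```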
sympy/sympy
diff --git a/sympy/series/tests/test_series.py b/sympy/series/tests/test_series.py index 5547f436e0..def2ac9019 100644 --- a/sympy/series/tests/test_series.py +++ b/sympy/series/tests/test_series.py @@ -153,3 +153,6 @@ def test_exp_product_positive_factors(): def test_issue_8805(): assert series(1, n=8) == 1 + +def test_issue_10761(): + assert series(1/(x**-2 + x**-3), x, 0) == x**3 - x**4 + x**5 + O(x**6)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 1 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "mpmath>=0.19", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.7", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi @ file:///croot/certifi_1671487769961/work/certifi exceptiongroup==1.2.2 importlib-metadata==6.7.0 iniconfig==2.0.0 mpmath==1.3.0 packaging==24.0 pluggy==1.2.0 pytest==7.4.4 -e git+https://github.com/sympy/sympy.git@325341a268a9e7aa07511245b964a3d727167150#egg=sympy tomli==2.0.1 typing_extensions==4.7.1 zipp==3.15.0
name: sympy channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2022.12.7=py37h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=22.3.1=py37h06a4308_0 - python=3.7.16=h7a1cb2a_0 - readline=8.2=h5eee18b_0 - setuptools=65.6.3=py37h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.38.4=py37h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - exceptiongroup==1.2.2 - importlib-metadata==6.7.0 - iniconfig==2.0.0 - mpmath==1.3.0 - packaging==24.0 - pluggy==1.2.0 - pytest==7.4.4 - tomli==2.0.1 - typing-extensions==4.7.1 - zipp==3.15.0 prefix: /opt/conda/envs/sympy
[ "sympy/series/tests/test_series.py::test_issue_10761" ]
[]
[ "sympy/series/tests/test_series.py::test_sin", "sympy/series/tests/test_series.py::test_cos", "sympy/series/tests/test_series.py::test_exp", "sympy/series/tests/test_series.py::test_exp2", "sympy/series/tests/test_series.py::test_issue_5223", "sympy/series/tests/test_series.py::test_issue_3978", "sympy/series/tests/test_series.py::test_acceleration", "sympy/series/tests/test_series.py::test_issue_5852", "sympy/series/tests/test_series.py::test_issue_4583", "sympy/series/tests/test_series.py::test_issue_6318", "sympy/series/tests/test_series.py::test_x_is_base_detection", "sympy/series/tests/test_series.py::test_sin_power", "sympy/series/tests/test_series.py::test_issue_7203", "sympy/series/tests/test_series.py::test_exp_product_positive_factors", "sympy/series/tests/test_series.py::test_issue_8805" ]
[]
BSD
469
sympy__sympy-10783
5ed90ba9232c8e29591990ee8deba16d55749ef4
2016-03-10 21:10:30
8bb5814067cfa0348fb8b708848f35dba2b55ff4
diff --git a/sympy/matrices/matrices.py b/sympy/matrices/matrices.py index 00ab86f2e3..95e44ee1fc 100644 --- a/sympy/matrices/matrices.py +++ b/sympy/matrices/matrices.py @@ -3220,7 +3220,8 @@ def condition_number(self): singular_values """ - + if not self: + return S.Zero singularvalues = self.singular_values() return Max(*singularvalues) / Min(*singularvalues)
condition_number() for empty matrices giving ValueError
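A minimal reproduction sketch; with the early return added by the patch, the empty matrix yields zero instead of raising:

```python
from sympy import Matrix

m = Matrix([])
# Previously Max()/Min() over an empty list of singular values raised ValueError.
print(m.condition_number())  # 0
```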
sympy/sympy
diff --git a/sympy/matrices/tests/test_matrices.py b/sympy/matrices/tests/test_matrices.py index 60dae527a7..3f741dd050 100644 --- a/sympy/matrices/tests/test_matrices.py +++ b/sympy/matrices/tests/test_matrices.py @@ -2047,6 +2047,9 @@ def test_condition_number(): assert all(Float(1.).epsilon_eq(Mc.subs(x, val).evalf()) for val in [Rational(1, 5), Rational(1, 2), Rational(1, 10), pi/2, pi, 7*pi/4 ]) + #issue 10782 + assert Matrix([]).condition_number() == 0 + def test_equality(): A = Matrix(((1, 2, 3), (4, 5, 6), (7, 8, 9)))
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 3, "test_score": 0 }, "num_modified_files": 1 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "mpmath>=0.19", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.7", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi @ file:///croot/certifi_1671487769961/work/certifi exceptiongroup==1.2.2 importlib-metadata==6.7.0 iniconfig==2.0.0 mpmath==1.3.0 packaging==24.0 pluggy==1.2.0 pytest==7.4.4 -e git+https://github.com/sympy/sympy.git@5ed90ba9232c8e29591990ee8deba16d55749ef4#egg=sympy tomli==2.0.1 typing_extensions==4.7.1 zipp==3.15.0
name: sympy channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2022.12.7=py37h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=22.3.1=py37h06a4308_0 - python=3.7.16=h7a1cb2a_0 - readline=8.2=h5eee18b_0 - setuptools=65.6.3=py37h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.38.4=py37h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - exceptiongroup==1.2.2 - importlib-metadata==6.7.0 - iniconfig==2.0.0 - mpmath==1.3.0 - packaging==24.0 - pluggy==1.2.0 - pytest==7.4.4 - tomli==2.0.1 - typing-extensions==4.7.1 - zipp==3.15.0 prefix: /opt/conda/envs/sympy
[ "sympy/matrices/tests/test_matrices.py::test_condition_number" ]
[ "sympy/matrices/tests/test_matrices.py::test_eigen_vects", "sympy/matrices/tests/test_matrices.py::test_issue_3959", "sympy/matrices/tests/test_matrices.py::test_issue_3979", "sympy/matrices/tests/test_matrices.py::test_pinv_rank_deficient", "sympy/matrices/tests/test_matrices.py::test_from_ndarray" ]
[ "sympy/matrices/tests/test_matrices.py::test_args", "sympy/matrices/tests/test_matrices.py::test_division", "sympy/matrices/tests/test_matrices.py::test_sum", "sympy/matrices/tests/test_matrices.py::test_addition", "sympy/matrices/tests/test_matrices.py::test_fancy_index_matrix", "sympy/matrices/tests/test_matrices.py::test_multiplication", "sympy/matrices/tests/test_matrices.py::test_power", "sympy/matrices/tests/test_matrices.py::test_creation", "sympy/matrices/tests/test_matrices.py::test_tolist", "sympy/matrices/tests/test_matrices.py::test_as_mutable", "sympy/matrices/tests/test_matrices.py::test_determinant", "sympy/matrices/tests/test_matrices.py::test_det_LU_decomposition", "sympy/matrices/tests/test_matrices.py::test_berkowitz_minors", "sympy/matrices/tests/test_matrices.py::test_slicing", "sympy/matrices/tests/test_matrices.py::test_submatrix_assignment", "sympy/matrices/tests/test_matrices.py::test_extract", "sympy/matrices/tests/test_matrices.py::test_reshape", "sympy/matrices/tests/test_matrices.py::test_applyfunc", "sympy/matrices/tests/test_matrices.py::test_expand", "sympy/matrices/tests/test_matrices.py::test_random", "sympy/matrices/tests/test_matrices.py::test_LUdecomp", "sympy/matrices/tests/test_matrices.py::test_LUsolve", "sympy/matrices/tests/test_matrices.py::test_QRsolve", "sympy/matrices/tests/test_matrices.py::test_inverse", "sympy/matrices/tests/test_matrices.py::test_matrix_inverse_mod", "sympy/matrices/tests/test_matrices.py::test_util", "sympy/matrices/tests/test_matrices.py::test_jacobian_hessian", "sympy/matrices/tests/test_matrices.py::test_QR", "sympy/matrices/tests/test_matrices.py::test_QR_non_square", "sympy/matrices/tests/test_matrices.py::test_nullspace", "sympy/matrices/tests/test_matrices.py::test_columnspace", "sympy/matrices/tests/test_matrices.py::test_wronskian", "sympy/matrices/tests/test_matrices.py::test_eigen", "sympy/matrices/tests/test_matrices.py::test_subs", "sympy/matrices/tests/test_matrices.py::test_xreplace", "sympy/matrices/tests/test_matrices.py::test_simplify", "sympy/matrices/tests/test_matrices.py::test_transpose", "sympy/matrices/tests/test_matrices.py::test_conjugate", "sympy/matrices/tests/test_matrices.py::test_conj_dirac", "sympy/matrices/tests/test_matrices.py::test_trace", "sympy/matrices/tests/test_matrices.py::test_shape", "sympy/matrices/tests/test_matrices.py::test_col_row_op", "sympy/matrices/tests/test_matrices.py::test_zip_row_op", "sympy/matrices/tests/test_matrices.py::test_issue_3950", "sympy/matrices/tests/test_matrices.py::test_issue_3981", "sympy/matrices/tests/test_matrices.py::test_evalf", "sympy/matrices/tests/test_matrices.py::test_is_symbolic", "sympy/matrices/tests/test_matrices.py::test_is_upper", "sympy/matrices/tests/test_matrices.py::test_is_lower", "sympy/matrices/tests/test_matrices.py::test_is_nilpotent", "sympy/matrices/tests/test_matrices.py::test_zeros_ones_fill", "sympy/matrices/tests/test_matrices.py::test_empty_zeros", "sympy/matrices/tests/test_matrices.py::test_issue_3749", "sympy/matrices/tests/test_matrices.py::test_inv_iszerofunc", "sympy/matrices/tests/test_matrices.py::test_jacobian_metrics", "sympy/matrices/tests/test_matrices.py::test_jacobian2", "sympy/matrices/tests/test_matrices.py::test_issue_4564", "sympy/matrices/tests/test_matrices.py::test_nonvectorJacobian", "sympy/matrices/tests/test_matrices.py::test_vec", "sympy/matrices/tests/test_matrices.py::test_vech", "sympy/matrices/tests/test_matrices.py::test_vech_errors", "sympy/matrices/tests/test_matrices.py::test_diag", 
"sympy/matrices/tests/test_matrices.py::test_get_diag_blocks1", "sympy/matrices/tests/test_matrices.py::test_get_diag_blocks2", "sympy/matrices/tests/test_matrices.py::test_inv_block", "sympy/matrices/tests/test_matrices.py::test_creation_args", "sympy/matrices/tests/test_matrices.py::test_diagonal_symmetrical", "sympy/matrices/tests/test_matrices.py::test_diagonalization", "sympy/matrices/tests/test_matrices.py::test_jordan_form", "sympy/matrices/tests/test_matrices.py::test_jordan_form_complex_issue_9274", "sympy/matrices/tests/test_matrices.py::test_issue_10220", "sympy/matrices/tests/test_matrices.py::test_Matrix_berkowitz_charpoly", "sympy/matrices/tests/test_matrices.py::test_exp", "sympy/matrices/tests/test_matrices.py::test_has", "sympy/matrices/tests/test_matrices.py::test_errors", "sympy/matrices/tests/test_matrices.py::test_len", "sympy/matrices/tests/test_matrices.py::test_integrate", "sympy/matrices/tests/test_matrices.py::test_limit", "sympy/matrices/tests/test_matrices.py::test_diff", "sympy/matrices/tests/test_matrices.py::test_getattr", "sympy/matrices/tests/test_matrices.py::test_hessenberg", "sympy/matrices/tests/test_matrices.py::test_cholesky", "sympy/matrices/tests/test_matrices.py::test_LDLdecomposition", "sympy/matrices/tests/test_matrices.py::test_cholesky_solve", "sympy/matrices/tests/test_matrices.py::test_LDLsolve", "sympy/matrices/tests/test_matrices.py::test_lower_triangular_solve", "sympy/matrices/tests/test_matrices.py::test_upper_triangular_solve", "sympy/matrices/tests/test_matrices.py::test_diagonal_solve", "sympy/matrices/tests/test_matrices.py::test_matrix_norm", "sympy/matrices/tests/test_matrices.py::test_singular_values", "sympy/matrices/tests/test_matrices.py::test_equality", "sympy/matrices/tests/test_matrices.py::test_col_join", "sympy/matrices/tests/test_matrices.py::test_row_insert", "sympy/matrices/tests/test_matrices.py::test_col_insert", "sympy/matrices/tests/test_matrices.py::test_normalized", "sympy/matrices/tests/test_matrices.py::test_print_nonzero", "sympy/matrices/tests/test_matrices.py::test_zeros_eye", "sympy/matrices/tests/test_matrices.py::test_is_zero", "sympy/matrices/tests/test_matrices.py::test_rotation_matrices", "sympy/matrices/tests/test_matrices.py::test_DeferredVector", "sympy/matrices/tests/test_matrices.py::test_DeferredVector_not_iterable", "sympy/matrices/tests/test_matrices.py::test_DeferredVector_Matrix", "sympy/matrices/tests/test_matrices.py::test_GramSchmidt", "sympy/matrices/tests/test_matrices.py::test_casoratian", "sympy/matrices/tests/test_matrices.py::test_zero_dimension_multiply", "sympy/matrices/tests/test_matrices.py::test_slice_issue_2884", "sympy/matrices/tests/test_matrices.py::test_slice_issue_3401", "sympy/matrices/tests/test_matrices.py::test_copyin", "sympy/matrices/tests/test_matrices.py::test_invertible_check", "sympy/matrices/tests/test_matrices.py::test_issue_5964", "sympy/matrices/tests/test_matrices.py::test_issue_7604", "sympy/matrices/tests/test_matrices.py::test_is_Identity", "sympy/matrices/tests/test_matrices.py::test_dot", "sympy/matrices/tests/test_matrices.py::test_dual", "sympy/matrices/tests/test_matrices.py::test_anti_symmetric", "sympy/matrices/tests/test_matrices.py::test_normalize_sort_diogonalization", "sympy/matrices/tests/test_matrices.py::test_issue_5321", "sympy/matrices/tests/test_matrices.py::test_issue_5320", "sympy/matrices/tests/test_matrices.py::test_cross", "sympy/matrices/tests/test_matrices.py::test_hash", "sympy/matrices/tests/test_matrices.py::test_adjoint", 
"sympy/matrices/tests/test_matrices.py::test_simplify_immutable", "sympy/matrices/tests/test_matrices.py::test_rank", "sympy/matrices/tests/test_matrices.py::test_replace", "sympy/matrices/tests/test_matrices.py::test_replace_map", "sympy/matrices/tests/test_matrices.py::test_atoms", "sympy/matrices/tests/test_matrices.py::test_pinv", "sympy/matrices/tests/test_matrices.py::test_pinv_solve", "sympy/matrices/tests/test_matrices.py::test_gauss_jordan_solve", "sympy/matrices/tests/test_matrices.py::test_issue_7201", "sympy/matrices/tests/test_matrices.py::test_free_symbols", "sympy/matrices/tests/test_matrices.py::test_hermitian", "sympy/matrices/tests/test_matrices.py::test_doit", "sympy/matrices/tests/test_matrices.py::test_issue_9457_9467_9876", "sympy/matrices/tests/test_matrices.py::test_issue_9422", "sympy/matrices/tests/test_matrices.py::test_issue_10658" ]
[]
BSD
470
BrandonLMorris__auacm-cli-11
5c13a4843e281aa1470d2bd28fe39c07f4e39e92
2016-03-11 15:15:59
5c13a4843e281aa1470d2bd28fe39c07f4e39e92
diff --git a/src/auacm/competition.py b/src/auacm/competition.py index f2d9561..794f8a4 100644 --- a/src/auacm/competition.py +++ b/src/auacm/competition.py @@ -1,6 +1,6 @@ """Subcommands related to competitions""" -import auacm, requests, textwrap +import auacm, requests, textwrap, argparse from datetime import datetime from auacm.utils import subcommand from auacm.exceptions import CompetitionNotFoundError @@ -10,7 +10,7 @@ from auacm.exceptions import CompetitionNotFoundError def get_comps(args=None): """Retrieve one or more competitions from the server""" if args: - return _get_one_comp(args) + return get_one_comp(args) response = requests.get(auacm.BASE_URL + 'competitions') @@ -35,13 +35,30 @@ def get_comps(args=None): {} ''').format(current, upcoming, past).strip() -def _get_one_comp(args): +def get_one_comp(args): """Retrieve info on one specific competition""" - response = requests.get(auacm.BASE_URL + 'competitions/' + str(args[0])) + parser = argparse.ArgumentParser( + add_help=False, + usage='competition [-i/--id] <competition>' + ) + parser.add_argument('-i', '--id', action='store_true') + parser.add_argument('competition') + args = parser.parse_args(args) + + if not args.id: + cid = _cid_from_name(args.competition) + if cid == -1: + raise CompetitionNotFoundError( + 'Could not find a competition with the name ' + + args.competition) + else: + cid = args.competition + + response = requests.get(auacm.BASE_URL + 'competitions/' + str(cid)) if not response.ok or response.status_code == 404: raise CompetitionNotFoundError( - 'Could not find competition with id: ' + str(args[0])) + 'Could not find competition with id: ' + str(args.competition)) comp = response.json()['data'] @@ -62,6 +79,21 @@ def _get_one_comp(args): {} ''').format(comp_str, teams, problems) +def _cid_from_name(comp_name): + """Return the competition of an id based on it's name""" + comps = requests.get(auacm.BASE_URL + 'competitions').json()['data'] + for comp in comps['upcoming']: + if comp_name.lower() in comp['name'].lower(): + return int(comp['cid']) + for comp in comps['ongoing']: + if comp_name.lower() in comp['name'].lower(): + return int(comp['cid']) + for comp in comps['past']: + if comp_name.lower() in comp['name'].lower(): + return int(comp['cid']) + + return -1 + def _format_comps(comps): """Return a formatted string for a list of competitions""" result = list() @@ -85,7 +117,7 @@ def _format_teams(teams): def _format_problems(probs): """Return a formatted string of the problems passed in""" result = '' - for label, prob in probs.items(): + for label, prob in sorted(probs.items()): result += '{}\t{} ({})\n'.format(label, prob['name'], prob['pid']) return result.strip() diff --git a/src/auacm/main.py b/src/auacm/main.py index 32d55e0..35e463b 100644 --- a/src/auacm/main.py +++ b/src/auacm/main.py @@ -7,7 +7,7 @@ The central entry point of the auacm app. import requests, sys, textwrap import auacm import auacm.utils as utils -from auacm.exceptions import ConnectionError, ProblemNotFoundError, UnauthorizedException, InvalidSubmission +from auacm.exceptions import ConnectionError, ProblemNotFoundError, UnauthorizedException, InvalidSubmission, CompetitionNotFoundError def main(args): """ @@ -44,7 +44,8 @@ def main(args): print(utils.callbacks[args[0]](args[1:]) or '') except (ProblemNotFoundError, UnauthorizedException, - InvalidSubmission) as exp: + InvalidSubmission, + CompetitionNotFoundError) as exp: print(exp.message) exit(1) except (requests.exceptions.ConnectionError, ConnectionError):
List recent and ongoing competitions A `comp[etition]` command that will simply list (chronologically) the recent and ongoing competitions.
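A hypothetical usage sketch of the subcommand as the tests exercise it; the function name and flags come from the patch, and a reachable AUACM server is assumed:

```python
from auacm.competition import get_comps

print(get_comps())               # chronological list of past, ongoing and upcoming competitions
print(get_comps(['-i', '2']))    # one competition, looked up by id
print(get_comps(['ongoing']))    # one competition, looked up by (partial) name
```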
BrandonLMorris/auacm-cli
diff --git a/tests/competition_tests.py b/tests/competition_tests.py index 7f7c381..b89e62e 100644 --- a/tests/competition_tests.py +++ b/tests/competition_tests.py @@ -21,16 +21,38 @@ class CompetitionTests(unittest.TestCase): self.assertTrue('past fake mock' in result.lower()) @patch('requests.get') - def testGetOneCompetition(self, mock_get): - """Successfully get one competition by it's id""" + def testGetOneCompetitionById(self, mock_get): + """Successfully get one competition by its id""" mock_get.return_value = MockResponse(json=COMPETITION_DETAIL) - result = auacm.competition.get_comps(['2']) + result = auacm.competition.get_comps(['-i', '2']) self.assertTrue('ongoing fake mock' in result.lower()) self.assertTrue('fake problem a' in result.lower()) self.assertTrue('brando the mando' in result.lower()) + @patch('requests.get') + def testGetOneCompetitionByName(self, mock_get): + """Successfully get one competition by its name""" + mock_get.side_effect = [ + MockResponse(json=COMPETITIONS_RESPONSE), + MockResponse(json=COMPETITION_DETAIL)] + + result = auacm.competition.get_comps(['ongoing']) + + self.assertTrue('ongoing fake mock' in result.lower()) + self.assertTrue('fake problem a' in result.lower()) + self.assertTrue('brando the mando' in result.lower()) + + @patch('requests.get') + def testGetOneCompetitionBadName(self, mock_get): + """Attempt to get a competition that doesn't exist by name""" + mock_get.side_effect = [ + MockResponse(json=COMPETITIONS_RESPONSE)] + + self.assertRaises( + auacm.exceptions.CompetitionNotFoundError, + auacm.competition.get_comps, ['not real']) @patch('requests.get') def testGetOneCompetitionBad(self, mock_get): @@ -39,7 +61,7 @@ class CompetitionTests(unittest.TestCase): self.assertRaises( auacm.exceptions.CompetitionNotFoundError, - auacm.competition.get_comps, ['99999999']) + auacm.competition.get_comps, ['-i', '99999999']) if __name__ == '__main__':
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 2, "test_score": 3 }, "num_modified_files": 2 }
0.2
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requests", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
-e git+https://github.com/BrandonLMorris/auacm-cli.git@5c13a4843e281aa1470d2bd28fe39c07f4e39e92#egg=auacm Brotli @ file:///croot/brotli-split_1736182456865/work certifi @ file:///croot/certifi_1738623731865/work/certifi charset-normalizer @ file:///croot/charset-normalizer_1721748349566/work exceptiongroup==1.2.2 idna @ file:///croot/idna_1714398848350/work iniconfig==2.1.0 packaging==24.2 pluggy==1.5.0 PySocks @ file:///tmp/build/80754af9/pysocks_1605305812635/work pytest==8.3.5 requests @ file:///croot/requests_1730999120400/work tomli==2.2.1 urllib3 @ file:///croot/urllib3_1737133630106/work
name: auacm-cli channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - brotli-python=1.0.9=py39h6a678d5_9 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2025.1.31=py39h06a4308_0 - charset-normalizer=3.3.2=pyhd3eb1b0_0 - idna=3.7=py39h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - pysocks=1.7.1=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - requests=2.32.3=py39h06a4308_1 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - urllib3=2.3.0=py39h06a4308_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - exceptiongroup==1.2.2 - iniconfig==2.1.0 - packaging==24.2 - pluggy==1.5.0 - pytest==8.3.5 - tomli==2.2.1 prefix: /opt/conda/envs/auacm-cli
[ "tests/competition_tests.py::CompetitionTests::testGetOneCompetitionBadName", "tests/competition_tests.py::CompetitionTests::testGetOneCompetitionByName" ]
[]
[ "tests/competition_tests.py::CompetitionTests::testGetAllCompetitons", "tests/competition_tests.py::CompetitionTests::testGetOneCompetitionBad", "tests/competition_tests.py::CompetitionTests::testGetOneCompetitionById" ]
[]
MIT License
471
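The auacm-cli patch above replaces a bare positional id with an argparse flag plus a case-insensitive name search. The following standalone sketch illustrates the same name-or-id lookup pattern; `resolve_competition` and the competition data are hypothetical, not code from auacm-cli.

```python
import argparse

def resolve_competition(args, competitions):
    """Return a competition id: numeric when -i/--id is passed, otherwise the
    first competition whose name contains the argument (case-insensitive)."""
    parser = argparse.ArgumentParser(add_help=False)
    parser.add_argument('-i', '--id', action='store_true')
    parser.add_argument('competition')
    parsed = parser.parse_args(args)

    if parsed.id:
        return int(parsed.competition)
    for comp in competitions:
        if parsed.competition.lower() in comp['name'].lower():
            return comp['cid']
    raise LookupError('no competition matching ' + parsed.competition)

# Hypothetical data shaped like the mocked API responses in this row's tests.
comps = [{'cid': 2, 'name': 'Ongoing Fake Mock'}, {'cid': 5, 'name': 'Past Fake Mock'}]
print(resolve_competition(['ongoing'], comps))  # 2
print(resolve_competition(['-i', '5'], comps))  # 5
```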
docker__docker-py-988
fa7068cb7cf2ae1efcc2b3b99f24f4c7aa29e989
2016-03-11 20:04:00
4c34be5d4ab8a5a017950712e9c96b56d78d1c58
diff --git a/docker/utils/utils.py b/docker/utils/utils.py index bc26ce82..d4393d58 100644 --- a/docker/utils/utils.py +++ b/docker/utils/utils.py @@ -460,16 +460,16 @@ def kwargs_from_env(ssl_version=None, assert_hostname=None): tls_verify = os.environ.get('DOCKER_TLS_VERIFY') if tls_verify == '': tls_verify = False - enable_tls = True else: tls_verify = tls_verify is not None - enable_tls = cert_path or tls_verify + enable_tls = cert_path or tls_verify params = {} if host: - params['base_url'] = (host.replace('tcp://', 'https://') - if enable_tls else host) + params['base_url'] = ( + host.replace('tcp://', 'https://') if enable_tls else host + ) if not enable_tls: return params
Certificate error in docker ci for test-docker-py https://jenkins.dockerproject.org/job/Docker-PRs/24848/console for detail. in docker-py, when checkout to the commit of 387db11009f4b4f64a4f2c6fd64d3eeb01828585,the error appears,if I remove the commit ,we will not have the error. ``` ==================================== ERRORS ==================================== _________________ ERROR at setup of InformationTest.test_info __________________ /docker-py/tests/integration/conftest.py:17: in setup_test_session c = docker_client() /docker-py/tests/helpers.py:61: in docker_client return docker.Client(**docker_client_kwargs(**kwargs)) /docker-py/tests/helpers.py:65: in docker_client_kwargs client_kwargs = docker.utils.kwargs_from_env(assert_hostname=False) /docker-py/docker/utils/utils.py:486: in kwargs_from_env assert_fingerprint=tls_verify) /docker-py/docker/tls.py:47: in __init__ 'Path to a certificate and key files must be provided' E TLSParameterError: Path to a certificate and key files must be provided through the client_config param. TLS configurations should map the Docker CLI client configurations. See https://docs.docker.com/engine/articles/https/ for API details. ________________ ERROR at setup of InformationTest.test_search _________________ ... ```
docker/docker-py
diff --git a/tests/unit/utils_test.py b/tests/unit/utils_test.py index 87796d11..65b7cf8a 100644 --- a/tests/unit/utils_test.py +++ b/tests/unit/utils_test.py @@ -228,19 +228,7 @@ class KwargsFromEnvTest(base.BaseTestCase): DOCKER_TLS_VERIFY='') os.environ.pop('DOCKER_CERT_PATH', None) kwargs = kwargs_from_env(assert_hostname=True) - self.assertEqual('https://192.168.59.103:2376', kwargs['base_url']) - self.assertTrue('ca.pem' in kwargs['tls'].ca_cert) - self.assertTrue('cert.pem' in kwargs['tls'].cert[0]) - self.assertTrue('key.pem' in kwargs['tls'].cert[1]) - self.assertEqual(True, kwargs['tls'].assert_hostname) - self.assertEqual(False, kwargs['tls'].verify) - try: - client = Client(**kwargs) - self.assertEqual(kwargs['base_url'], client.base_url) - self.assertEqual(kwargs['tls'].cert, client.cert) - self.assertFalse(kwargs['tls'].verify) - except TypeError as e: - self.fail(e) + self.assertEqual('tcp://192.168.59.103:2376', kwargs['base_url']) def test_kwargs_from_env_no_cert_path(self): try:
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks", "has_git_commit_hash" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 2, "test_score": 0 }, "num_modified_files": 1 }
1.7
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.7", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi @ file:///croot/certifi_1671487769961/work/certifi -e git+https://github.com/docker/docker-py.git@fa7068cb7cf2ae1efcc2b3b99f24f4c7aa29e989#egg=docker_py exceptiongroup==1.2.2 importlib-metadata==6.7.0 iniconfig==2.0.0 packaging==24.0 pluggy==1.2.0 pytest==7.4.4 requests==2.5.3 six==1.17.0 tomli==2.0.1 typing_extensions==4.7.1 websocket-client==0.32.0 zipp==3.15.0
name: docker-py channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2022.12.7=py37h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=22.3.1=py37h06a4308_0 - python=3.7.16=h7a1cb2a_0 - readline=8.2=h5eee18b_0 - setuptools=65.6.3=py37h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.38.4=py37h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - exceptiongroup==1.2.2 - importlib-metadata==6.7.0 - iniconfig==2.0.0 - packaging==24.0 - pluggy==1.2.0 - pytest==7.4.4 - requests==2.5.3 - six==1.17.0 - tomli==2.0.1 - typing-extensions==4.7.1 - websocket-client==0.32.0 - zipp==3.15.0 prefix: /opt/conda/envs/docker-py
[ "tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_tls_verify_false_no_cert" ]
[ "tests/unit/utils_test.py::SSLAdapterTest::test_only_uses_tls" ]
[ "tests/unit/utils_test.py::HostConfigTest::test_create_endpoint_config_with_aliases", "tests/unit/utils_test.py::HostConfigTest::test_create_host_config_invalid_cpu_cfs_types", "tests/unit/utils_test.py::HostConfigTest::test_create_host_config_no_options", "tests/unit/utils_test.py::HostConfigTest::test_create_host_config_no_options_newer_api_version", "tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_cpu_period", "tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_cpu_quota", "tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_oom_kill_disable", "tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_shm_size", "tests/unit/utils_test.py::HostConfigTest::test_create_host_config_with_shm_size_in_mb", "tests/unit/utils_test.py::UlimitTest::test_create_host_config_dict_ulimit", "tests/unit/utils_test.py::UlimitTest::test_create_host_config_dict_ulimit_capitals", "tests/unit/utils_test.py::UlimitTest::test_create_host_config_obj_ulimit", "tests/unit/utils_test.py::UlimitTest::test_ulimit_invalid_type", "tests/unit/utils_test.py::LogConfigTest::test_create_host_config_dict_logconfig", "tests/unit/utils_test.py::LogConfigTest::test_create_host_config_obj_logconfig", "tests/unit/utils_test.py::LogConfigTest::test_logconfig_invalid_config_type", "tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_empty", "tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_no_cert_path", "tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_tls", "tests/unit/utils_test.py::KwargsFromEnvTest::test_kwargs_from_env_tls_verify_false", "tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_compact", "tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_complete", "tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_empty", "tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_list", "tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_no_mode", "tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_unicode_bytes_input", "tests/unit/utils_test.py::ConverVolumeBindsTest::test_convert_volume_binds_unicode_unicode_input", "tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_commented_line", "tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_invalid_line", "tests/unit/utils_test.py::ParseEnvFileTest::test_parse_env_file_proper", "tests/unit/utils_test.py::ParseHostTest::test_parse_host", "tests/unit/utils_test.py::ParseHostTest::test_parse_host_empty_value", "tests/unit/utils_test.py::ParseHostTest::test_parse_host_tls", "tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_no_tag", "tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_sha", "tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_image_tag", "tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_user_image_no_tag", "tests/unit/utils_test.py::ParseRepositoryTagTest::test_index_user_image_tag", "tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_no_tag", "tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_sha", "tests/unit/utils_test.py::ParseRepositoryTagTest::test_private_reg_image_tag", "tests/unit/utils_test.py::ParseDeviceTest::test_dict", "tests/unit/utils_test.py::ParseDeviceTest::test_full_string_definition", "tests/unit/utils_test.py::ParseDeviceTest::test_hybrid_list", 
"tests/unit/utils_test.py::ParseDeviceTest::test_partial_string_definition", "tests/unit/utils_test.py::ParseDeviceTest::test_permissionless_string_definition", "tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_float", "tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_invalid", "tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_maxint", "tests/unit/utils_test.py::ParseBytesTest::test_parse_bytes_valid", "tests/unit/utils_test.py::UtilsTest::test_convert_filters", "tests/unit/utils_test.py::UtilsTest::test_create_ipam_config", "tests/unit/utils_test.py::UtilsTest::test_decode_json_header", "tests/unit/utils_test.py::SplitCommandTest::test_split_command_with_unicode", "tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_matching_internal_port_ranges", "tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_matching_internal_ports", "tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_nonmatching_internal_port_ranges", "tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_nonmatching_internal_ports", "tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_one_port", "tests/unit/utils_test.py::PortsTest::test_build_port_bindings_with_port_range", "tests/unit/utils_test.py::PortsTest::test_host_only_with_colon", "tests/unit/utils_test.py::PortsTest::test_non_matching_length_port_ranges", "tests/unit/utils_test.py::PortsTest::test_port_and_range_invalid", "tests/unit/utils_test.py::PortsTest::test_port_only_with_colon", "tests/unit/utils_test.py::PortsTest::test_split_port_invalid", "tests/unit/utils_test.py::PortsTest::test_split_port_no_host_port", "tests/unit/utils_test.py::PortsTest::test_split_port_range_no_host_port", "tests/unit/utils_test.py::PortsTest::test_split_port_range_with_host_ip_no_port", "tests/unit/utils_test.py::PortsTest::test_split_port_range_with_host_port", "tests/unit/utils_test.py::PortsTest::test_split_port_range_with_protocol", "tests/unit/utils_test.py::PortsTest::test_split_port_with_host_ip", "tests/unit/utils_test.py::PortsTest::test_split_port_with_host_ip_no_port", "tests/unit/utils_test.py::PortsTest::test_split_port_with_host_port", "tests/unit/utils_test.py::PortsTest::test_split_port_with_protocol", "tests/unit/utils_test.py::ExcludePathsTest::test_directory", "tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_single_exception", "tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_subdir_exception", "tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_trailing_slash", "tests/unit/utils_test.py::ExcludePathsTest::test_directory_with_wildcard_exception", "tests/unit/utils_test.py::ExcludePathsTest::test_exclude_custom_dockerfile", "tests/unit/utils_test.py::ExcludePathsTest::test_exclude_dockerfile_child", "tests/unit/utils_test.py::ExcludePathsTest::test_exclude_dockerfile_dockerignore", "tests/unit/utils_test.py::ExcludePathsTest::test_no_dupes", "tests/unit/utils_test.py::ExcludePathsTest::test_no_excludes", "tests/unit/utils_test.py::ExcludePathsTest::test_question_mark", "tests/unit/utils_test.py::ExcludePathsTest::test_single_filename", "tests/unit/utils_test.py::ExcludePathsTest::test_single_filename_trailing_slash", "tests/unit/utils_test.py::ExcludePathsTest::test_single_subdir_single_filename", "tests/unit/utils_test.py::ExcludePathsTest::test_single_subdir_wildcard_filename", "tests/unit/utils_test.py::ExcludePathsTest::test_subdirectory", "tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_exclude", 
"tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_filename_end", "tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_filename_start", "tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_subdir_single_filename", "tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_subdir_wildcard_filename", "tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_with_exception", "tests/unit/utils_test.py::ExcludePathsTest::test_wildcard_with_wildcard_exception", "tests/unit/utils_test.py::TarTest::test_tar_with_directory_symlinks", "tests/unit/utils_test.py::TarTest::test_tar_with_empty_directory", "tests/unit/utils_test.py::TarTest::test_tar_with_excludes", "tests/unit/utils_test.py::TarTest::test_tar_with_file_symlinks" ]
[]
Apache License 2.0
472
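The docker-py fix above enables TLS only when `DOCKER_CERT_PATH` is set or `DOCKER_TLS_VERIFY` is non-empty, so a plain `DOCKER_HOST` keeps its `tcp://` scheme. Below is a simplified sketch of that environment-variable logic; the helper name `tls_kwargs_from_env` and the inline dict are illustrative, not docker-py's API.

```python
import os

def tls_kwargs_from_env(environ=os.environ):
    # TLS is enabled only if a cert path is set or DOCKER_TLS_VERIFY is non-empty.
    host = environ.get('DOCKER_HOST')
    cert_path = environ.get('DOCKER_CERT_PATH') or None
    tls_verify = bool(environ.get('DOCKER_TLS_VERIFY'))  # '' and unset both disable verify
    enable_tls = bool(cert_path or tls_verify)

    params = {}
    if host:
        params['base_url'] = (host.replace('tcp://', 'https://')
                              if enable_tls else host)
    return params

# The failing case from this row's test: verify disabled, no cert path.
print(tls_kwargs_from_env({'DOCKER_HOST': 'tcp://192.168.59.103:2376',
                           'DOCKER_TLS_VERIFY': ''}))
# -> {'base_url': 'tcp://192.168.59.103:2376'}
```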
sympy__sympy-10808
0da63c857cc29ff62c4a4b25158d43c3cf3776e6
2016-03-12 20:56:31
8bb5814067cfa0348fb8b708848f35dba2b55ff4
diff --git a/sympy/core/expr.py b/sympy/core/expr.py index eab55068b8..3c81fb4ed1 100644 --- a/sympy/core/expr.py +++ b/sympy/core/expr.py @@ -192,6 +192,8 @@ def __int__(self): # (regardless of how much extra work we do to calculate extra decimal # places) we need to test whether we are off by one. from sympy import Dummy + if not self.is_number: + raise TypeError("can't convert symbols to int") r = self.round(2) if not r.is_Number: raise TypeError("can't convert complex to int") @@ -3139,7 +3141,9 @@ def round(self, p=0): """ from sympy import Float x = self - if x.is_number and not x.is_Atom: + if not x.is_number: + raise TypeError("can't round symbolic expression") + if not x.is_Atom: xn = x.n(2) if not pure_complex(xn, or_real=True): raise TypeError('Expected a number but got %s:' % diff --git a/sympy/functions/combinatorial/numbers.py b/sympy/functions/combinatorial/numbers.py index e9f130005d..75a2ae6bca 100644 --- a/sympy/functions/combinatorial/numbers.py +++ b/sympy/functions/combinatorial/numbers.py @@ -20,7 +20,6 @@ from sympy.functions.elementary.integers import floor from sympy.functions.elementary.trigonometric import sin, cos, cot from sympy.functions.elementary.miscellaneous import sqrt -from sympy.utilities.memoization import recurrence_memo from mpmath import bernfrac, workprec from mpmath.libmp import ifib as _ifib @@ -32,6 +31,7 @@ def _product(a, b): p *= k return p +from sympy.utilities.memoization import recurrence_memo # Dummy symbol used for computing polynomial sequences diff --git a/sympy/matrices/matrices.py b/sympy/matrices/matrices.py index 95e44ee1fc..00ab86f2e3 100644 --- a/sympy/matrices/matrices.py +++ b/sympy/matrices/matrices.py @@ -3220,8 +3220,7 @@ def condition_number(self): singular_values """ - if not self: - return S.Zero + singularvalues = self.singular_values() return Max(*singularvalues) / Min(*singularvalues)
maximum recursion depth exceeded from solve(log(y) - log(0.1222*x**0.8628), x) ``` In [1]: solve(log(y) - log(0.1222*x**0.8628), x) --------------------------------------------------------------------------- RecursionError Traceback (most recent call last) <ipython-input-1-61b053bc3d22> in <module>() ----> 1 solve(log(y) - log(0.1222*x**0.8628), x) /Users/aaronmeurer/Documents/Python/sympy/sympy/sympy/solvers/solvers.py in solve(f, *symbols, **flags) 1033 if fi.has(Float): 1034 floats = True -> 1035 f[i] = nsimplify(fi, rational=True) 1036 1037 # Any embedded piecewise functions need to be brought out to the /Users/aaronmeurer/Documents/Python/sympy/sympy/sympy/simplify/simplify.py in nsimplify(expr, constants, tolerance, full, rational) 1130 """ 1131 try: -> 1132 return sympify(as_int(expr)) 1133 except (TypeError, ValueError): 1134 pass /Users/aaronmeurer/Documents/Python/sympy/sympy/sympy/core/compatibility.py in as_int(n) 400 """ 401 try: --> 402 result = int(n) 403 if result != n: 404 raise TypeError /Users/aaronmeurer/Documents/Python/sympy/sympy/sympy/core/expr.py in __int__(self) 193 # places) we need to test whether we are off by one. 194 from sympy import Dummy --> 195 r = self.round(2) 196 if not r.is_Number: 197 raise TypeError("can't convert complex to int") /Users/aaronmeurer/Documents/Python/sympy/sympy/sympy/core/expr.py in round(self, p) 3149 if not x.is_real: 3150 i, r = x.as_real_imag() -> 3151 return i.round(p) + S.ImaginaryUnit*r.round(p) 3152 if not x: 3153 return x /Users/aaronmeurer/Documents/Python/sympy/sympy/sympy/core/expr.py in round(self, p) 3149 if not x.is_real: 3150 i, r = x.as_real_imag() -> 3151 return i.round(p) + S.ImaginaryUnit*r.round(p) 3152 if not x: 3153 return x ... last 2 frames repeated, from the frame below ... /Users/aaronmeurer/Documents/Python/sympy/sympy/sympy/core/expr.py in round(self, p) 3149 if not x.is_real: 3150 i, r = x.as_real_imag() -> 3151 return i.round(p) + S.ImaginaryUnit*r.round(p) 3152 if not x: 3153 return x RecursionError: maximum recursion depth exceeded ``` It used to work in 0.7.6: ``` In [1]: solve(log(y) - log(0.1222*x**0.8628), x) Out[1]: ⎡ 2500⎤ ⎢ ────⎥ ⎢ 2157⎥ ⎣11.4314187062073⋅y ⎦ ``` I bisected it to commit 26f77778e1e93388b1d092038bcb6ccd056bd7b4 Author: Chris Smith <[email protected]> Date: Tue Dec 29 20:39:39 2015 -0600 check that round arg evaluates to a literal number @smichr
sympy/sympy
diff --git a/sympy/core/tests/test_expr.py b/sympy/core/tests/test_expr.py index 541e454167..117d75d8c6 100644 --- a/sympy/core/tests/test_expr.py +++ b/sympy/core/tests/test_expr.py @@ -442,6 +442,7 @@ def test_is_algebraic_expr(): assert (cos(y)/sqrt(x)).is_algebraic_expr(y) is False assert (cos(y)/sqrt(x)).is_algebraic_expr(x, y) is False + def test_SAGE1(): #see https://github.com/sympy/sympy/issues/3346 class MyInt: @@ -1426,10 +1427,12 @@ def test_issue_4199(): assert a._eval_interval(x, -oo, oo) == -y assert a._eval_interval(x, oo, -oo) == y + def test_eval_interval_zoo(): # Test that limit is used when zoo is returned assert Si(1/x)._eval_interval(x, 0, 1) == -pi/2 + Si(1) + def test_primitive(): assert (3*(x + 1)**2).primitive() == (3, (x + 1)**2) assert (6*x + 2).primitive() == (2, 3*x + 1) @@ -1657,6 +1660,7 @@ def test_round(): assert S.NegativeInfinity.round() == S.NegativeInfinity assert S.ComplexInfinity.round() == S.ComplexInfinity + def test_round_exception_nostr(): # Don't use the string form of the expression in the round exception, as # it's too slow @@ -1669,6 +1673,7 @@ def test_round_exception_nostr(): # Did not raise raise AssertionError("Did not raise") + def test_extract_branch_factor(): assert exp_polar(2.0*I*pi).extract_branch_factor() == (1, 1) @@ -1706,3 +1711,9 @@ def test_issue_7426(): def test_issue_10161(): x = symbols('x', real=True) assert x*abs(x)*abs(x) == x**3 + + +def test_issue_10755(): + x = symbols('x') + raises(TypeError, lambda: int(log(x))) + raises(TypeError, lambda: log(x).round(2)) diff --git a/sympy/matrices/tests/test_matrices.py b/sympy/matrices/tests/test_matrices.py index 3f741dd050..60dae527a7 100644 --- a/sympy/matrices/tests/test_matrices.py +++ b/sympy/matrices/tests/test_matrices.py @@ -2047,9 +2047,6 @@ def test_condition_number(): assert all(Float(1.).epsilon_eq(Mc.subs(x, val).evalf()) for val in [Rational(1, 5), Rational(1, 2), Rational(1, 10), pi/2, pi, 7*pi/4 ]) - #issue 10782 - assert Matrix([]).condition_number() == 0 - def test_equality(): A = Matrix(((1, 2, 3), (4, 5, 6), (7, 8, 9)))
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_git_commit_hash", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 2 }, "num_modified_files": 3 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "mpmath>=0.19", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.7", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi @ file:///croot/certifi_1671487769961/work/certifi exceptiongroup==1.2.2 importlib-metadata==6.7.0 iniconfig==2.0.0 mpmath==1.3.0 packaging==24.0 pluggy==1.2.0 pytest==7.4.4 -e git+https://github.com/sympy/sympy.git@0da63c857cc29ff62c4a4b25158d43c3cf3776e6#egg=sympy tomli==2.0.1 typing_extensions==4.7.1 zipp==3.15.0
name: sympy channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2022.12.7=py37h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=22.3.1=py37h06a4308_0 - python=3.7.16=h7a1cb2a_0 - readline=8.2=h5eee18b_0 - setuptools=65.6.3=py37h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.38.4=py37h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - exceptiongroup==1.2.2 - importlib-metadata==6.7.0 - iniconfig==2.0.0 - mpmath==1.3.0 - packaging==24.0 - pluggy==1.2.0 - pytest==7.4.4 - tomli==2.0.1 - typing-extensions==4.7.1 - zipp==3.15.0 prefix: /opt/conda/envs/sympy
[ "sympy/core/tests/test_expr.py::test_issue_10755" ]
[ "sympy/core/tests/test_expr.py::test_call_2", "sympy/core/tests/test_expr.py::test_float_0_fail", "sympy/matrices/tests/test_matrices.py::test_eigen_vects", "sympy/matrices/tests/test_matrices.py::test_issue_3959", "sympy/matrices/tests/test_matrices.py::test_issue_3979", "sympy/matrices/tests/test_matrices.py::test_pinv_rank_deficient", "sympy/matrices/tests/test_matrices.py::test_from_ndarray" ]
[ "sympy/core/tests/test_expr.py::test_basic", "sympy/core/tests/test_expr.py::test_ibasic", "sympy/core/tests/test_expr.py::test_relational", "sympy/core/tests/test_expr.py::test_relational_assumptions", "sympy/core/tests/test_expr.py::test_relational_noncommutative", "sympy/core/tests/test_expr.py::test_basic_nostr", "sympy/core/tests/test_expr.py::test_series_expansion_for_uniform_order", "sympy/core/tests/test_expr.py::test_leadterm", "sympy/core/tests/test_expr.py::test_as_leading_term", "sympy/core/tests/test_expr.py::test_leadterm2", "sympy/core/tests/test_expr.py::test_leadterm3", "sympy/core/tests/test_expr.py::test_as_leading_term2", "sympy/core/tests/test_expr.py::test_as_leading_term3", "sympy/core/tests/test_expr.py::test_as_leading_term4", "sympy/core/tests/test_expr.py::test_as_leading_term_stub", "sympy/core/tests/test_expr.py::test_atoms", "sympy/core/tests/test_expr.py::test_is_polynomial", "sympy/core/tests/test_expr.py::test_is_rational_function", "sympy/core/tests/test_expr.py::test_is_algebraic_expr", "sympy/core/tests/test_expr.py::test_SAGE1", "sympy/core/tests/test_expr.py::test_SAGE2", "sympy/core/tests/test_expr.py::test_SAGE3", "sympy/core/tests/test_expr.py::test_len", "sympy/core/tests/test_expr.py::test_doit", "sympy/core/tests/test_expr.py::test_attribute_error", "sympy/core/tests/test_expr.py::test_args", "sympy/core/tests/test_expr.py::test_noncommutative_expand_issue_3757", "sympy/core/tests/test_expr.py::test_as_numer_denom", "sympy/core/tests/test_expr.py::test_as_independent", "sympy/core/tests/test_expr.py::test_replace", "sympy/core/tests/test_expr.py::test_find", "sympy/core/tests/test_expr.py::test_count", "sympy/core/tests/test_expr.py::test_has_basics", "sympy/core/tests/test_expr.py::test_has_multiple", "sympy/core/tests/test_expr.py::test_has_piecewise", "sympy/core/tests/test_expr.py::test_has_iterative", "sympy/core/tests/test_expr.py::test_has_integrals", "sympy/core/tests/test_expr.py::test_has_tuple", "sympy/core/tests/test_expr.py::test_has_units", "sympy/core/tests/test_expr.py::test_has_polys", "sympy/core/tests/test_expr.py::test_has_physics", "sympy/core/tests/test_expr.py::test_as_poly_as_expr", "sympy/core/tests/test_expr.py::test_nonzero", "sympy/core/tests/test_expr.py::test_is_number", "sympy/core/tests/test_expr.py::test_as_coeff_add", "sympy/core/tests/test_expr.py::test_as_coeff_mul", "sympy/core/tests/test_expr.py::test_as_coeff_exponent", "sympy/core/tests/test_expr.py::test_extractions", "sympy/core/tests/test_expr.py::test_nan_extractions", "sympy/core/tests/test_expr.py::test_coeff", "sympy/core/tests/test_expr.py::test_coeff2", "sympy/core/tests/test_expr.py::test_coeff2_0", "sympy/core/tests/test_expr.py::test_coeff_expand", "sympy/core/tests/test_expr.py::test_integrate", "sympy/core/tests/test_expr.py::test_as_base_exp", "sympy/core/tests/test_expr.py::test_issue_4963", "sympy/core/tests/test_expr.py::test_action_verbs", "sympy/core/tests/test_expr.py::test_as_powers_dict", "sympy/core/tests/test_expr.py::test_as_coefficients_dict", "sympy/core/tests/test_expr.py::test_args_cnc", "sympy/core/tests/test_expr.py::test_new_rawargs", "sympy/core/tests/test_expr.py::test_issue_5226", "sympy/core/tests/test_expr.py::test_free_symbols", "sympy/core/tests/test_expr.py::test_issue_5300", "sympy/core/tests/test_expr.py::test_as_coeff_Mul", "sympy/core/tests/test_expr.py::test_as_coeff_Add", "sympy/core/tests/test_expr.py::test_expr_sorting", "sympy/core/tests/test_expr.py::test_as_ordered_factors", 
"sympy/core/tests/test_expr.py::test_as_ordered_terms", "sympy/core/tests/test_expr.py::test_sort_key_atomic_expr", "sympy/core/tests/test_expr.py::test_issue_4199", "sympy/core/tests/test_expr.py::test_eval_interval_zoo", "sympy/core/tests/test_expr.py::test_primitive", "sympy/core/tests/test_expr.py::test_issue_5843", "sympy/core/tests/test_expr.py::test_is_constant", "sympy/core/tests/test_expr.py::test_equals", "sympy/core/tests/test_expr.py::test_random", "sympy/core/tests/test_expr.py::test_round", "sympy/core/tests/test_expr.py::test_round_exception_nostr", "sympy/core/tests/test_expr.py::test_extract_branch_factor", "sympy/core/tests/test_expr.py::test_identity_removal", "sympy/core/tests/test_expr.py::test_float_0", "sympy/core/tests/test_expr.py::test_issue_6325", "sympy/core/tests/test_expr.py::test_issue_7426", "sympy/core/tests/test_expr.py::test_issue_10161", "sympy/matrices/tests/test_matrices.py::test_args", "sympy/matrices/tests/test_matrices.py::test_division", "sympy/matrices/tests/test_matrices.py::test_sum", "sympy/matrices/tests/test_matrices.py::test_addition", "sympy/matrices/tests/test_matrices.py::test_fancy_index_matrix", "sympy/matrices/tests/test_matrices.py::test_multiplication", "sympy/matrices/tests/test_matrices.py::test_power", "sympy/matrices/tests/test_matrices.py::test_creation", "sympy/matrices/tests/test_matrices.py::test_tolist", "sympy/matrices/tests/test_matrices.py::test_as_mutable", "sympy/matrices/tests/test_matrices.py::test_determinant", "sympy/matrices/tests/test_matrices.py::test_det_LU_decomposition", "sympy/matrices/tests/test_matrices.py::test_berkowitz_minors", "sympy/matrices/tests/test_matrices.py::test_slicing", "sympy/matrices/tests/test_matrices.py::test_submatrix_assignment", "sympy/matrices/tests/test_matrices.py::test_extract", "sympy/matrices/tests/test_matrices.py::test_reshape", "sympy/matrices/tests/test_matrices.py::test_applyfunc", "sympy/matrices/tests/test_matrices.py::test_expand", "sympy/matrices/tests/test_matrices.py::test_random", "sympy/matrices/tests/test_matrices.py::test_LUdecomp", "sympy/matrices/tests/test_matrices.py::test_LUsolve", "sympy/matrices/tests/test_matrices.py::test_QRsolve", "sympy/matrices/tests/test_matrices.py::test_inverse", "sympy/matrices/tests/test_matrices.py::test_matrix_inverse_mod", "sympy/matrices/tests/test_matrices.py::test_util", "sympy/matrices/tests/test_matrices.py::test_jacobian_hessian", "sympy/matrices/tests/test_matrices.py::test_QR", "sympy/matrices/tests/test_matrices.py::test_QR_non_square", "sympy/matrices/tests/test_matrices.py::test_nullspace", "sympy/matrices/tests/test_matrices.py::test_columnspace", "sympy/matrices/tests/test_matrices.py::test_wronskian", "sympy/matrices/tests/test_matrices.py::test_eigen", "sympy/matrices/tests/test_matrices.py::test_subs", "sympy/matrices/tests/test_matrices.py::test_xreplace", "sympy/matrices/tests/test_matrices.py::test_simplify", "sympy/matrices/tests/test_matrices.py::test_transpose", "sympy/matrices/tests/test_matrices.py::test_conjugate", "sympy/matrices/tests/test_matrices.py::test_conj_dirac", "sympy/matrices/tests/test_matrices.py::test_trace", "sympy/matrices/tests/test_matrices.py::test_shape", "sympy/matrices/tests/test_matrices.py::test_col_row_op", "sympy/matrices/tests/test_matrices.py::test_zip_row_op", "sympy/matrices/tests/test_matrices.py::test_issue_3950", "sympy/matrices/tests/test_matrices.py::test_issue_3981", "sympy/matrices/tests/test_matrices.py::test_evalf", 
"sympy/matrices/tests/test_matrices.py::test_is_symbolic", "sympy/matrices/tests/test_matrices.py::test_is_upper", "sympy/matrices/tests/test_matrices.py::test_is_lower", "sympy/matrices/tests/test_matrices.py::test_is_nilpotent", "sympy/matrices/tests/test_matrices.py::test_zeros_ones_fill", "sympy/matrices/tests/test_matrices.py::test_empty_zeros", "sympy/matrices/tests/test_matrices.py::test_issue_3749", "sympy/matrices/tests/test_matrices.py::test_inv_iszerofunc", "sympy/matrices/tests/test_matrices.py::test_jacobian_metrics", "sympy/matrices/tests/test_matrices.py::test_jacobian2", "sympy/matrices/tests/test_matrices.py::test_issue_4564", "sympy/matrices/tests/test_matrices.py::test_nonvectorJacobian", "sympy/matrices/tests/test_matrices.py::test_vec", "sympy/matrices/tests/test_matrices.py::test_vech", "sympy/matrices/tests/test_matrices.py::test_vech_errors", "sympy/matrices/tests/test_matrices.py::test_diag", "sympy/matrices/tests/test_matrices.py::test_get_diag_blocks1", "sympy/matrices/tests/test_matrices.py::test_get_diag_blocks2", "sympy/matrices/tests/test_matrices.py::test_inv_block", "sympy/matrices/tests/test_matrices.py::test_creation_args", "sympy/matrices/tests/test_matrices.py::test_diagonal_symmetrical", "sympy/matrices/tests/test_matrices.py::test_diagonalization", "sympy/matrices/tests/test_matrices.py::test_jordan_form", "sympy/matrices/tests/test_matrices.py::test_jordan_form_complex_issue_9274", "sympy/matrices/tests/test_matrices.py::test_issue_10220", "sympy/matrices/tests/test_matrices.py::test_Matrix_berkowitz_charpoly", "sympy/matrices/tests/test_matrices.py::test_exp", "sympy/matrices/tests/test_matrices.py::test_has", "sympy/matrices/tests/test_matrices.py::test_errors", "sympy/matrices/tests/test_matrices.py::test_len", "sympy/matrices/tests/test_matrices.py::test_integrate", "sympy/matrices/tests/test_matrices.py::test_limit", "sympy/matrices/tests/test_matrices.py::test_diff", "sympy/matrices/tests/test_matrices.py::test_getattr", "sympy/matrices/tests/test_matrices.py::test_hessenberg", "sympy/matrices/tests/test_matrices.py::test_cholesky", "sympy/matrices/tests/test_matrices.py::test_LDLdecomposition", "sympy/matrices/tests/test_matrices.py::test_cholesky_solve", "sympy/matrices/tests/test_matrices.py::test_LDLsolve", "sympy/matrices/tests/test_matrices.py::test_lower_triangular_solve", "sympy/matrices/tests/test_matrices.py::test_upper_triangular_solve", "sympy/matrices/tests/test_matrices.py::test_diagonal_solve", "sympy/matrices/tests/test_matrices.py::test_matrix_norm", "sympy/matrices/tests/test_matrices.py::test_singular_values", "sympy/matrices/tests/test_matrices.py::test_condition_number", "sympy/matrices/tests/test_matrices.py::test_equality", "sympy/matrices/tests/test_matrices.py::test_col_join", "sympy/matrices/tests/test_matrices.py::test_row_insert", "sympy/matrices/tests/test_matrices.py::test_col_insert", "sympy/matrices/tests/test_matrices.py::test_normalized", "sympy/matrices/tests/test_matrices.py::test_print_nonzero", "sympy/matrices/tests/test_matrices.py::test_zeros_eye", "sympy/matrices/tests/test_matrices.py::test_is_zero", "sympy/matrices/tests/test_matrices.py::test_rotation_matrices", "sympy/matrices/tests/test_matrices.py::test_DeferredVector", "sympy/matrices/tests/test_matrices.py::test_DeferredVector_not_iterable", "sympy/matrices/tests/test_matrices.py::test_DeferredVector_Matrix", "sympy/matrices/tests/test_matrices.py::test_GramSchmidt", "sympy/matrices/tests/test_matrices.py::test_casoratian", 
"sympy/matrices/tests/test_matrices.py::test_zero_dimension_multiply", "sympy/matrices/tests/test_matrices.py::test_slice_issue_2884", "sympy/matrices/tests/test_matrices.py::test_slice_issue_3401", "sympy/matrices/tests/test_matrices.py::test_copyin", "sympy/matrices/tests/test_matrices.py::test_invertible_check", "sympy/matrices/tests/test_matrices.py::test_issue_5964", "sympy/matrices/tests/test_matrices.py::test_issue_7604", "sympy/matrices/tests/test_matrices.py::test_is_Identity", "sympy/matrices/tests/test_matrices.py::test_dot", "sympy/matrices/tests/test_matrices.py::test_dual", "sympy/matrices/tests/test_matrices.py::test_anti_symmetric", "sympy/matrices/tests/test_matrices.py::test_normalize_sort_diogonalization", "sympy/matrices/tests/test_matrices.py::test_issue_5321", "sympy/matrices/tests/test_matrices.py::test_issue_5320", "sympy/matrices/tests/test_matrices.py::test_cross", "sympy/matrices/tests/test_matrices.py::test_hash", "sympy/matrices/tests/test_matrices.py::test_adjoint", "sympy/matrices/tests/test_matrices.py::test_simplify_immutable", "sympy/matrices/tests/test_matrices.py::test_rank", "sympy/matrices/tests/test_matrices.py::test_replace", "sympy/matrices/tests/test_matrices.py::test_replace_map", "sympy/matrices/tests/test_matrices.py::test_atoms", "sympy/matrices/tests/test_matrices.py::test_pinv", "sympy/matrices/tests/test_matrices.py::test_pinv_solve", "sympy/matrices/tests/test_matrices.py::test_gauss_jordan_solve", "sympy/matrices/tests/test_matrices.py::test_issue_7201", "sympy/matrices/tests/test_matrices.py::test_free_symbols", "sympy/matrices/tests/test_matrices.py::test_hermitian", "sympy/matrices/tests/test_matrices.py::test_doit", "sympy/matrices/tests/test_matrices.py::test_issue_9457_9467_9876", "sympy/matrices/tests/test_matrices.py::test_issue_9422", "sympy/matrices/tests/test_matrices.py::test_issue_10658" ]
[]
BSD
473
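The sympy patch above turns the infinite recursion in `Expr.round()` on symbolic input into an immediate `TypeError`. A short usage check, assuming a SymPy build that includes the patched behaviour:

```python
from sympy import log, symbols

x = symbols('x')

# Both conversions now fail fast instead of recursing through Expr.round().
for label, op in (('int', lambda e: int(e)), ('round', lambda e: e.round(2))):
    try:
        op(log(x))
    except TypeError as exc:
        print(label, 'raised TypeError:', exc)
```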
cdent__gabbi-119
040821d39bd4fa067ea947bbf37687f76f4f325b
2016-03-14 11:56:49
079743e562b576956841bf7c3ee49e35123e1d69
diff --git a/gabbi/handlers.py b/gabbi/handlers.py index 33a4175..6de7f5b 100644 --- a/gabbi/handlers.py +++ b/gabbi/handlers.py @@ -128,6 +128,8 @@ class HeadersResponseHandler(ResponseHandler): If a header value is wrapped in ``/`` it is treated as a raw regular expression. + + Headers values are always treated as strings. """ test_key_suffix = 'headers' @@ -137,10 +139,10 @@ class HeadersResponseHandler(ResponseHandler): header = header.lower() # case-insensitive comparison response = test.response - header_value = test.replace_template(value) + header_value = test.replace_template(str(value)) try: - response_value = response[header] + response_value = str(response[header]) except KeyError: raise AssertionError( "'%s' header not present in response: %s" % ( @@ -153,6 +155,6 @@ class HeadersResponseHandler(ResponseHandler): 'Expect header %s to match /%s/, got %s' % (header, header_value, response_value)) else: - test.assertEqual(header_value, response[header], + test.assertEqual(header_value, response_value, 'Expect header %s with value %s, got %s' % (header, header_value, response[header]))
If the test value for a response header is not treated by yaml as a string there is an error ``` File "/Users/cdent/src/enamel/.tox/functional/lib/python2.7/site-packages/gabbi/handlers.py", line 149, in action if header_value.startswith('/') and header_value.endswith('/'): AttributeError: 'float' object has no attribute 'startswith' ``` when: ```yaml - name: default version desc: no header sent GET: / request_headers: content-type: application/json response_headers: vary: /openstack-enamel-api-version/ openstack-enamel-api-version: 0.1 ```
cdent/gabbi
diff --git a/gabbi/tests/test_handlers.py b/gabbi/tests/test_handlers.py index a92d53e..d5688b8 100644 --- a/gabbi/tests/test_handlers.py +++ b/gabbi/tests/test_handlers.py @@ -173,6 +173,17 @@ class HandlersTest(unittest.TestCase): self.assertIn("'location' header not present in response:", str(failure.exception)) + def test_resonse_headers_stringify(self): + handler = handlers.HeadersResponseHandler(self.test_class) + self.test.test_data = {'response_headers': { + 'x-alpha-beta': 2.0, + }} + self.test.response = {'x-alpha-beta': '2.0'} + self._assert_handler(handler) + + self.test.response = {'x-alpha-beta': 2.0} + self._assert_handler(handler) + def _assert_handler(self, handler): # Instantiate our contained test class by naming its test # method and then run its tests to confirm.
{ "commit_name": "head_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 0, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 1 }
1.14
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "mock", "testrepository", "coverage", "hacking", "sphinx", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.5", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 attrs==22.2.0 Babel==2.11.0 certifi==2021.5.30 charset-normalizer==2.0.12 colorama==0.4.5 coverage==6.2 decorator==5.1.1 docutils==0.18.1 extras==1.0.0 fixtures==4.0.1 flake8==3.8.4 -e git+https://github.com/cdent/gabbi.git@040821d39bd4fa067ea947bbf37687f76f4f325b#egg=gabbi hacking==4.1.0 httplib2==0.22.0 idna==3.10 imagesize==1.4.1 importlib-metadata==4.8.3 iniconfig==1.1.1 iso8601==1.1.0 Jinja2==3.0.3 jsonpath-rw==1.4.0 jsonpath-rw-ext==1.2.2 MarkupSafe==2.0.1 mccabe==0.6.1 mock==5.2.0 packaging==21.3 pbr==6.1.1 pluggy==1.0.0 ply==3.11 py==1.11.0 pycodestyle==2.6.0 pyflakes==2.2.0 Pygments==2.14.0 pyparsing==3.1.4 pytest==7.0.1 python-subunit==1.4.2 pytz==2025.2 PyYAML==6.0.1 requests==2.27.1 six==1.17.0 snowballstemmer==2.2.0 Sphinx==5.3.0 sphinxcontrib-applehelp==1.0.2 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 testrepository==0.0.21 testtools==2.6.0 tomli==1.2.3 typing_extensions==4.1.1 urllib3==1.26.20 wsgi_intercept==1.13.1 zipp==3.6.0
name: gabbi channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - attrs==22.2.0 - babel==2.11.0 - charset-normalizer==2.0.12 - colorama==0.4.5 - coverage==6.2 - decorator==5.1.1 - docutils==0.18.1 - extras==1.0.0 - fixtures==4.0.1 - flake8==3.8.4 - hacking==4.1.0 - httplib2==0.22.0 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - iso8601==1.1.0 - jinja2==3.0.3 - jsonpath-rw==1.4.0 - jsonpath-rw-ext==1.2.2 - markupsafe==2.0.1 - mccabe==0.6.1 - mock==5.2.0 - packaging==21.3 - pbr==6.1.1 - pluggy==1.0.0 - ply==3.11 - py==1.11.0 - pycodestyle==2.6.0 - pyflakes==2.2.0 - pygments==2.14.0 - pyparsing==3.1.4 - pytest==7.0.1 - python-subunit==1.4.2 - pytz==2025.2 - pyyaml==6.0.1 - requests==2.27.1 - six==1.17.0 - snowballstemmer==2.2.0 - sphinx==5.3.0 - sphinxcontrib-applehelp==1.0.2 - sphinxcontrib-devhelp==1.0.2 - sphinxcontrib-htmlhelp==2.0.0 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==1.0.3 - sphinxcontrib-serializinghtml==1.1.5 - testrepository==0.0.21 - testtools==2.6.0 - tomli==1.2.3 - typing-extensions==4.1.1 - urllib3==1.26.20 - wsgi-intercept==1.13.1 - zipp==3.6.0 prefix: /opt/conda/envs/gabbi
[ "gabbi/tests/test_handlers.py::HandlersTest::test_resonse_headers_stringify" ]
[]
[ "gabbi/tests/test_handlers.py::HandlersTest::test_response_headers", "gabbi/tests/test_handlers.py::HandlersTest::test_response_headers_fail_data", "gabbi/tests/test_handlers.py::HandlersTest::test_response_headers_fail_header", "gabbi/tests/test_handlers.py::HandlersTest::test_response_headers_regex", "gabbi/tests/test_handlers.py::HandlersTest::test_response_json_paths", "gabbi/tests/test_handlers.py::HandlersTest::test_response_json_paths_fail_data", "gabbi/tests/test_handlers.py::HandlersTest::test_response_json_paths_fail_path", "gabbi/tests/test_handlers.py::HandlersTest::test_response_strings", "gabbi/tests/test_handlers.py::HandlersTest::test_response_strings_fail", "gabbi/tests/test_handlers.py::HandlersTest::test_response_strings_fail_big_output", "gabbi/tests/test_handlers.py::HandlersTest::test_response_strings_fail_big_payload" ]
[]
Apache License 2.0
474
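The gabbi fix above coerces both sides of the header comparison to `str`, because YAML parses an unquoted `0.1` as a float, which is what triggered the `AttributeError` on `.startswith`. A minimal reproduction with PyYAML (already listed in this row's requirements):

```python
import yaml

doc = yaml.safe_load("response_headers:\n  openstack-enamel-api-version: 0.1\n")
expected = doc['response_headers']['openstack-enamel-api-version']
print(type(expected))                 # <class 'float'> -- no .startswith() here

actual = '0.1'                        # header values in the response are strings
assert str(expected) == str(actual)   # stringify both sides, as the patch does
```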
sympy__sympy-10827
c6c5dfb9ba567c340a420af67ca0b46eace7a45a
2016-03-14 18:24:56
8bb5814067cfa0348fb8b708848f35dba2b55ff4
diff --git a/.mailmap b/.mailmap index a2c68ddad7..80364fc40b 100644 --- a/.mailmap +++ b/.mailmap @@ -239,7 +239,3 @@ Harshil Goel <[email protected]> harshil goel <[email protected]> Lukas Zorich <[email protected]> lukas <[email protected]> Shashank Agarwal <[email protected]> shashank-agg <[email protected]> Rajath Shashidhara <[email protected]> rajaths589 <[email protected]> -Chaitanya Sai Alaparthi <[email protected]> chaitanya sai alaparthi <[email protected]> -Abhinav Agarwal <[email protected]> abhinavagarwal07 <[email protected]> -Rishabh Daal <[email protected]> rd13123013 <[email protected]> -Aravind Reddy <[email protected]> Aravind <[email protected]> diff --git a/AUTHORS b/AUTHORS index 3187f72b00..4f3b21ed51 100644 --- a/AUTHORS +++ b/AUTHORS @@ -481,16 +481,3 @@ Michał Radwański <[email protected]> Jerry Li <[email protected]> Pablo Zubieta <[email protected]> Curious72 <[email protected]> -Chaitanya Sai Alaparthi <[email protected]> -Ruslan Pisarev <[email protected]> -Akash Trehan <[email protected]> -Nishant Nikhil <[email protected]> -Vladimir Poluhsin <[email protected]> -Akshay Nagar <[email protected]> -James Brandon Milam <[email protected]> -Abhinav Agarwal <[email protected]> -Rishabh Daal <[email protected]> -Sanya Khurana <[email protected]> -Aman Deep <[email protected]> -Aravind Reddy <[email protected]> -Abhishek Verma <[email protected]> diff --git a/sympy/matrices/matrices.py b/sympy/matrices/matrices.py index 95e44ee1fc..8fd39afdee 100644 --- a/sympy/matrices/matrices.py +++ b/sympy/matrices/matrices.py @@ -2080,6 +2080,8 @@ def is_nilpotent(self): >>> a.is_nilpotent() False """ + if not self: + return True if not self.is_square: raise NonSquareMatrixError( "Nilpotency is valid only for square matrices")
Is the empty matrix nilpotent? IndexError: Index out of range: a[0]
sympy/sympy
diff --git a/sympy/matrices/tests/test_matrices.py b/sympy/matrices/tests/test_matrices.py index cfd18eeb8a..94d92b117f 100644 --- a/sympy/matrices/tests/test_matrices.py +++ b/sympy/matrices/tests/test_matrices.py @@ -1189,6 +1189,8 @@ def test_is_nilpotent(): assert a.is_nilpotent() a = Matrix([[1, 0], [0, 1]]) assert not a.is_nilpotent() + a = Matrix([]) + assert a.is_nilpotent() def test_zeros_ones_fill():
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 3, "test_score": 1 }, "num_modified_files": 3 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "mpmath>=0.19", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.7", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi @ file:///croot/certifi_1671487769961/work/certifi exceptiongroup==1.2.2 importlib-metadata==6.7.0 iniconfig==2.0.0 mpmath==1.3.0 packaging==24.0 pluggy==1.2.0 pytest==7.4.4 -e git+https://github.com/sympy/sympy.git@c6c5dfb9ba567c340a420af67ca0b46eace7a45a#egg=sympy tomli==2.0.1 typing_extensions==4.7.1 zipp==3.15.0
name: sympy channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2022.12.7=py37h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=22.3.1=py37h06a4308_0 - python=3.7.16=h7a1cb2a_0 - readline=8.2=h5eee18b_0 - setuptools=65.6.3=py37h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.38.4=py37h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - exceptiongroup==1.2.2 - importlib-metadata==6.7.0 - iniconfig==2.0.0 - mpmath==1.3.0 - packaging==24.0 - pluggy==1.2.0 - pytest==7.4.4 - tomli==2.0.1 - typing-extensions==4.7.1 - zipp==3.15.0 prefix: /opt/conda/envs/sympy
[ "sympy/matrices/tests/test_matrices.py::test_is_nilpotent" ]
[ "sympy/matrices/tests/test_matrices.py::test_eigen_vects", "sympy/matrices/tests/test_matrices.py::test_issue_3959", "sympy/matrices/tests/test_matrices.py::test_issue_3979", "sympy/matrices/tests/test_matrices.py::test_pinv_rank_deficient", "sympy/matrices/tests/test_matrices.py::test_from_ndarray" ]
[ "sympy/matrices/tests/test_matrices.py::test_args", "sympy/matrices/tests/test_matrices.py::test_division", "sympy/matrices/tests/test_matrices.py::test_sum", "sympy/matrices/tests/test_matrices.py::test_addition", "sympy/matrices/tests/test_matrices.py::test_fancy_index_matrix", "sympy/matrices/tests/test_matrices.py::test_multiplication", "sympy/matrices/tests/test_matrices.py::test_power", "sympy/matrices/tests/test_matrices.py::test_creation", "sympy/matrices/tests/test_matrices.py::test_tolist", "sympy/matrices/tests/test_matrices.py::test_as_mutable", "sympy/matrices/tests/test_matrices.py::test_determinant", "sympy/matrices/tests/test_matrices.py::test_det_LU_decomposition", "sympy/matrices/tests/test_matrices.py::test_berkowitz_minors", "sympy/matrices/tests/test_matrices.py::test_slicing", "sympy/matrices/tests/test_matrices.py::test_submatrix_assignment", "sympy/matrices/tests/test_matrices.py::test_extract", "sympy/matrices/tests/test_matrices.py::test_reshape", "sympy/matrices/tests/test_matrices.py::test_applyfunc", "sympy/matrices/tests/test_matrices.py::test_expand", "sympy/matrices/tests/test_matrices.py::test_random", "sympy/matrices/tests/test_matrices.py::test_LUdecomp", "sympy/matrices/tests/test_matrices.py::test_LUsolve", "sympy/matrices/tests/test_matrices.py::test_QRsolve", "sympy/matrices/tests/test_matrices.py::test_inverse", "sympy/matrices/tests/test_matrices.py::test_matrix_inverse_mod", "sympy/matrices/tests/test_matrices.py::test_util", "sympy/matrices/tests/test_matrices.py::test_jacobian_hessian", "sympy/matrices/tests/test_matrices.py::test_QR", "sympy/matrices/tests/test_matrices.py::test_QR_non_square", "sympy/matrices/tests/test_matrices.py::test_nullspace", "sympy/matrices/tests/test_matrices.py::test_columnspace", "sympy/matrices/tests/test_matrices.py::test_wronskian", "sympy/matrices/tests/test_matrices.py::test_eigen", "sympy/matrices/tests/test_matrices.py::test_subs", "sympy/matrices/tests/test_matrices.py::test_xreplace", "sympy/matrices/tests/test_matrices.py::test_simplify", "sympy/matrices/tests/test_matrices.py::test_transpose", "sympy/matrices/tests/test_matrices.py::test_conjugate", "sympy/matrices/tests/test_matrices.py::test_conj_dirac", "sympy/matrices/tests/test_matrices.py::test_trace", "sympy/matrices/tests/test_matrices.py::test_shape", "sympy/matrices/tests/test_matrices.py::test_col_row_op", "sympy/matrices/tests/test_matrices.py::test_zip_row_op", "sympy/matrices/tests/test_matrices.py::test_issue_3950", "sympy/matrices/tests/test_matrices.py::test_issue_3981", "sympy/matrices/tests/test_matrices.py::test_evalf", "sympy/matrices/tests/test_matrices.py::test_is_symbolic", "sympy/matrices/tests/test_matrices.py::test_is_upper", "sympy/matrices/tests/test_matrices.py::test_is_lower", "sympy/matrices/tests/test_matrices.py::test_zeros_ones_fill", "sympy/matrices/tests/test_matrices.py::test_empty_zeros", "sympy/matrices/tests/test_matrices.py::test_issue_3749", "sympy/matrices/tests/test_matrices.py::test_inv_iszerofunc", "sympy/matrices/tests/test_matrices.py::test_jacobian_metrics", "sympy/matrices/tests/test_matrices.py::test_jacobian2", "sympy/matrices/tests/test_matrices.py::test_issue_4564", "sympy/matrices/tests/test_matrices.py::test_nonvectorJacobian", "sympy/matrices/tests/test_matrices.py::test_vec", "sympy/matrices/tests/test_matrices.py::test_vech", "sympy/matrices/tests/test_matrices.py::test_vech_errors", "sympy/matrices/tests/test_matrices.py::test_diag", 
"sympy/matrices/tests/test_matrices.py::test_get_diag_blocks1", "sympy/matrices/tests/test_matrices.py::test_get_diag_blocks2", "sympy/matrices/tests/test_matrices.py::test_inv_block", "sympy/matrices/tests/test_matrices.py::test_creation_args", "sympy/matrices/tests/test_matrices.py::test_diagonal_symmetrical", "sympy/matrices/tests/test_matrices.py::test_diagonalization", "sympy/matrices/tests/test_matrices.py::test_jordan_form", "sympy/matrices/tests/test_matrices.py::test_jordan_form_complex_issue_9274", "sympy/matrices/tests/test_matrices.py::test_issue_10220", "sympy/matrices/tests/test_matrices.py::test_Matrix_berkowitz_charpoly", "sympy/matrices/tests/test_matrices.py::test_exp", "sympy/matrices/tests/test_matrices.py::test_has", "sympy/matrices/tests/test_matrices.py::test_errors", "sympy/matrices/tests/test_matrices.py::test_len", "sympy/matrices/tests/test_matrices.py::test_integrate", "sympy/matrices/tests/test_matrices.py::test_limit", "sympy/matrices/tests/test_matrices.py::test_diff", "sympy/matrices/tests/test_matrices.py::test_getattr", "sympy/matrices/tests/test_matrices.py::test_hessenberg", "sympy/matrices/tests/test_matrices.py::test_cholesky", "sympy/matrices/tests/test_matrices.py::test_LDLdecomposition", "sympy/matrices/tests/test_matrices.py::test_cholesky_solve", "sympy/matrices/tests/test_matrices.py::test_LDLsolve", "sympy/matrices/tests/test_matrices.py::test_lower_triangular_solve", "sympy/matrices/tests/test_matrices.py::test_upper_triangular_solve", "sympy/matrices/tests/test_matrices.py::test_diagonal_solve", "sympy/matrices/tests/test_matrices.py::test_matrix_norm", "sympy/matrices/tests/test_matrices.py::test_singular_values", "sympy/matrices/tests/test_matrices.py::test_condition_number", "sympy/matrices/tests/test_matrices.py::test_equality", "sympy/matrices/tests/test_matrices.py::test_col_join", "sympy/matrices/tests/test_matrices.py::test_row_insert", "sympy/matrices/tests/test_matrices.py::test_col_insert", "sympy/matrices/tests/test_matrices.py::test_normalized", "sympy/matrices/tests/test_matrices.py::test_print_nonzero", "sympy/matrices/tests/test_matrices.py::test_zeros_eye", "sympy/matrices/tests/test_matrices.py::test_is_zero", "sympy/matrices/tests/test_matrices.py::test_rotation_matrices", "sympy/matrices/tests/test_matrices.py::test_DeferredVector", "sympy/matrices/tests/test_matrices.py::test_DeferredVector_not_iterable", "sympy/matrices/tests/test_matrices.py::test_DeferredVector_Matrix", "sympy/matrices/tests/test_matrices.py::test_GramSchmidt", "sympy/matrices/tests/test_matrices.py::test_casoratian", "sympy/matrices/tests/test_matrices.py::test_zero_dimension_multiply", "sympy/matrices/tests/test_matrices.py::test_slice_issue_2884", "sympy/matrices/tests/test_matrices.py::test_slice_issue_3401", "sympy/matrices/tests/test_matrices.py::test_copyin", "sympy/matrices/tests/test_matrices.py::test_invertible_check", "sympy/matrices/tests/test_matrices.py::test_issue_5964", "sympy/matrices/tests/test_matrices.py::test_issue_7604", "sympy/matrices/tests/test_matrices.py::test_is_Identity", "sympy/matrices/tests/test_matrices.py::test_dot", "sympy/matrices/tests/test_matrices.py::test_dual", "sympy/matrices/tests/test_matrices.py::test_anti_symmetric", "sympy/matrices/tests/test_matrices.py::test_normalize_sort_diogonalization", "sympy/matrices/tests/test_matrices.py::test_issue_5321", "sympy/matrices/tests/test_matrices.py::test_issue_5320", "sympy/matrices/tests/test_matrices.py::test_cross", 
"sympy/matrices/tests/test_matrices.py::test_hash", "sympy/matrices/tests/test_matrices.py::test_adjoint", "sympy/matrices/tests/test_matrices.py::test_simplify_immutable", "sympy/matrices/tests/test_matrices.py::test_rank", "sympy/matrices/tests/test_matrices.py::test_replace", "sympy/matrices/tests/test_matrices.py::test_replace_map", "sympy/matrices/tests/test_matrices.py::test_atoms", "sympy/matrices/tests/test_matrices.py::test_pinv", "sympy/matrices/tests/test_matrices.py::test_pinv_solve", "sympy/matrices/tests/test_matrices.py::test_gauss_jordan_solve", "sympy/matrices/tests/test_matrices.py::test_issue_7201", "sympy/matrices/tests/test_matrices.py::test_free_symbols", "sympy/matrices/tests/test_matrices.py::test_hermitian", "sympy/matrices/tests/test_matrices.py::test_doit", "sympy/matrices/tests/test_matrices.py::test_issue_9457_9467_9876", "sympy/matrices/tests/test_matrices.py::test_issue_9422", "sympy/matrices/tests/test_matrices.py::test_issue_10658" ]
[]
BSD
475
keenlabs__KeenClient-Python-91
add5f7f54acdc32577db690b682c4708a783c735
2016-03-14 23:31:52
142a9aad4e402985163dc472c6ae76b7e3e60bc4
diff --git a/README.md b/README.md index 47f5fcb..25f9da7 100644 --- a/README.md +++ b/README.md @@ -267,6 +267,9 @@ python setup.py tests ### Changelog +##### 0.3.21 ++ Fix bug with scoped key generation not working with newer Keen projects. + ##### 0.3.20 + Add `saved_queries` support + Add Python 3.4 support diff --git a/keen/Padding.py b/keen/Padding.py index 5615340..b213370 100644 --- a/keen/Padding.py +++ b/keen/Padding.py @@ -103,7 +103,10 @@ def appendCMSPadding(str, blocksize=AES_blocksize): def removeCMSPadding(str, blocksize=AES_blocksize): '''CMS padding: Remove padding with bytes containing the number of padding bytes ''' - pad_len = ord(str[-1]) # last byte contains number of padding bytes + try: + pad_len = ord(str[-1]) # last byte contains number of padding bytes + except TypeError: + pad_len = str[-1] assert pad_len <= blocksize, 'padding error' assert pad_len <= len(str), 'padding error' @@ -152,7 +155,10 @@ def appendZeroLenPadding(str, blocksize=AES_blocksize): def removeZeroLenPadding(str, blocksize=AES_blocksize): 'Remove Padding with zeroes + last byte equal to the number of padding bytes' - pad_len = ord(str[-1]) # last byte contains number of padding bytes + try: + pad_len = ord(str[-1]) # last byte contains number of padding bytes + except TypeError: + pad_len = str[-1] assert pad_len < blocksize, 'padding error' assert pad_len < len(str), 'padding error' diff --git a/keen/scoped_keys.py b/keen/scoped_keys.py index 4697c3b..e064c9b 100644 --- a/keen/scoped_keys.py +++ b/keen/scoped_keys.py @@ -8,22 +8,22 @@ from keen import Padding __author__ = 'dkador' # the block size for the cipher object; must be 16, 24, or 32 for AES -BLOCK_SIZE = 32 +OLD_BLOCK_SIZE = 32 -def _pad(s): +def pad_aes256(s): """ Pads an input string to a given block size. :param s: string :returns: The padded string. """ - if len(s) % BLOCK_SIZE == 0: + if len(s) % AES.block_size == 0: return s - return Padding.appendPadding(s, blocksize=BLOCK_SIZE) + return Padding.appendPadding(s, blocksize=AES.block_size) -def _unpad(s): +def unpad_aes256(s): """ Removes padding from an input string based on a given block size. :param s: string @@ -33,14 +33,96 @@ def _unpad(s): return s try: - return Padding.removePadding(s, blocksize=BLOCK_SIZE) + return Padding.removePadding(s, blocksize=AES.block_size) except AssertionError: # if there's an error while removing padding, just return s. return s -# encrypt with AES, encode with hex -def _encode_aes(key, plaintext): +def old_pad(s): + """ + Pads an input string to a given block size. + :param s: string + :returns: The padded string. + """ + if len(s) % OLD_BLOCK_SIZE == 0: + return s + + return Padding.appendPadding(s, blocksize=OLD_BLOCK_SIZE) + + +def old_unpad(s): + """ + Removes padding from an input string based on a given block size. + :param s: string + :returns: The unpadded string. + """ + if not s: + return s + + try: + return Padding.removePadding(s, blocksize=OLD_BLOCK_SIZE) + except AssertionError: + # if there's an error while removing padding, just return s. + return s + + +# encrypt with AES-256-CBC, encode with hex +def encode_aes256(key, plaintext): + """ + Utility method to encode some given plaintext with the given key. Important thing to note: + + This is not a general purpose encryption method - it has specific semantics (see below for + details). + + Takes the given hex string key and converts it to a 256 bit binary blob. Then pads the given + plaintext to AES block size which is always 16 bytes, regardless of AES key size. 
Then + encrypts using AES-256-CBC using a random IV. Then converts both the IV and the ciphertext + to hex. Finally returns the IV appended by the ciphertext. + + :param key: string, 64 hex chars long + :param plaintext: string, any amount of data + """ + if len(key) != 64: + raise TypeError("encode_aes256() expects a 256 bit key encoded as a 64 hex character string") + + # generate AES.block_size cryptographically secure random bytes for our IV (initial value) + iv = os.urandom(AES.block_size) + # set up an AES cipher object + cipher = AES.new(binascii.unhexlify(key.encode('ascii')), mode=AES.MODE_CBC, IV=iv) + # encrypt the plaintext after padding it + ciphertext = cipher.encrypt(pad_aes256(plaintext)) + # append the hexed IV and the hexed ciphertext + iv_plus_encrypted = binascii.hexlify(iv) + binascii.hexlify(ciphertext) + # return that + return iv_plus_encrypted + + +def decode_aes256(key, iv_plus_encrypted): + """ + Utility method to decode a payload consisting of the hexed IV + the hexed ciphertext using + the given key. See above for more details. + + :param key: string, 64 hex characters long + :param iv_plus_encrypted: string, a hexed IV + hexed ciphertext + """ + # grab first AES.block_size bytes (aka 2 * AES.block_size characters of hex) - that's the IV + iv_size = 2 * AES.block_size + hexed_iv = iv_plus_encrypted[:iv_size] + # grab everything else - that's the ciphertext (aka encrypted message) + hexed_ciphertext = iv_plus_encrypted[iv_size:] + # unhex the iv and ciphertext + iv = binascii.unhexlify(hexed_iv) + ciphertext = binascii.unhexlify(hexed_ciphertext) + # set up the correct AES cipher object + cipher = AES.new(binascii.unhexlify(key.encode('ascii')), mode=AES.MODE_CBC, IV=iv) + # decrypt! + plaintext = cipher.decrypt(ciphertext) + # return the unpadded version of this + return unpad_aes256(plaintext) + + +def old_encode_aes(key, plaintext): """ Utility method to encode some given plaintext with the given key. Important thing to note: @@ -57,16 +139,16 @@ def _encode_aes(key, plaintext): # generate 16 cryptographically secure random bytes for our IV (initial value) iv = os.urandom(16) # set up an AES cipher object - cipher = AES.new(_pad(key), mode=AES.MODE_CBC, IV=iv) + cipher = AES.new(old_pad(key), mode=AES.MODE_CBC, IV=iv) # encrypte the plaintext after padding it - ciphertext = cipher.encrypt(_pad(plaintext)) + ciphertext = cipher.encrypt(old_pad(plaintext)) # append the hexed IV and the hexed ciphertext iv_plus_encrypted = binascii.hexlify(iv) + binascii.hexlify(ciphertext) # return that return iv_plus_encrypted -def _decode_aes(key, iv_plus_encrypted): +def old_decode_aes(key, iv_plus_encrypted): """ Utility method to decode a payload consisting of the hexed IV + the hexed ciphertext using the given key. See above for more details. @@ -82,18 +164,27 @@ def _decode_aes(key, iv_plus_encrypted): iv = binascii.unhexlify(hexed_iv) ciphertext = binascii.unhexlify(hexed_ciphertext) # set up the correct AES cipher object - cipher = AES.new(_pad(key), mode=AES.MODE_CBC, IV=iv) + cipher = AES.new(old_pad(key), mode=AES.MODE_CBC, IV=iv) # decrypt! 
plaintext = cipher.decrypt(ciphertext) # return the unpadded version of this - return _unpad(plaintext) + return old_unpad(plaintext) def encrypt(api_key, options): options_string = json.dumps(options) - return _encode_aes(api_key, options_string) + if len(api_key) == 64: + return encode_aes256(api_key, options_string) + else: + return old_encode_aes(api_key, options_string) def decrypt(api_key, scoped_key): - json_string = _decode_aes(api_key, scoped_key) - return json.loads(json_string) \ No newline at end of file + if len(api_key) == 64: + json_string = decode_aes256(api_key, scoped_key) + else: + json_string = old_decode_aes(api_key, scoped_key) + try: + return json.loads(json_string) + except TypeError: + return json.loads(json_string.decode()) diff --git a/setup.py b/setup.py index 1aa6a03..79eeb52 100644 --- a/setup.py +++ b/setup.py @@ -28,7 +28,7 @@ if sys.version_info < (2, 7): setup( name="keen", - version="0.3.20", + version="0.3.21", description="Python Client for Keen IO", author="Keen IO", author_email="[email protected]",
New projects have 64-byte master keys, pycrypto supports only 16/24/32-byte AES Generating scoped keys is breaking on newer projects, as the master keys are now 64-byte. They seemed to have changed from 32-byte keys in the past few months. I created a project a month or two ago that has a 32-byte key, and year+ old projects have 32-byte keys. Only the newer one from a week or two ago has 64-byte. ``` python scoped_key = scoped_keys.encrypt(master_key, { "allowed_operations": ["read"], "filters": filters }) ``` Exception being thrown: `ValueError: AES key must be either 16, 24, or 32 bytes long` from pycrypto. My `keen` module is the latest `0.3.20`, and `pycrypto` is the latest `2.6.1`.
keenlabs/KeenClient-Python
diff --git a/keen/tests/scoped_key_tests.py b/keen/tests/scoped_key_tests.py new file mode 100644 index 0000000..ff65c31 --- /dev/null +++ b/keen/tests/scoped_key_tests.py @@ -0,0 +1,42 @@ +from keen import scoped_keys +from keen.tests.base_test_case import BaseTestCase + + +class ScopedKeyTests(BaseTestCase): + api_key = "24077ACBCB198BAAA2110EDDB673282F8E34909FD823A15C55A6253A664BE368" + bad_api_key = "24077ACBCB198BAAA2110EDDB673282F8E34909FD823A15C55A6253A664BE369" + old_api_key = "ab428324dbdbcfe744" + old_bad_api_key = "badbadbadbad" + options = { + "filters": [{ + "property_name": "accountId", + "operator": "eq", + "property_value": "123456" + }] + } + + def test_scoped_key_encrypts_and_decrypts(self): + encrypted = scoped_keys.encrypt(self.api_key, self.options) + decrypted = scoped_keys.decrypt(self.api_key, encrypted) + self.assert_equal(decrypted, self.options) + + def test_scoped_key_fails_decryption_bad_key(self): + encrypted = scoped_keys.encrypt(self.api_key, self.options) + try: + scoped_keys.decrypt(self.bad_api_key, encrypted) + self.fail("shouldn't get here") + except ValueError as e: + self.assert_not_equal(e, None) + + def test_old_scoped_key_encrypts_and_decrypts(self): + encrypted = scoped_keys.encrypt(self.old_api_key, self.options) + decrypted = scoped_keys.decrypt(self.old_api_key, encrypted) + self.assert_equal(decrypted, self.options) + + def test_old_scoped_key_fails_decryption_on_bad_key(self): + encrypted = scoped_keys.encrypt(self.old_api_key, self.options) + try: + scoped_keys.decrypt(self.old_bad_api_key, encrypted) + self.fail("shouldn't get here") + except ValueError as e: + self.assert_not_equal(e, None)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 4 }
0.2
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "nose", "mock", "responses", "unittest2", "pytest" ], "pre_install": null, "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi==2025.1.31 charset-normalizer==3.4.1 exceptiongroup==1.2.2 idna==3.10 iniconfig==2.1.0 -e git+https://github.com/keenlabs/KeenClient-Python.git@add5f7f54acdc32577db690b682c4708a783c735#egg=keen linecache2==1.0.0 mock==5.2.0 nose==1.3.7 packaging==24.2 Padding==0.5 pluggy==1.5.0 pycrypto==2.6.1 pytest==8.3.5 PyYAML==6.0.2 requests==2.32.3 responses==0.25.7 six==1.17.0 tomli==2.2.1 traceback2==1.4.0 unittest2==1.1.0 urllib3==2.3.0
name: KeenClient-Python channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - argparse==1.4.0 - certifi==2025.1.31 - charset-normalizer==3.4.1 - exceptiongroup==1.2.2 - idna==3.10 - iniconfig==2.1.0 - linecache2==1.0.0 - mock==5.2.0 - nose==1.3.7 - packaging==24.2 - padding==0.5 - pluggy==1.5.0 - pycrypto==2.6.1 - pytest==8.3.5 - pyyaml==6.0.2 - requests==2.32.3 - responses==0.25.7 - six==1.17.0 - tomli==2.2.1 - traceback2==1.4.0 - unittest2==1.1.0 - urllib3==2.3.0 prefix: /opt/conda/envs/KeenClient-Python
[ "keen/tests/scoped_key_tests.py::ScopedKeyTests::test_old_scoped_key_encrypts_and_decrypts", "keen/tests/scoped_key_tests.py::ScopedKeyTests::test_old_scoped_key_fails_decryption_on_bad_key", "keen/tests/scoped_key_tests.py::ScopedKeyTests::test_scoped_key_encrypts_and_decrypts", "keen/tests/scoped_key_tests.py::ScopedKeyTests::test_scoped_key_fails_decryption_bad_key" ]
[]
[]
[]
MIT License
476
dask__dask-1051
cf5a6599fbd6269b67792842fc38e0b4dbbb778f
2016-03-15 02:57:04
7f398f2a00e23b105790b6aca4edc6816379089b
diff --git a/dask/array/core.py b/dask/array/core.py index d621540a0..d277652e7 100644 --- a/dask/array/core.py +++ b/dask/array/core.py @@ -1045,6 +1045,8 @@ class Array(Base): def astype(self, dtype, **kwargs): """ Copy of the array, cast to a specified type """ + if dtype == self._dtype: + return self name = tokenize('astype', self, dtype, kwargs) return elemwise(lambda x: x.astype(dtype, **kwargs), self, dtype=dtype, name=name) @@ -1798,6 +1800,13 @@ def concatenate(seq, axis=0): + seq[0].chunks[axis + 1:]) cum_dims = [0] + list(accumulate(add, [len(a.chunks[axis]) for a in seq])) + + if all(a._dtype is not None for a in seq): + dt = reduce(np.promote_types, [a._dtype for a in seq]) + seq = [x.astype(dt) for x in seq] + else: + dt = None + names = [a.name for a in seq] name = 'concatenate-' + tokenize(names, axis) @@ -1812,10 +1821,7 @@ def concatenate(seq, axis=0): dsk = dict(zip(keys, values)) dsk2 = merge(dsk, *[a.dask for a in seq]) - if all(a._dtype is not None for a in seq): - dt = reduce(np.promote_types, [a._dtype for a in seq]) - else: - dt = None + return Array(dsk2, name, chunks, dtype=dt) diff --git a/dask/dataframe/io.py b/dask/dataframe/io.py index f90419c38..3d2d37c4c 100644 --- a/dask/dataframe/io.py +++ b/dask/dataframe/io.py @@ -565,11 +565,11 @@ def from_dask_array(x, columns=None): >>> x = da.ones((4, 2), chunks=(2, 2)) >>> df = dd.io.from_dask_array(x, columns=['a', 'b']) >>> df.compute() - a b - 0 1 1 - 1 1 1 - 2 1 1 - 3 1 1 + a b + 0 1.0 1.0 + 1 1.0 1.0 + 2 1.0 1.0 + 3 1.0 1.0 """ dummy = _dummy_from_array(x, columns)
Bug (?) concatenating arrays of strings Hi, I've run across what I think is a small bug with concatenating dask arrays of strings in which the dtypes of the arrays to be concatenated are different: ``` In [106]: a = np.array(['CA-0', 'CA-1']) In [107]: b = np.array(['TX-0', 'TX-10', 'TX-101', 'TX-102']) In [108]: a = da.from_array(a, chunks=2) In [109]: b = da.from_array(b, chunks=4) In [110]: da.concatenate([a, b]).compute() Out[110]: array(['CA-0', 'CA-1', 'TX-0', 'TX-1', 'TX-1', 'TX-1'], dtype='|S4') In [111]: da.concatenate([b, a]).compute() Out[111]: array(['TX-0', 'TX-10', 'TX-101', 'TX-102', 'CA-0', 'CA-1'], dtype='|S6') ``` If the array with the "smaller" dtype (in this case, S4) is the first array in the sequence to be concatenated, then this "smaller" dtype is used for the end result, truncating the entries in the array with the "larger" dtype (in this case, S6). If the order of the arrays is swapped so that the array with the "larger" dtype comes first, then the concatenation works properly. It looks to me like the error occurs in the [dask.array.core.concatenate3](https://github.com/dask/dask/blob/master/dask/array/core.py#L2952) function where the dtype of the result is inferred from the first array in the sequence, rather than using the dtype computed in the [concatenate](https://github.com/dask/dask/blob/master/dask/array/core.py#L1748) function itself. Todd
dask/dask
diff --git a/dask/array/tests/test_array_core.py b/dask/array/tests/test_array_core.py index f0b15ea2b..447304c49 100644 --- a/dask/array/tests/test_array_core.py +++ b/dask/array/tests/test_array_core.py @@ -285,6 +285,17 @@ def test_concatenate(): assert raises(ValueError, lambda: concatenate([a, b, c], axis=2)) +def test_concatenate_fixlen_strings(): + x = np.array(['a', 'b', 'c']) + y = np.array(['aa', 'bb', 'cc']) + + a = da.from_array(x, chunks=(2,)) + b = da.from_array(y, chunks=(2,)) + + assert_eq(np.concatenate([x, y]), + da.concatenate([a, b])) + + def test_vstack(): x = np.arange(5) y = np.ones(5) @@ -967,6 +978,8 @@ def test_astype(): assert_eq(d.astype('i8'), x.astype('i8')) assert same_keys(d.astype('i8'), d.astype('i8')) + assert d.astype(d.dtype) is d + def test_arithmetic(): x = np.arange(5).astype('f4') + 2
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 2 }
0.8
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[complete]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "numpy>=1.16.0 pandas>=1.0.0 cloudpickle partd distributed s3fs toolz psutil pytables bokeh bcolz scipy h5py ipython", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y graphviz liblzma-dev" ], "python": "3.5", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
aiobotocore @ file:///opt/conda/conda-bld/aiobotocore_1643638228694/work aiohttp @ file:///tmp/build/80754af9/aiohttp_1632748060317/work aioitertools @ file:///tmp/build/80754af9/aioitertools_1607109665762/work async-timeout==3.0.1 attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work backcall @ file:///home/ktietz/src/ci/backcall_1611930011877/work bcolz==1.2.1 bokeh @ file:///tmp/build/80754af9/bokeh_1620710048147/work botocore @ file:///opt/conda/conda-bld/botocore_1642672735464/work brotlipy==0.7.0 certifi==2021.5.30 cffi @ file:///tmp/build/80754af9/cffi_1625814693874/work chardet @ file:///tmp/build/80754af9/chardet_1607706739153/work click==8.0.3 cloudpickle @ file:///tmp/build/80754af9/cloudpickle_1632508026186/work contextvars==2.4 cryptography @ file:///tmp/build/80754af9/cryptography_1635366128178/work cytoolz==0.11.0 -e git+https://github.com/dask/dask.git@cf5a6599fbd6269b67792842fc38e0b4dbbb778f#egg=dask decorator @ file:///opt/conda/conda-bld/decorator_1643638310831/work distributed @ file:///tmp/build/80754af9/distributed_1615054599257/work fsspec @ file:///opt/conda/conda-bld/fsspec_1642510437511/work h5py==2.10.0 HeapDict @ file:///Users/ktietz/demo/mc3/conda-bld/heapdict_1630598515714/work idna @ file:///tmp/build/80754af9/idna_1637925883363/work idna-ssl @ file:///tmp/build/80754af9/idna_ssl_1611752490495/work immutables @ file:///tmp/build/80754af9/immutables_1628888996840/work importlib-metadata==4.8.3 iniconfig==1.1.1 ipython @ file:///tmp/build/80754af9/ipython_1593447367857/work ipython-genutils @ file:///tmp/build/80754af9/ipython_genutils_1606773439826/work jedi @ file:///tmp/build/80754af9/jedi_1606932572482/work Jinja2 @ file:///opt/conda/conda-bld/jinja2_1647436528585/work jmespath @ file:///Users/ktietz/demo/mc3/conda-bld/jmespath_1630583964805/work locket==0.2.1 MarkupSafe @ file:///tmp/build/80754af9/markupsafe_1621528150516/work mock @ file:///tmp/build/80754af9/mock_1607622725907/work msgpack @ file:///tmp/build/80754af9/msgpack-python_1612287171716/work multidict @ file:///tmp/build/80754af9/multidict_1607367768400/work numexpr @ file:///tmp/build/80754af9/numexpr_1618853194344/work numpy @ file:///tmp/build/80754af9/numpy_and_numpy_base_1603483703303/work olefile @ file:///Users/ktietz/demo/mc3/conda-bld/olefile_1629805411829/work packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pandas==1.1.5 parso==0.7.0 partd @ file:///opt/conda/conda-bld/partd_1647245470509/work pexpect @ file:///tmp/build/80754af9/pexpect_1605563209008/work pickleshare @ file:///tmp/build/80754af9/pickleshare_1606932040724/work Pillow @ file:///tmp/build/80754af9/pillow_1625670622947/work pluggy==1.0.0 prompt-toolkit @ file:///tmp/build/80754af9/prompt-toolkit_1633440160888/work psutil @ file:///tmp/build/80754af9/psutil_1612297621795/work ptyprocess @ file:///tmp/build/80754af9/ptyprocess_1609355006118/work/dist/ptyprocess-0.7.0-py2.py3-none-any.whl py==1.11.0 pycparser @ file:///tmp/build/80754af9/pycparser_1636541352034/work Pygments @ file:///opt/conda/conda-bld/pygments_1644249106324/work pyOpenSSL @ file:///opt/conda/conda-bld/pyopenssl_1643788558760/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work PySocks @ file:///tmp/build/80754af9/pysocks_1605305763431/work pytest==7.0.1 python-dateutil @ file:///tmp/build/80754af9/python-dateutil_1626374649649/work pytz==2021.3 PyYAML==5.4.1 s3fs @ file:///opt/conda/conda-bld/s3fs_1643701468749/work scipy @ file:///tmp/build/80754af9/scipy_1597686635649/work six @ 
file:///tmp/build/80754af9/six_1644875935023/work sortedcontainers @ file:///tmp/build/80754af9/sortedcontainers_1623949099177/work tables==3.6.1 tblib @ file:///Users/ktietz/demo/mc3/conda-bld/tblib_1629402031467/work tomli==1.2.3 toolz @ file:///tmp/build/80754af9/toolz_1636545406491/work tornado @ file:///tmp/build/80754af9/tornado_1606942266872/work traitlets @ file:///tmp/build/80754af9/traitlets_1632746497744/work typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work urllib3 @ file:///opt/conda/conda-bld/urllib3_1643638302206/work wcwidth @ file:///Users/ktietz/demo/mc3/conda-bld/wcwidth_1629357192024/work wrapt==1.12.1 yarl @ file:///tmp/build/80754af9/yarl_1606939915466/work zict==2.0.0 zipp==3.6.0
name: dask channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - aiobotocore=2.1.0=pyhd3eb1b0_0 - aiohttp=3.7.4.post0=py36h7f8727e_2 - aioitertools=0.7.1=pyhd3eb1b0_0 - async-timeout=3.0.1=py36h06a4308_0 - attrs=21.4.0=pyhd3eb1b0_0 - backcall=0.2.0=pyhd3eb1b0_0 - bcolz=1.2.1=py36h04863e7_0 - blas=1.0=openblas - blosc=1.21.3=h6a678d5_0 - bokeh=2.3.2=py36h06a4308_0 - botocore=1.23.24=pyhd3eb1b0_0 - brotlipy=0.7.0=py36h27cfd23_1003 - bzip2=1.0.8=h5eee18b_6 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - cffi=1.14.6=py36h400218f_0 - chardet=4.0.0=py36h06a4308_1003 - click=8.0.3=pyhd3eb1b0_0 - cloudpickle=2.0.0=pyhd3eb1b0_0 - contextvars=2.4=py_0 - cryptography=35.0.0=py36hd23ed53_0 - cytoolz=0.11.0=py36h7b6447c_0 - decorator=5.1.1=pyhd3eb1b0_0 - distributed=2021.3.0=py36h06a4308_0 - freetype=2.12.1=h4a9f257_0 - fsspec=2022.1.0=pyhd3eb1b0_0 - giflib=5.2.2=h5eee18b_0 - h5py=2.10.0=py36h7918eee_0 - hdf5=1.10.4=hb1b8bf9_0 - heapdict=1.0.1=pyhd3eb1b0_0 - idna=3.3=pyhd3eb1b0_0 - idna_ssl=1.1.0=py36h06a4308_0 - immutables=0.16=py36h7f8727e_0 - ipython=7.16.1=py36h5ca1d4c_0 - ipython_genutils=0.2.0=pyhd3eb1b0_1 - jedi=0.17.2=py36h06a4308_1 - jinja2=3.0.3=pyhd3eb1b0_0 - jmespath=0.10.0=pyhd3eb1b0_0 - jpeg=9e=h5eee18b_3 - lcms2=2.16=hb9589c4_0 - ld_impl_linux-64=2.40=h12ee557_0 - lerc=4.0.0=h6a678d5_0 - libdeflate=1.22=h5eee18b_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgfortran-ng=7.5.0=ha8ba4b0_17 - libgfortran4=7.5.0=ha8ba4b0_17 - libgomp=11.2.0=h1234567_1 - libopenblas=0.3.18=hf726d26_0 - libpng=1.6.39=h5eee18b_0 - libstdcxx-ng=11.2.0=h1234567_1 - libtiff=4.5.1=hffd6297_1 - libwebp=1.2.4=h11a3e52_1 - libwebp-base=1.2.4=h5eee18b_1 - locket=0.2.1=py36h06a4308_1 - lz4-c=1.9.4=h6a678d5_1 - lzo=2.10=h7b6447c_2 - markupsafe=2.0.1=py36h27cfd23_0 - mock=4.0.3=pyhd3eb1b0_0 - msgpack-python=1.0.2=py36hff7bd54_1 - multidict=5.1.0=py36h27cfd23_2 - ncurses=6.4=h6a678d5_0 - numexpr=2.7.3=py36h4be448d_1 - numpy=1.19.2=py36h6163131_0 - numpy-base=1.19.2=py36h75fe3a5_0 - olefile=0.46=pyhd3eb1b0_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pandas=1.1.5=py36ha9443f7_0 - parso=0.7.0=py_0 - partd=1.2.0=pyhd3eb1b0_1 - pexpect=4.8.0=pyhd3eb1b0_3 - pickleshare=0.7.5=pyhd3eb1b0_1003 - pillow=8.3.1=py36h5aabda8_0 - pip=21.2.2=py36h06a4308_0 - prompt-toolkit=3.0.20=pyhd3eb1b0_0 - psutil=5.8.0=py36h27cfd23_1 - ptyprocess=0.7.0=pyhd3eb1b0_2 - pycparser=2.21=pyhd3eb1b0_0 - pygments=2.11.2=pyhd3eb1b0_0 - pyopenssl=22.0.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pysocks=1.7.1=py36h06a4308_0 - pytables=3.6.1=py36h71ec239_0 - python=3.6.13=h12debd9_1 - python-dateutil=2.8.2=pyhd3eb1b0_0 - pytz=2021.3=pyhd3eb1b0_0 - pyyaml=5.4.1=py36h27cfd23_1 - readline=8.2=h5eee18b_0 - s3fs=2022.1.0=pyhd3eb1b0_0 - scipy=1.5.2=py36habc2bb6_0 - setuptools=58.0.4=py36h06a4308_0 - six=1.16.0=pyhd3eb1b0_1 - sortedcontainers=2.4.0=pyhd3eb1b0_0 - sqlite=3.45.3=h5eee18b_0 - tblib=1.7.0=pyhd3eb1b0_0 - tk=8.6.14=h39e8969_0 - toolz=0.11.2=pyhd3eb1b0_0 - tornado=6.1=py36h27cfd23_0 - traitlets=4.3.3=py36h06a4308_0 - typing-extensions=4.1.1=hd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - urllib3=1.26.8=pyhd3eb1b0_0 - wcwidth=0.2.5=pyhd3eb1b0_0 - wheel=0.37.1=pyhd3eb1b0_0 - wrapt=1.12.1=py36h7b6447c_1 - xz=5.6.4=h5eee18b_1 - yaml=0.2.5=h7b6447c_0 - yarl=1.6.3=py36h27cfd23_0 - zict=2.0.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - zstd=1.5.6=hc292b87_0 - pip: - 
importlib-metadata==4.8.3 - iniconfig==1.1.1 - pluggy==1.0.0 - py==1.11.0 - pytest==7.0.1 - tomli==1.2.3 - zipp==3.6.0 prefix: /opt/conda/envs/dask
[ "dask/array/tests/test_array_core.py::test_concatenate_fixlen_strings", "dask/array/tests/test_array_core.py::test_astype" ]
[ "dask/array/tests/test_array_core.py::test_field_access", "dask/array/tests/test_array_core.py::test_coarsen", "dask/array/tests/test_array_core.py::test_coarsen_with_excess" ]
[ "dask/array/tests/test_array_core.py::test_getem", "dask/array/tests/test_array_core.py::test_top", "dask/array/tests/test_array_core.py::test_top_supports_broadcasting_rules", "dask/array/tests/test_array_core.py::test_concatenate3", "dask/array/tests/test_array_core.py::test_concatenate3_on_scalars", "dask/array/tests/test_array_core.py::test_chunked_dot_product", "dask/array/tests/test_array_core.py::test_chunked_transpose_plus_one", "dask/array/tests/test_array_core.py::test_transpose", "dask/array/tests/test_array_core.py::test_broadcast_dimensions_works_with_singleton_dimensions", "dask/array/tests/test_array_core.py::test_broadcast_dimensions", "dask/array/tests/test_array_core.py::test_Array", "dask/array/tests/test_array_core.py::test_uneven_chunks", "dask/array/tests/test_array_core.py::test_numblocks_suppoorts_singleton_block_dims", "dask/array/tests/test_array_core.py::test_keys", "dask/array/tests/test_array_core.py::test_Array_computation", "dask/array/tests/test_array_core.py::test_stack", "dask/array/tests/test_array_core.py::test_short_stack", "dask/array/tests/test_array_core.py::test_stack_scalars", "dask/array/tests/test_array_core.py::test_concatenate", "dask/array/tests/test_array_core.py::test_vstack", "dask/array/tests/test_array_core.py::test_hstack", "dask/array/tests/test_array_core.py::test_dstack", "dask/array/tests/test_array_core.py::test_take", "dask/array/tests/test_array_core.py::test_compress", "dask/array/tests/test_array_core.py::test_binops", "dask/array/tests/test_array_core.py::test_isnull", "dask/array/tests/test_array_core.py::test_isclose", "dask/array/tests/test_array_core.py::test_broadcast_shapes", "dask/array/tests/test_array_core.py::test_elemwise_on_scalars", "dask/array/tests/test_array_core.py::test_partial_by_order", "dask/array/tests/test_array_core.py::test_elemwise_with_ndarrays", "dask/array/tests/test_array_core.py::test_elemwise_differently_chunked", "dask/array/tests/test_array_core.py::test_operators", "dask/array/tests/test_array_core.py::test_operator_dtype_promotion", "dask/array/tests/test_array_core.py::test_tensordot", "dask/array/tests/test_array_core.py::test_dot_method", "dask/array/tests/test_array_core.py::test_T", "dask/array/tests/test_array_core.py::test_norm", "dask/array/tests/test_array_core.py::test_choose", "dask/array/tests/test_array_core.py::test_where", "dask/array/tests/test_array_core.py::test_where_has_informative_error", "dask/array/tests/test_array_core.py::test_insert", "dask/array/tests/test_array_core.py::test_multi_insert", "dask/array/tests/test_array_core.py::test_broadcast_to", "dask/array/tests/test_array_core.py::test_ravel", "dask/array/tests/test_array_core.py::test_unravel", "dask/array/tests/test_array_core.py::test_reshape", "dask/array/tests/test_array_core.py::test_reshape_unknown_dimensions", "dask/array/tests/test_array_core.py::test_full", "dask/array/tests/test_array_core.py::test_map_blocks", "dask/array/tests/test_array_core.py::test_map_blocks2", "dask/array/tests/test_array_core.py::test_map_blocks_with_constants", "dask/array/tests/test_array_core.py::test_map_blocks_with_kwargs", "dask/array/tests/test_array_core.py::test_fromfunction", "dask/array/tests/test_array_core.py::test_from_function_requires_block_args", "dask/array/tests/test_array_core.py::test_repr", "dask/array/tests/test_array_core.py::test_slicing_with_ellipsis", "dask/array/tests/test_array_core.py::test_slicing_with_ndarray", "dask/array/tests/test_array_core.py::test_dtype", 
"dask/array/tests/test_array_core.py::test_blockdims_from_blockshape", "dask/array/tests/test_array_core.py::test_coerce", "dask/array/tests/test_array_core.py::test_store", "dask/array/tests/test_array_core.py::test_to_hdf5", "dask/array/tests/test_array_core.py::test_np_array_with_zero_dimensions", "dask/array/tests/test_array_core.py::test_unique", "dask/array/tests/test_array_core.py::test_dtype_complex", "dask/array/tests/test_array_core.py::test_arithmetic", "dask/array/tests/test_array_core.py::test_elemwise_consistent_names", "dask/array/tests/test_array_core.py::test_optimize", "dask/array/tests/test_array_core.py::test_slicing_with_non_ndarrays", "dask/array/tests/test_array_core.py::test_getarray", "dask/array/tests/test_array_core.py::test_squeeze", "dask/array/tests/test_array_core.py::test_size", "dask/array/tests/test_array_core.py::test_nbytes", "dask/array/tests/test_array_core.py::test_Array_normalizes_dtype", "dask/array/tests/test_array_core.py::test_args", "dask/array/tests/test_array_core.py::test_from_array_with_lock", "dask/array/tests/test_array_core.py::test_from_func", "dask/array/tests/test_array_core.py::test_topk", "dask/array/tests/test_array_core.py::test_topk_k_bigger_than_chunk", "dask/array/tests/test_array_core.py::test_bincount", "dask/array/tests/test_array_core.py::test_bincount_with_weights", "dask/array/tests/test_array_core.py::test_bincount_raises_informative_error_on_missing_minlength_kwarg", "dask/array/tests/test_array_core.py::test_histogram", "dask/array/tests/test_array_core.py::test_histogram_alternative_bins_range", "dask/array/tests/test_array_core.py::test_histogram_return_type", "dask/array/tests/test_array_core.py::test_histogram_extra_args_and_shapes", "dask/array/tests/test_array_core.py::test_map_blocks3", "dask/array/tests/test_array_core.py::test_from_array_with_missing_chunks", "dask/array/tests/test_array_core.py::test_cache", "dask/array/tests/test_array_core.py::test_take_dask_from_numpy", "dask/array/tests/test_array_core.py::test_normalize_chunks", "dask/array/tests/test_array_core.py::test_raise_on_no_chunks", "dask/array/tests/test_array_core.py::test_chunks_is_immutable", "dask/array/tests/test_array_core.py::test_raise_on_bad_kwargs", "dask/array/tests/test_array_core.py::test_long_slice", "dask/array/tests/test_array_core.py::test_h5py_newaxis", "dask/array/tests/test_array_core.py::test_ellipsis_slicing", "dask/array/tests/test_array_core.py::test_point_slicing", "dask/array/tests/test_array_core.py::test_point_slicing_with_full_slice", "dask/array/tests/test_array_core.py::test_slice_with_floats", "dask/array/tests/test_array_core.py::test_vindex_errors", "dask/array/tests/test_array_core.py::test_vindex_merge", "dask/array/tests/test_array_core.py::test_empty_array", "dask/array/tests/test_array_core.py::test_array", "dask/array/tests/test_array_core.py::test_cov", "dask/array/tests/test_array_core.py::test_corrcoef", "dask/array/tests/test_array_core.py::test_memmap", "dask/array/tests/test_array_core.py::test_to_npy_stack", "dask/array/tests/test_array_core.py::test_view", "dask/array/tests/test_array_core.py::test_view_fortran", "dask/array/tests/test_array_core.py::test_h5py_tokenize", "dask/array/tests/test_array_core.py::test_map_blocks_with_changed_dimension", "dask/array/tests/test_array_core.py::test_broadcast_chunks", "dask/array/tests/test_array_core.py::test_chunks_error", "dask/array/tests/test_array_core.py::test_array_compute_forward_kwargs", 
"dask/array/tests/test_array_core.py::test_dont_fuse_outputs", "dask/array/tests/test_array_core.py::test_dont_dealias_outputs", "dask/array/tests/test_array_core.py::test_timedelta_op", "dask/array/tests/test_array_core.py::test_to_imperative", "dask/array/tests/test_array_core.py::test_cumulative", "dask/array/tests/test_array_core.py::test_eye", "dask/array/tests/test_array_core.py::test_diag", "dask/array/tests/test_array_core.py::test_tril_triu", "dask/array/tests/test_array_core.py::test_tril_triu_errors" ]
[]
BSD 3-Clause "New" or "Revised" License
477
rackerlabs__lambda-uploader-69
d4b3a4a582baf41325e0551e987a9d6ab6a0b02d
2016-03-15 18:40:55
a03a2743a7b668930f12d63da81a944af7e6cff3
diff --git a/lambda_uploader/package.py b/lambda_uploader/package.py index 5e22d83..6a0fe87 100644 --- a/lambda_uploader/package.py +++ b/lambda_uploader/package.py @@ -228,12 +228,11 @@ class Package(object): for p in self._extra_files: LOG.info('Copying extra %s into package' % p) + ignore += ["%s" % p] if os.path.isdir(p): - utils.copy_tree(p, package) - ignore += ["^%s/*" % p] + utils.copy_tree(p, package, include_parent=True) else: shutil.copy(p, package) - ignore += ["%s" % p] # Append the temp workspace to the ignore list: ignore += ["^%s/*" % TEMP_WORKSPACE_NAME] diff --git a/lambda_uploader/utils.py b/lambda_uploader/utils.py index 2a3c841..e7664b6 100644 --- a/lambda_uploader/utils.py +++ b/lambda_uploader/utils.py @@ -21,8 +21,18 @@ import re LOG = logging.getLogger(__name__) -def copy_tree(src, dest, ignore=[]): +def copy_tree(src, dest, ignore=[], include_parent=False): + if os.path.isfile(src): + raise Exception('Cannot use copy_tree with a file as the src') + LOG.info('Copying source files') + if include_parent: + # if src is foo, make dest/foo and copy files there + nested_dest = os.path.join(dest, os.path.basename(src)) + os.makedirs(nested_dest) + else: + nested_dest = dest + # Re-create directory structure for root, _, files in os.walk(src): for filename in files: @@ -33,7 +43,7 @@ def copy_tree(src, dest, ignore=[]): sub_dirs = os.path.dirname(os.path.relpath(path, start=src)) - pkg_path = os.path.join(dest, sub_dirs) + pkg_path = os.path.join(nested_dest, sub_dirs) if not os.path.isdir(pkg_path): os.makedirs(pkg_path)
Easy sharing of code between lambda functions Is there a way to share Python code between lambdas, without having to create an additional package to install via requirements? e.g., in the Serverless framework, there's the concept of a "lib" folder which can contain modules shared between multiple lambdas.
rackerlabs/lambda-uploader
diff --git a/test/test_package.py b/test/test_package.py index 5de87eb..104bed9 100644 --- a/test/test_package.py +++ b/test/test_package.py @@ -120,7 +120,7 @@ def test_package_with_extras(): assert path.isfile(expected_extra_file1) # test a recursive directory - expected_extra_file2 = path.join(PACKAGE_TEMP_DIR, 'foo/__init__.py') + expected_extra_file2 = path.join(PACKAGE_TEMP_DIR, 'extra/foo/__init__.py') assert path.isfile(expected_extra_file2)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 2 }
0.5
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "cython", "distro", "pytest-cov", "pytest-xdist", "pytest-mock", "pytest-asyncio", "pytest-bdd", "pytest-benchmark", "pytest-randomly", "responses", "mock", "hypothesis", "freezegun", "trustme", "requests-mock", "requests", "tomlkit" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==25.3.0 boto3==1.2.2 botocore==1.3.30 certifi==2025.1.31 cffi==1.17.1 charset-normalizer==3.4.1 coverage==7.8.0 cryptography==44.0.2 Cython==3.0.12 distlib==0.3.9 distro==1.9.0 docutils==0.21.2 exceptiongroup==1.2.2 execnet==2.1.1 filelock==3.18.0 freezegun==1.5.1 gherkin-official==29.0.0 hypothesis==6.130.5 idna==3.10 importlib_metadata==8.6.1 iniconfig==2.1.0 jmespath==0.10.0 -e git+https://github.com/rackerlabs/lambda-uploader.git@d4b3a4a582baf41325e0551e987a9d6ab6a0b02d#egg=lambda_uploader Mako==1.3.9 MarkupSafe==3.0.2 mock==5.2.0 packaging==24.2 parse==1.20.2 parse_type==0.6.4 platformdirs==4.3.7 pluggy==1.5.0 py-cpuinfo==9.0.0 pycparser==2.22 pytest==8.3.5 pytest-asyncio==0.26.0 pytest-bdd==8.1.0 pytest-benchmark==5.1.0 pytest-cov==6.0.0 pytest-mock==3.14.0 pytest-randomly==3.16.0 pytest-xdist==3.6.1 python-dateutil==2.9.0.post0 PyYAML==6.0.2 requests==2.32.3 requests-mock==1.12.1 responses==0.25.7 six==1.17.0 sortedcontainers==2.4.0 tomli==2.2.1 tomlkit==0.13.2 trustme==1.2.1 typing_extensions==4.13.0 urllib3==2.3.0 virtualenv==20.29.3 zipp==3.21.0
name: lambda-uploader channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==25.3.0 - boto3==1.2.2 - botocore==1.3.30 - certifi==2025.1.31 - cffi==1.17.1 - charset-normalizer==3.4.1 - coverage==7.8.0 - cryptography==44.0.2 - cython==3.0.12 - distlib==0.3.9 - distro==1.9.0 - docutils==0.21.2 - exceptiongroup==1.2.2 - execnet==2.1.1 - filelock==3.18.0 - freezegun==1.5.1 - gherkin-official==29.0.0 - hypothesis==6.130.5 - idna==3.10 - importlib-metadata==8.6.1 - iniconfig==2.1.0 - jmespath==0.10.0 - mako==1.3.9 - markupsafe==3.0.2 - mock==5.2.0 - packaging==24.2 - parse==1.20.2 - parse-type==0.6.4 - platformdirs==4.3.7 - pluggy==1.5.0 - py-cpuinfo==9.0.0 - pycparser==2.22 - pytest==8.3.5 - pytest-asyncio==0.26.0 - pytest-bdd==8.1.0 - pytest-benchmark==5.1.0 - pytest-cov==6.0.0 - pytest-mock==3.14.0 - pytest-randomly==3.16.0 - pytest-xdist==3.6.1 - python-dateutil==2.9.0.post0 - pyyaml==6.0.2 - requests==2.32.3 - requests-mock==1.12.1 - responses==0.25.7 - six==1.17.0 - sortedcontainers==2.4.0 - tomli==2.2.1 - tomlkit==0.13.2 - trustme==1.2.1 - typing-extensions==4.13.0 - urllib3==2.3.0 - virtualenv==20.29.3 - zipp==3.21.0 prefix: /opt/conda/envs/lambda-uploader
[ "test/test_package.py::test_package_with_extras" ]
[ "test/test_package.py::test_install_requirements" ]
[ "test/test_package.py::test_bad_existing_virtualenv", "test/test_package.py::test_package", "test/test_package.py::test_default_virtualenv", "test/test_package.py::test_prepare_workspace", "test/test_package.py::test_package_zip_location", "test/test_package.py::test_omit_virtualenv", "test/test_package.py::test_package_name", "test/test_package.py::test_existing_virtualenv" ]
[ "test/test_package.py::test_package_clean_workspace" ]
Apache License 2.0
478
moogar0880__PyTrakt-54
f574c1c1dfc6f65f21296184659aadc2879f2be6
2016-03-20 01:02:25
f574c1c1dfc6f65f21296184659aadc2879f2be6
diff --git a/.landscape.yaml b/.landscape.yaml new file mode 100644 index 0000000..95d8d32 --- /dev/null +++ b/.landscape.yaml @@ -0,0 +1,2 @@ +doc-warnings: yes +strictness: veryhigh diff --git a/.travis.yml b/.travis.yml index 0b1b7a0..18823c2 100644 --- a/.travis.yml +++ b/.travis.yml @@ -5,5 +5,4 @@ python: - 3.4 - 3.5 script: - - make travis -after_success: coveralls + - make ci diff --git a/HISTORY.rst b/HISTORY.rst index 9ff86a6..0295563 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -1,5 +1,10 @@ Release History ^^^^^^^^^^^^^^^ +2.4.4 (2016-03-19) +++++++++++++++++++ + +* Update `slugify` function to better match trakt slugs (#51) + 2.4.3 (2016-03-12) ++++++++++++++++++ diff --git a/Makefile b/Makefile index 5ab43b9..8379202 100644 --- a/Makefile +++ b/Makefile @@ -14,11 +14,6 @@ coverage: ci: init style test -coveralls: - pip install coveralls - -travis: coveralls ci - publish: python setup.py register python setup.py sdist upload diff --git a/README.rst b/README.rst index 6b953ad..598a0ad 100644 --- a/README.rst +++ b/README.rst @@ -4,9 +4,9 @@ PyTrakt :target: https://travis-ci.org/moogar0880/PyTrakt :alt: Travis CI Status -.. image:: https://coveralls.io/repos/moogar0880/PyTrakt/badge.svg - :target: https://coveralls.io/r/moogar0880/PyTrakt - :alt: Coverage +.. image:: https://landscape.io/github/moogar0880/PyTrakt/master/landscape.svg?style=flat + :target: https://landscape.io/github/moogar0880/PyTrakt/master + :alt: Code Health .. image:: https://pypip.in/d/trakt/badge.svg?style=flat :target: https://pypi.python.org/pypi/trakt/ diff --git a/trakt/__init__.py b/trakt/__init__.py index c53769b..4980572 100644 --- a/trakt/__init__.py +++ b/trakt/__init__.py @@ -5,6 +5,6 @@ try: except ImportError: pass -version_info = (2, 4, 3) +version_info = (2, 4, 4) __author__ = 'Jon Nappi' __version__ = '.'.join([str(i) for i in version_info]) diff --git a/trakt/utils.py b/trakt/utils.py index d05b755..6b79fec 100644 --- a/trakt/utils.py +++ b/trakt/utils.py @@ -14,12 +14,11 @@ def slugify(value): Adapted from django.utils.text.slugify """ - if sys.version_info[0] == 2: - value = unicode(value) # NOQA - value = unicodedata.normalize('NFKD', - value).encode('ascii', - 'ignore').decode('ascii') - value = re.sub('[^\w\s-]', '', value).strip().lower() + if sys.version_info[0] == 2 and isinstance(value, str): + value = unicode(value, 'utf-8') # NOQA + nfkd_form = unicodedata.normalize('NFKD', value) + decoded = nfkd_form.encode('ascii', 'ignore').decode('utf-8') + value = re.sub('[^\w\s-]', ' ', decoded).strip().lower() return re.sub('[-\s]+', '-', value)
Slugify bug? There appear to be some differences in how your slugify function works in utils.py compared to the trakt slugs. Are they supposed to match? Without them matching, the _get functions in movies.py and tv.py class inits are prone to error as these rely on the slug to pull the information. Here's an example. title: Marvel's Agents of S.H.I.E.L.D. pytrakt slug: marvels-agents-of-shield trakt slug: marvel-s-agents-of-s-h-i-e-l-d Also, I noticed that in movie.py you're joining the year (if passed) to the slug but you don't have the same logic in tv.py. Is there a reason to not include that? TV Shows with the same title also use the year in the slug to differentiate the shows.
moogar0880/PyTrakt
diff --git a/testing-requirements.txt b/testing-requirements.txt index 35d8447..4c50c00 100644 --- a/testing-requirements.txt +++ b/testing-requirements.txt @@ -2,4 +2,3 @@ flake8 pytest pytest-cov -coveralls diff --git a/tests/test_utils.py b/tests/test_utils.py index 6b75c21..78b1af4 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -11,6 +11,8 @@ def test_slugify(): ('IM AN ALL CAPS STRING', 'im-an-all-caps-string'), ('IM A BAD A$$ STRING!@', 'im-a-bad-a-string'), (' LOOK AT MY WHITESPACE ', 'look-at-my-whitespace'), + ("Marvel's Agents of S.H.I.E.L.D.", 'marvel-s-agents-of-s-h-i-e-l-d'), + ('Naruto Shippūden', 'naruto-shippuden'), ] for inp, expected in test_data:
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_added_files", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 1 }, "num_modified_files": 6 }
2.4
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "coveralls" ], "pre_install": null, "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi==2025.1.31 charset-normalizer==3.4.1 coverage==7.8.0 coveralls==4.0.1 docopt==0.6.2 exceptiongroup==1.2.2 idna==3.10 iniconfig==2.1.0 oauthlib==3.2.2 packaging==24.2 pluggy==1.5.0 pytest==8.3.5 pytest-cov==6.0.0 requests==2.32.3 requests-oauthlib==2.0.0 tomli==2.2.1 -e git+https://github.com/moogar0880/PyTrakt.git@f574c1c1dfc6f65f21296184659aadc2879f2be6#egg=trakt urllib3==2.3.0
name: PyTrakt channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - certifi==2025.1.31 - charset-normalizer==3.4.1 - coverage==7.8.0 - coveralls==4.0.1 - docopt==0.6.2 - exceptiongroup==1.2.2 - idna==3.10 - iniconfig==2.1.0 - oauthlib==3.2.2 - packaging==24.2 - pluggy==1.5.0 - pytest==8.3.5 - pytest-cov==6.0.0 - requests==2.32.3 - requests-oauthlib==2.0.0 - tomli==2.2.1 - urllib3==2.3.0 prefix: /opt/conda/envs/PyTrakt
[ "tests/test_utils.py::test_slugify" ]
[]
[ "tests/test_utils.py::test_airs_date", "tests/test_utils.py::test_now", "tests/test_utils.py::test_timestamp", "tests/test_utils.py::test_extract_ids" ]
[]
Apache License 2.0
479
sympy__sympy-10879
674f13344242236cf37c0bf4cc3ec17d68a1ae51
2016-03-21 13:13:55
8bb5814067cfa0348fb8b708848f35dba2b55ff4
smichr: @jksuom @asmeurer , I think I got the problem fixed. I'm still a bit puzzled by what I was seeing since that was an old change that I don't think is represented anywhere in the current PR. Perhaps the force update and online editing of this PR caused a glitch. smichr: @jksuom , you were right. The behavior you were seeing was consistent with the behavior in master but was wrong. While f(x) and f(-x) will cover the same integers, f(x) and -f(x) will not. I now only make two simplifications (which are also a bit more robust).
diff --git a/sympy/core/operations.py b/sympy/core/operations.py index a0869f8459..85037bbef7 100644 --- a/sympy/core/operations.py +++ b/sympy/core/operations.py @@ -367,7 +367,7 @@ def make_args(cls, expr): if isinstance(expr, cls): return expr.args else: - return (expr,) + return (sympify(expr),) class ShortCircuit(Exception): @@ -443,23 +443,12 @@ def _new_args_filter(cls, arg_sequence, call_cls=None): @classmethod def make_args(cls, expr): """ - Return a sequence of elements `args` such that cls(*args) == expr - - >>> from sympy import Symbol, Mul, Add - >>> x, y = map(Symbol, 'xy') - - >>> Mul.make_args(x*y) - (x, y) - >>> Add.make_args(x*y) - (x*y,) - >>> set(Add.make_args(x*y + y)) == set([y, x*y]) - True - + Return a set of args such that cls(*arg_set) == expr. """ if isinstance(expr, cls): return expr._argset else: - return frozenset([expr]) + return frozenset([sympify(expr)]) @property @cacheit diff --git a/sympy/polys/polytools.py b/sympy/polys/polytools.py index b15983594c..e05c0f90e9 100644 --- a/sympy/polys/polytools.py +++ b/sympy/polys/polytools.py @@ -5622,12 +5622,12 @@ def _sorted_factors(factors, method): def key(obj): poly, exp = obj rep = poly.rep.rep - return (exp, len(rep), rep) + return (exp, len(rep), len(poly.gens), rep) else: def key(obj): poly, exp = obj rep = poly.rep.rep - return (len(rep), exp, rep) + return (len(rep), len(poly.gens), exp, rep) return sorted(factors, key=key) diff --git a/sympy/sets/fancysets.py b/sympy/sets/fancysets.py index 7e1d2bb970..d819ea4f59 100644 --- a/sympy/sets/fancysets.py +++ b/sympy/sets/fancysets.py @@ -1,14 +1,16 @@ from __future__ import print_function, division from sympy.logic.boolalg import And +from sympy.core.add import Add from sympy.core.basic import Basic from sympy.core.compatibility import as_int, with_metaclass, range, PY3 -from sympy.sets.sets import (Set, Interval, Intersection, EmptySet, Union, - FiniteSet, imageset) -from sympy.core.sympify import _sympify, sympify, converter +from sympy.core.expr import Expr +from sympy.core.function import Lambda, _coeff_isneg from sympy.core.singleton import Singleton, S from sympy.core.symbol import Dummy, symbols, Wild -from sympy.core.function import Lambda +from sympy.core.sympify import _sympify, sympify, converter +from sympy.sets.sets import (Set, Interval, Intersection, EmptySet, Union, + FiniteSet, imageset) from sympy.utilities.misc import filldedent, func_name @@ -154,22 +156,32 @@ def _boundary(self): def _eval_imageset(self, f): expr = f.expr - if len(f.variables) > 1: + if not isinstance(expr, Expr): return - n = f.variables[0] - a = Wild('a') - b = Wild('b') + if len(f.variables) > 1: + return - match = expr.match(a*n + b) - if match[a].is_negative: - expr = -expr + n = f.variables[0] + # f(x) + c and f(-x) + c cover the same integers + # so choose the form that has the fewest negatives + c = f(0) + fx = f(n) - c + f_x = f(-n) - c + neg_count = lambda e: sum(_coeff_isneg(_) for _ in Add.make_args(e)) + if neg_count(f_x) < neg_count(fx): + expr = f_x + c + + a = Wild('a', exclude=[n]) + b = Wild('b', exclude=[n]) match = expr.match(a*n + b) - if match[a] is S.One and match[b].is_integer: - expr = expr - match[b] + if match and match[a]: + # canonical shift + expr = match[a]*n + match[b] % match[a] - return ImageSet(Lambda(n, expr), S.Integers) + if expr != f.expr: + return ImageSet(Lambda(n, expr), S.Integers) class Reals(with_metaclass(Singleton, Interval)): @@ -253,17 +265,26 @@ def _is_multivariate(self): def _contains(self, other): from sympy.matrices 
import Matrix from sympy.solvers.solveset import solveset, linsolve - from sympy.utilities.iterables import iterable, cartes + from sympy.utilities.iterables import is_sequence, iterable, cartes L = self.lamda + if is_sequence(other): + if not is_sequence(L.expr): + return S.false + if len(L.expr) != len(other): + raise ValueError(filldedent(''' + Dimensions of other and output of Lambda are different.''')) + elif iterable(other): + raise ValueError(filldedent(''' + `other` should be an ordered object like a Tuple.''')) + + solns = None if self._is_multivariate(): - if not iterable(L.expr): - if iterable(other): - return S.false + if not is_sequence(L.expr): + # exprs -> (numer, denom) and check again + # XXX this is a bad idea -- make the user + # remap self to desired form return other.as_numer_denom() in self.func( Lambda(L.variables, L.expr.as_numer_denom()), self.base_set) - if len(L.expr) != len(self.lamda.variables): - raise NotImplementedError(filldedent(''' - Dimensions of input and output of Lambda are different.''')) eqs = [expr - val for val, expr in zip(other, L.expr)] variables = L.variables free = set(variables) @@ -292,14 +313,35 @@ def _contains(self, other): raise NotImplementedError solns = cartes(*[solns[s] for s in variables]) else: - # assume scalar -> scalar mapping - solnsSet = solveset(L.expr - other, L.variables[0]) - if solnsSet.is_FiniteSet: - solns = list(solnsSet) + x = L.variables[0] + if isinstance(L.expr, Expr): + # scalar -> scalar mapping + solnsSet = solveset(L.expr - other, x) + if solnsSet.is_FiniteSet: + solns = list(solnsSet) + else: + msgset = solnsSet else: - raise NotImplementedError(filldedent(''' - Determining whether an ImageSet contains %s has not - been implemented.''' % func_name(other))) + # scalar -> vector + for e, o in zip(L.expr, other): + solns = solveset(e - o, x) + if solns is S.EmptySet: + return S.false + for soln in solns: + try: + if soln in self.base_set: + break # check next pair + except TypeError: + if self.base_set.contains(soln.evalf()): + break + else: + return S.false # never broke so there was no True + return S.true + + if solns is None: + raise NotImplementedError(filldedent(''' + Determining whether %s contains %s has not + been implemented.''' % (msgset, other))) for soln in solns: try: if soln in self.base_set: @@ -315,10 +357,15 @@ def is_iterable(self): def _intersect(self, other): from sympy.solvers.diophantine import diophantine if self.base_set is S.Integers: + g = None if isinstance(other, ImageSet) and other.base_set is S.Integers: - f, g = self.lamda.expr, other.lamda.expr - n, m = self.lamda.variables[0], other.lamda.variables[0] - + g = other.lamda.expr + m = other.lamda.variables[0] + elif other is S.Integers: + m = g = Dummy('x') + if g is not None: + f = self.lamda.expr + n = self.lamda.variables[0] # Diophantine sorts the solutions according to the alphabetic # order of the variable names, since the result should not depend # on the variable name, they are replaced by the dummy variables @@ -329,13 +376,14 @@ def _intersect(self, other): if solns_set == set(): return EmptySet() solns = list(diophantine(f - g)) - if len(solns) == 1: - t = list(solns[0][0].free_symbols)[0] - else: - return None - # since 'a' < 'b' - return imageset(Lambda(t, f.subs(a, solns[0][0])), S.Integers) + if len(solns) != 1: + return + + # since 'a' < 'b', select soln for n + nsol = solns[0][0] + t = nsol.free_symbols.pop() + return imageset(Lambda(n, f.subs(a, nsol.subs(t, n))), S.Integers) if other == S.Reals: from 
sympy.solvers.solveset import solveset_real @@ -504,39 +552,17 @@ def _intersect(self, other): if not all(i.is_number for i in other.args[:2]): return - o = other.intersect(Interval(self.inf, self.sup)) - if o is S.EmptySet: - return o - - # get inf/sup and handle below - if isinstance(o, FiniteSet): - assert len(o) == 1 - inf = sup = list(o)[0] - else: - assert isinstance(o, Interval) - sup = o.sup - inf = o.inf - - # get onto sequence - step = abs(self.step) - ref = self.start if self.start.is_finite else self.stop - a = ref + ceiling((inf - ref)/step)*step - if a not in other: - a += step - b = ref + floor((sup - ref)/step)*step - if b not in other: - b -= step - if self.step < 0: - a, b = b, a - # make sure to include end point - b += self.step - - rv = Range(a, b, self.step) - if not rv: - return S.EmptySet - return rv - - elif isinstance(other, Range): + # trim down to self's size, and represent + # as a Range with step 1 + start = ceiling(max(other.inf, self.inf)) + if start not in other: + start += 1 + end = floor(min(other.sup, self.sup)) + if end not in other: + end -= 1 + return self.intersect(Range(start, end + 1)) + + if isinstance(other, Range): from sympy.solvers.diophantine import diop_linear from sympy.core.numbers import ilcm @@ -641,7 +667,6 @@ def _updated_range(r, first): else: return - def _contains(self, other): if not self: return S.false @@ -807,6 +832,30 @@ def __getitem__(self, i): raise IndexError("Range index out of range") return rv + def _eval_imageset(self, f): + from sympy.core.function import expand_mul + if not self: + return S.EmptySet + if not isinstance(f.expr, Expr): + return + if self.size == 1: + return FiniteSet(f(self[0])) + if f is S.IdentityFunction: + return self + + x = f.variables[0] + expr = f.expr + # handle f that is linear in f's variable + if x not in expr.free_symbols or x in expr.diff(x).free_symbols: + return + if self.start.is_finite: + F = f(self.step*x + self.start) # for i in range(len(self)) + else: + F = f(-self.step*x + self[-1]) + F = expand_mul(F) + if F != expr: + return imageset(x, F, Range(self.size)) + @property def _inf(self): if not self: diff --git a/sympy/sets/sets.py b/sympy/sets/sets.py index 395138ba32..5d1f3a3cb5 100644 --- a/sympy/sets/sets.py +++ b/sympy/sets/sets.py @@ -2033,7 +2033,7 @@ def imageset(*args): Examples ======== - >>> from sympy import Interval, Symbol, imageset, sin, Lambda + >>> from sympy import S, Interval, Symbol, imageset, sin, Lambda >>> from sympy.abc import x, y >>> imageset(x, 2*x, Interval(0, 2)) @@ -2050,6 +2050,14 @@ def imageset(*args): >>> imageset(lambda y: x + y, Interval(-2, 1)) ImageSet(Lambda(_x, _x + x), [-2, 1]) + Expressions applied to the set of Integers are simplified + to show as few negatives as possible and linear expressions + are converted to a canonical form. If this is not desirable + then the unevaluated ImageSet should be used. + + >>> imageset(x, -2*x + 5, S.Integers) + ImageSet(Lambda(x, 2*x + 1), Integers()) + See Also ======== diff --git a/sympy/solvers/diophantine.py b/sympy/solvers/diophantine.py index d293f6b39c..f85bf48485 100644 --- a/sympy/solvers/diophantine.py +++ b/sympy/solvers/diophantine.py @@ -86,6 +86,8 @@ def diophantine(eq, param=symbols("t", integer=True)): base = term[0] var_t, jnk, eq_type = classify_diop(base) + if not var_t: + continue solution = diop_solve(base, param) if eq_type in ["linear", "homogeneous_ternary_quadratic", "general_pythagorean"]:
factor_list() error Python3
```
The following function call generates an error in Python 3 but works well in Python 2.

>>> factor_list(x*(x+y))
Traceback (most recent call last):
  File "<console>", line 1, in <module>
  File "/home/thilinarmtb/GSoC/sympy/sympy/polys/polytools.py", line 5675, in factor_list
    return _generic_factor_list(f, gens, args, method='factor')
  File "/home/thilinarmtb/GSoC/sympy/sympy/polys/polytools.py", line 5423, in _generic_factor_list
    fp = _sorted_factors(fp, method)
  File "/home/thilinarmtb/GSoC/sympy/sympy/polys/polytools.py", line 5322, in _sorted_factors
    return sorted(factors, key=key)
TypeError: unorderable types: list() < int()

Similarly, the following calls generate the same error:

[1] factor_list(y*(x+y)), factor_list(x*y + y**2)
[2] factor_list(x*(x+y+z)), factor_list(x**2+x*y+x*z)
[3] factor_list((x + 1)*(x + y))

But the following don't:

[4] factor_list(x*(x**2 + y**2)), factor_list(x**3 + x*y**2)
[5] factor_list(x**3 - x), factor_list(x*(x**2 - 1))
[6] factor_list(x**2 + x*y + x + y)
[7] factor_list((x + z)*(x + y)), factor_list(x**2 + x*z + y*x + y*z)

Here [6] is mathematically the same as [3], but only [3] gives the error.
```
Original issue for #7067: http://code.google.com/p/sympy/issues/detail?id=3968
Original author: https://code.google.com/u/115610317529675726783/
Original owner: https://code.google.com/u/101069955704897915480/
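The traceback above reduces to Python 3 refusing to order heterogeneous sort keys (a `list` against an `int`), which Python 2 silently tolerated. The following is a minimal, self-contained sketch of that failure mode and one conventional remedy; it is illustrative only — it is not SymPy's actual `_sorted_factors` code, and the `safe_key` helper and sample data are hypothetical.

```python
# Mixed-type sort keys: Python 2 imposed an arbitrary ordering,
# Python 3 raises TypeError as in the traceback above.
factors = [([1, 2], 1), (3, 2)]  # hypothetical (key, multiplicity) pairs

try:
    sorted(factors, key=lambda f: f[0])  # compares a list against an int
except TypeError as exc:
    print("Python 3 refuses the comparison:", exc)

# One remedy: normalise every key to a tuple of uniform shape so that
# all keys stay mutually comparable.
def safe_key(f):
    k = f[0]
    return (0, tuple(k)) if isinstance(k, list) else (1, (k,))

print(sorted(factors, key=safe_key))
```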
sympy/sympy
diff --git a/sympy/core/tests/test_operations.py b/sympy/core/tests/test_operations.py index 0a2a68b3bb..c34feba3d4 100644 --- a/sympy/core/tests/test_operations.py +++ b/sympy/core/tests/test_operations.py @@ -2,6 +2,7 @@ from sympy.core.operations import LatticeOp from sympy.utilities.pytest import raises from sympy.core.sympify import SympifyError +from sympy.core.add import Add # create the simplest possible Lattice class @@ -34,6 +35,7 @@ def test_lattice_print(): def test_lattice_make_args(): - assert join.make_args(0) == {0} - assert join.make_args(1) == {1} assert join.make_args(join(2, 3, 4)) == {S(2), S(3), S(4)} + assert join.make_args(0) == {0} + assert list(join.make_args(0))[0] is S.Zero + assert Add.make_args(0)[0] is S.Zero diff --git a/sympy/polys/tests/test_polytools.py b/sympy/polys/tests/test_polytools.py index b3f68c68c4..e2c796fd9a 100644 --- a/sympy/polys/tests/test_polytools.py +++ b/sympy/polys/tests/test_polytools.py @@ -3157,3 +3157,9 @@ class foo(Expr): def test_to_rational_coeffs(): assert to_rational_coeffs( Poly(x**3 + y*x**2 + sqrt(y), x, domain='EX')) == None + + +def test_factor_terms(): + # issue 7067 + assert factor_list(x*(x + y)) == (1, [(x, 1), (x + y, 1)]) + assert sqf_list(x*(x + y)) == (1, [(x, 1), (x + y, 1)]) diff --git a/sympy/sets/tests/test_fancysets.py b/sympy/sets/tests/test_fancysets.py index 08d1610904..ebee5c5668 100644 --- a/sympy/sets/tests/test_fancysets.py +++ b/sympy/sets/tests/test_fancysets.py @@ -112,7 +112,7 @@ def test_halfcircle(): assert not halfcircle.is_iterable -def test_ImageSet_iterator_not_injetive(): +def test_ImageSet_iterator_not_injective(): L = Lambda(x, x - x % 2) # produces 0, 2, 2, 4, 4, 6, 6, ... evens = ImageSet(L, S.Naturals) i = iter(evens) @@ -303,84 +303,49 @@ def test_range_range_intersection(): def test_range_interval_intersection(): - empty = Range(0) p = symbols('p', positive=True) assert isinstance(Range(3).intersect(Interval(p, p + 2)), Intersection) - assert Range(0, 4, 3).intersect(Interval(1, 2)) is S.EmptySet - for line, (r, i) in enumerate([ - # Intersection with intervals - (Range(0, 10, 1), Interval(2, 6)), - (Range(0, 2, 1), Interval(2, 6)), - (Range(0, 3, 1), Interval(2, 6)), - (Range(0, 4, 1), Interval(2, 6)), - (Range(0, 7, 1), Interval(2, 6)), - (Range(0, 10, 1), Interval(2, 6)), - (Range(2, 3, 1), Interval(2, 6)), - (Range(2, 4, 1), Interval(2, 6)), - (Range(2, 7, 1), Interval(2, 6)), - (Range(2, 10, 1), Interval(2, 6)), - (Range(3, 4, 1), Interval(2, 6)), - (Range(3, 7, 1), Interval(2, 6)), - (Range(3, 10, 1), Interval(2, 6)), - (Range(6, 7, 1), Interval(2, 6)), - (Range(6, 10, 1), Interval(2, 6)), - (Range(7, 10, 1), Interval(2, 6)), - (Range(0, 10, 2), Interval(3, 5)), - (Range(1, 10, 2), Interval(2, 6)), - (Range(2, 10, 2), Interval(2, 6)), - (Range(3, 10, 2), Interval(2, 6)), - (Range(6, 10, 2), Interval(2, 6)), - (Range(10), Interval(5.1, 6.9)), - - # Open Intervals are removed - (Range(0, 10, 1), Interval(2, 6, True, True)), - - # Try this with large steps - (Range(0, 100, 10), Interval(15, 55)), - - - # Infinite range - (Range(0, oo, 2), Interval(-1, 5)), - (Range(-oo, 4, 3), Interval(-10, 20)), - (Range(-oo, 4, 3), Interval(-10, -5)), - - # Infinite interval - (Range(-3, 0, 3), Interval(-oo, 0)), - (Range(0, 10, 3), Interval(3, oo)), - (Range(0, 10, 3), Interval(-oo, 5)), - - # Infinite interval, infinite range start - (Range(-oo, 1, 3), Interval(-oo, 5)), - (Range(-oo, 1, 3), Interval(-oo, -3)), - - # Infinite interval, infinite range end - (Range(0, oo, 3), Interval(5, oo)), - 
(Range(0, oo, 3), Interval(-5, oo)), - (Range(0, oo, 3), Interval(-oo, 5)), - - ]): - - for rev in range(2): - if rev: - r = r.reversed - result = r.intersection(i) - - msg = "line %s: %s.intersect(%s) != %s" % (line, r, i, result) - if result is S.EmptySet: - assert ( - r.sup < i.inf or - r.sup == i.inf and i.left_open) or ( - r.inf > i.sup or - r.inf == i.sup and i.right_open), msg - else: - checks = a, b, c, d = [ - result.inf in i or result.inf == i.inf, - result.inf - abs(result.step) not in i or \ - result.inf == r.inf, - result.sup in i or result.sup == i.sup, - result.sup + abs(result.step) not in i or \ - result.sup == r.sup] - assert all(_ for _ in checks), msg + assert Range(4).intersect(Interval(0, 3)) == Range(4) + assert Range(4).intersect(Interval(-oo, oo)) == Range(4) + assert Range(4).intersect(Interval(1, oo)) == Range(1, 4) + assert Range(4).intersect(Interval(1.1, oo)) == Range(2, 4) + assert Range(4).intersect(Interval(0.1, 3)) == Range(1, 4) + assert Range(4).intersect(Interval(0.1, 3.1)) == Range(1, 4) + assert Range(4).intersect(Interval.open(0, 3)) == Range(1, 3) + assert Range(4).intersect(Interval.open(0.1, 0.5)) is S.EmptySet + + +def test_Integers_eval_imageset(): + ans = ImageSet(Lambda(x, 2*x + S(3)/7), S.Integers) + im = imageset(Lambda(x, -2*x + S(3)/7), S.Integers) + assert im == ans + im = imageset(Lambda(x, -2*x - S(11)/7), S.Integers) + assert im == ans + y = Symbol('y') + assert imageset(x, 2*x + y, S.Integers) == \ + imageset(x, 2*x + y % 2, S.Integers) + + _x = symbols('x', negative=True) + eq = _x**2 - _x + 1 + assert imageset(_x, eq, S.Integers).lamda.expr == _x**2 + _x + 1 + eq = 3*_x - 1 + assert imageset(_x, eq, S.Integers).lamda.expr == 3*_x + 2 + + assert imageset(x, (x, 1/x), S.Integers) == \ + ImageSet(Lambda(x, (x, 1/x)), S.Integers) + + +def test_Range_eval_imageset(): + a, b, c = symbols('a b c') + assert imageset(x, a*(x + b) + c, Range(3)) == \ + imageset(x, a*x + a*b + c, Range(3)) + eq = (x + 1)**2 + assert imageset(x, eq, Range(3)).lamda.expr == eq + eq = a*(x + b) + c + r = Range(3, -3, -2) + imset = imageset(x, eq, r) + assert imset.lamda.expr != eq + assert list(imset) == [eq.subs(x, i).expand() for i in list(r)] def test_fun(): @@ -433,26 +398,33 @@ def test_infinitely_indexed_set_1(): from sympy.abc import n, m, t assert imageset(Lambda(n, n), S.Integers) == imageset(Lambda(m, m), S.Integers) - assert imageset(Lambda(n, 2*n), S.Integers).intersect(imageset(Lambda(m, 2*m + 1), S.Integers)) == \ - EmptySet() + assert imageset(Lambda(n, 2*n), S.Integers).intersect( + imageset(Lambda(m, 2*m + 1), S.Integers)) is S.EmptySet - assert imageset(Lambda(n, 2*n), S.Integers).intersect(imageset(Lambda(n, 2*n + 1), S.Integers)) == \ - EmptySet() + assert imageset(Lambda(n, 2*n), S.Integers).intersect( + imageset(Lambda(n, 2*n + 1), S.Integers)) is S.EmptySet - assert imageset(Lambda(m, 2*m), S.Integers).intersect(imageset(Lambda(n, 3*n), S.Integers)) == \ + assert imageset(Lambda(m, 2*m), S.Integers).intersect( + imageset(Lambda(n, 3*n), S.Integers)) == \ ImageSet(Lambda(t, 6*t), S.Integers) + assert imageset(x, x/2 + S(1)/3, S.Integers).intersect(S.Integers) is S.EmptySet + assert imageset(x, x/2 + S.Half, S.Integers).intersect(S.Integers) is S.Integers + def test_infinitely_indexed_set_2(): - from sympy import exp from sympy.abc import n a = Symbol('a', integer=True) - assert imageset(Lambda(n, n), S.Integers) == imageset(Lambda(n, n + a), S.Integers) - assert imageset(Lambda(n, n), S.Integers) == imageset(Lambda(n, -n + a), S.Integers) - 
assert imageset(Lambda(n, -6*n), S.Integers) == ImageSet(Lambda(n, 6*n), S.Integers) - assert imageset(Lambda(n, 2*n + pi), S.Integers) == ImageSet(Lambda(n, 2*n + pi), S.Integers) - assert imageset(Lambda(n, pi*n + pi), S.Integers) == ImageSet(Lambda(n, pi*n + pi), S.Integers) - assert imageset(Lambda(n, exp(n)), S.Integers) != imageset(Lambda(n, n), S.Integers) + assert imageset(Lambda(n, n), S.Integers) == \ + imageset(Lambda(n, n + a), S.Integers) + assert imageset(Lambda(n, n + pi), S.Integers) == \ + imageset(Lambda(n, n + a + pi), S.Integers) + assert imageset(Lambda(n, n), S.Integers) == \ + imageset(Lambda(n, -n + a), S.Integers) + assert imageset(Lambda(n, -6*n), S.Integers) == \ + ImageSet(Lambda(n, 6*n), S.Integers) + assert imageset(Lambda(n, 2*n + pi), S.Integers) == \ + ImageSet(Lambda(n, 2*n + pi - 2), S.Integers) def test_imageset_intersect_real(): @@ -465,18 +437,15 @@ def test_imageset_intersect_real(): assert s.intersect(S.Reals) == imageset(Lambda(n, 2*n*pi - pi/4), S.Integers) -@XFAIL -def test_infinitely_indexed_failed_diophantine(): - from sympy.abc import n, m, t - assert imageset(Lambda(m, 2*pi*m), S.Integers).intersect(imageset(Lambda(n, 3*pi*n), S.Integers)) == \ - ImageSet(Lambda(t, -6*pi*t), S.Integers) - - -@XFAIL def test_infinitely_indexed_set_3(): - from sympy.abc import n - assert imageset(Lambda(n, 2*n + 1), S.Integers) == imageset(Lambda(n, 2*n - 1), S.Integers) - assert imageset(Lambda(n, 3*n + 2), S.Integers) == imageset(Lambda(n, 3*n - 1), S.Integers) + from sympy.abc import n, m, t + assert imageset(Lambda(m, 2*pi*m), S.Integers).intersect( + imageset(Lambda(n, 3*pi*n), S.Integers)) == \ + ImageSet(Lambda(t, 6*pi*t), S.Integers) + assert imageset(Lambda(n, 2*n + 1), S.Integers) == \ + imageset(Lambda(n, 2*n - 1), S.Integers) + assert imageset(Lambda(n, 3*n + 2), S.Integers) == \ + imageset(Lambda(n, 3*n - 1), S.Integers) def test_ImageSet_simplification(): @@ -487,6 +456,11 @@ def test_ImageSet_simplification(): imageset(Lambda(m, sin(tan(m))), S.Integers) +def test_ImageSet_contains(): + from sympy.abc import x + assert (2, S.Half) in imageset(x, (x, 1/x), S.Integers) + + def test_ComplexRegion_contains(): # contains in ComplexRegion diff --git a/sympy/solvers/tests/test_diophantine.py b/sympy/solvers/tests/test_diophantine.py index b20b0e25a6..3617763686 100644 --- a/sympy/solvers/tests/test_diophantine.py +++ b/sympy/solvers/tests/test_diophantine.py @@ -317,14 +317,11 @@ def test_transformation_to_normal(): def test_diop_ternary_quadratic(): - # Commented out test cases should be uncommented after - # the bug with factor_list() gets merged. - assert check_solutions(2*x**2 + z**2 + y**2 - 4*x*y) assert check_solutions(x**2 - y**2 - z**2 - x*y - y*z) assert check_solutions(3*x**2 - x*y - y*z - x*z) assert check_solutions(x**2 - y*z - x*z) - #assert check_solutions(5*x**2 - 3*x*y - x*z) + assert check_solutions(5*x**2 - 3*x*y - x*z) assert check_solutions(4*x**2 - 5*y**2 - x*z) assert check_solutions(3*x**2 + 2*y**2 - z**2 - 2*x*y + 5*y*z - 7*y*z) assert check_solutions(8*x**2 - 12*y*z) @@ -375,12 +372,9 @@ def test_parametrize_ternary_quadratic(): def test_no_square_ternary_quadratic(): - # Commented out test cases should be uncommented after - # the bug with factor_list() gets merged. 
- assert check_solutions(2*x*y + y*z - 3*x*z) assert check_solutions(189*x*y - 345*y*z - 12*x*z) - #assert check_solutions(23*x*y + 34*y*z) + assert check_solutions(23*x*y + 34*y*z) assert check_solutions(x*y + y*z + z*x) assert check_solutions(23*x*y + 23*y*z + 23*x*z) @@ -394,24 +388,21 @@ def test_descent(): def test_diophantine(): - # Commented out test cases should be uncommented after - # the bug with factor_list() gets merged. - assert check_solutions((x - y)*(y - z)*(z - x)) assert check_solutions((x - y)*(x**2 + y**2 - z**2)) assert check_solutions((x - 3*y + 7*z)*(x**2 + y**2 - z**2)) assert check_solutions((x**2 - 3*y**2 - 1)) - #assert check_solutions(y**2 + 7*x*y) - #assert check_solutions(x**2 - 3*x*y + y**2) - #assert check_solutions(z*(x**2 - y**2 - 15)) - #assert check_solutions(x*(2*y - 2*z + 5)) + assert check_solutions(y**2 + 7*x*y) + assert check_solutions(x**2 - 3*x*y + y**2) + assert check_solutions(z*(x**2 - y**2 - 15)) + assert check_solutions(x*(2*y - 2*z + 5)) assert check_solutions((x**2 - 3*y**2 - 1)*(x**2 - y**2 - 15)) assert check_solutions((x**2 - 3*y**2 - 1)*(y - 7*z)) assert check_solutions((x**2 + y**2 - z**2)*(x - 7*y - 3*z + 4*w)) # Following test case caused problems in parametric representation # But this can be solved by factroing out y. # No need to use methods for ternary quadratic equations. - #assert check_solutions(y**2 - 7*x*y + 4*y*z) + assert check_solutions(y**2 - 7*x*y + 4*y*z) assert check_solutions(x**2 - 2*x + 1) diff --git a/sympy/solvers/tests/test_solveset.py b/sympy/solvers/tests/test_solveset.py index 943c783591..00552f09cd 100644 --- a/sympy/solvers/tests/test_solveset.py +++ b/sympy/solvers/tests/test_solveset.py @@ -1,5 +1,5 @@ from sympy import ( - Abs, Dummy, Eq, Gt, Function, + Abs, Dummy, Eq, Gt, Function, Mod, LambertW, Piecewise, Poly, Rational, S, Symbol, Matrix, asin, acos, acsc, asec, atan, atanh, cos, csc, erf, erfinv, erfc, erfcinv, exp, log, pi, sin, sinh, sec, sqrt, symbols, @@ -105,31 +105,31 @@ def ireal(x, s=S.Reals): imageset(Lambda(n, 2*n*pi - acos(y)), S.Integers))) assert invert_real(cos(exp(x)), y, x) == \ - (x, Union(imageset(Lambda(n, log(2*n*pi + acos(y))), S.Integers), \ - imageset(Lambda(n, log(2*n*pi - acos(y))), S.Integers))) + (x, Union(imageset(Lambda(n, log(2*n*pi + Mod(acos(y), 2*pi))), S.Integers), \ + imageset(Lambda(n, log(2*n*pi + Mod(-acos(y), 2*pi))), S.Integers))) assert invert_real(sec(x), y, x) == \ (x, Union(imageset(Lambda(n, 2*n*pi + asec(y)), S.Integers), \ imageset(Lambda(n, 2*n*pi - asec(y)), S.Integers))) assert invert_real(sec(exp(x)), y, x) == \ - (x, Union(imageset(Lambda(n, log(2*n*pi + asec(y))), S.Integers), \ - imageset(Lambda(n, log(2*n*pi - asec(y))), S.Integers))) + (x, Union(imageset(Lambda(n, log(2*n*pi + Mod(asec(y), 2*pi))), S.Integers), \ + imageset(Lambda(n, log(2*n*pi + Mod(-asec(y), 2*pi))), S.Integers))) assert invert_real(tan(x), y, x) == \ - (x, imageset(Lambda(n, n*pi + atan(y)), S.Integers)) + (x, imageset(Lambda(n, n*pi + atan(y) % pi), S.Integers)) assert invert_real(tan(exp(x)), y, x) == \ - (x, imageset(Lambda(n, log(n*pi + atan(y))), S.Integers)) + (x, imageset(Lambda(n, log(n*pi + atan(y) % pi)), S.Integers)) assert invert_real(cot(x), y, x) == \ - (x, imageset(Lambda(n, n*pi + acot(y)), S.Integers)) + (x, imageset(Lambda(n, n*pi + acot(y) % pi), S.Integers)) assert invert_real(cot(exp(x)), y, x) == \ - (x, imageset(Lambda(n, log(n*pi + acot(y))), S.Integers)) + (x, imageset(Lambda(n, log(n*pi + acot(y) % pi)), S.Integers)) assert 
invert_real(tan(tan(x)), y, x) == \ - (tan(x), imageset(Lambda(n, n*pi + atan(y)), S.Integers)) + (tan(x), imageset(Lambda(n, n*pi + atan(y) % pi), S.Integers)) x = Symbol('x', positive=True) assert invert_real(x**pi, y, x) == (x, FiniteSet(y**(1/pi)))
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks", "has_issue_reference", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 3 }, "num_modified_files": 5 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "mpmath>=0.19", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.7", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi @ file:///croot/certifi_1671487769961/work/certifi exceptiongroup==1.2.2 importlib-metadata==6.7.0 iniconfig==2.0.0 mpmath==1.3.0 packaging==24.0 pluggy==1.2.0 pytest==7.4.4 -e git+https://github.com/sympy/sympy.git@674f13344242236cf37c0bf4cc3ec17d68a1ae51#egg=sympy tomli==2.0.1 typing_extensions==4.7.1 zipp==3.15.0
name: sympy channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2022.12.7=py37h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=22.3.1=py37h06a4308_0 - python=3.7.16=h7a1cb2a_0 - readline=8.2=h5eee18b_0 - setuptools=65.6.3=py37h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.38.4=py37h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - exceptiongroup==1.2.2 - importlib-metadata==6.7.0 - iniconfig==2.0.0 - mpmath==1.3.0 - packaging==24.0 - pluggy==1.2.0 - pytest==7.4.4 - tomli==2.0.1 - typing-extensions==4.7.1 - zipp==3.15.0 prefix: /opt/conda/envs/sympy
[ "sympy/core/tests/test_operations.py::test_lattice_make_args", "sympy/polys/tests/test_polytools.py::test_factor_terms", "sympy/sets/tests/test_fancysets.py::test_Integers_eval_imageset", "sympy/sets/tests/test_fancysets.py::test_Range_eval_imageset", "sympy/sets/tests/test_fancysets.py::test_infinitely_indexed_set_1", "sympy/sets/tests/test_fancysets.py::test_infinitely_indexed_set_2", "sympy/sets/tests/test_fancysets.py::test_infinitely_indexed_set_3", "sympy/sets/tests/test_fancysets.py::test_ImageSet_contains", "sympy/solvers/tests/test_diophantine.py::test_no_square_ternary_quadratic", "sympy/solvers/tests/test_diophantine.py::test_diophantine", "sympy/solvers/tests/test_solveset.py::test_invert_real" ]
[ "sympy/polys/tests/test_polytools.py::test_factor_noeval", "sympy/polys/tests/test_polytools.py::test_poly_matching_consistency", "sympy/polys/tests/test_polytools.py::test_issue_5786", "sympy/sets/tests/test_fancysets.py::test_halfcircle", "sympy/solvers/tests/test_diophantine.py::test_quadratic_non_perfect_square", "sympy/solvers/tests/test_diophantine.py::test_issue_9106", "sympy/solvers/tests/test_diophantine.py::test_quadratic_non_perfect_slow", "sympy/solvers/tests/test_diophantine.py::test_DN", "sympy/solvers/tests/test_diophantine.py::test_diop_ternary_quadratic", "sympy/solvers/tests/test_solveset.py::test_solve_sqrt_fail", "sympy/solvers/tests/test_solveset.py::test_uselogcombine_1", "sympy/solvers/tests/test_solveset.py::test_uselogcombine_2", "sympy/solvers/tests/test_solveset.py::test_rewrite_trigh", "sympy/solvers/tests/test_solveset.py::test_solve_only_exp_2", "sympy/solvers/tests/test_solveset.py::test_solve_quintics", "sympy/solvers/tests/test_solveset.py::test_solve_trig_abs", "sympy/solvers/tests/test_solveset.py::test_solve_trig_simplified", "sympy/solvers/tests/test_solveset.py::test_solve_lambert", "sympy/solvers/tests/test_solveset.py::test_conditionset_equality", "sympy/solvers/tests/test_solveset.py::test_issue_failing_pow" ]
[ "sympy/core/tests/test_operations.py::test_lattice_simple", "sympy/core/tests/test_operations.py::test_lattice_shortcircuit", "sympy/core/tests/test_operations.py::test_lattice_print", "sympy/polys/tests/test_polytools.py::test_Poly_from_dict", "sympy/polys/tests/test_polytools.py::test_Poly_from_list", "sympy/polys/tests/test_polytools.py::test_Poly_from_poly", "sympy/polys/tests/test_polytools.py::test_Poly_from_expr", "sympy/polys/tests/test_polytools.py::test_Poly__new__", "sympy/polys/tests/test_polytools.py::test_Poly__args", "sympy/polys/tests/test_polytools.py::test_Poly__gens", "sympy/polys/tests/test_polytools.py::test_Poly_zero", "sympy/polys/tests/test_polytools.py::test_Poly_one", "sympy/polys/tests/test_polytools.py::test_Poly__unify", "sympy/polys/tests/test_polytools.py::test_Poly_free_symbols", "sympy/polys/tests/test_polytools.py::test_PurePoly_free_symbols", "sympy/polys/tests/test_polytools.py::test_Poly__eq__", "sympy/polys/tests/test_polytools.py::test_PurePoly__eq__", "sympy/polys/tests/test_polytools.py::test_PurePoly_Poly", "sympy/polys/tests/test_polytools.py::test_Poly_get_domain", "sympy/polys/tests/test_polytools.py::test_Poly_set_domain", "sympy/polys/tests/test_polytools.py::test_Poly_get_modulus", "sympy/polys/tests/test_polytools.py::test_Poly_set_modulus", "sympy/polys/tests/test_polytools.py::test_Poly_add_ground", "sympy/polys/tests/test_polytools.py::test_Poly_sub_ground", "sympy/polys/tests/test_polytools.py::test_Poly_mul_ground", "sympy/polys/tests/test_polytools.py::test_Poly_quo_ground", "sympy/polys/tests/test_polytools.py::test_Poly_exquo_ground", "sympy/polys/tests/test_polytools.py::test_Poly_abs", "sympy/polys/tests/test_polytools.py::test_Poly_neg", "sympy/polys/tests/test_polytools.py::test_Poly_add", "sympy/polys/tests/test_polytools.py::test_Poly_sub", "sympy/polys/tests/test_polytools.py::test_Poly_mul", "sympy/polys/tests/test_polytools.py::test_Poly_sqr", "sympy/polys/tests/test_polytools.py::test_Poly_pow", "sympy/polys/tests/test_polytools.py::test_Poly_divmod", "sympy/polys/tests/test_polytools.py::test_Poly_eq_ne", "sympy/polys/tests/test_polytools.py::test_Poly_nonzero", "sympy/polys/tests/test_polytools.py::test_Poly_properties", "sympy/polys/tests/test_polytools.py::test_Poly_is_irreducible", "sympy/polys/tests/test_polytools.py::test_Poly_subs", "sympy/polys/tests/test_polytools.py::test_Poly_replace", "sympy/polys/tests/test_polytools.py::test_Poly_reorder", "sympy/polys/tests/test_polytools.py::test_Poly_ltrim", "sympy/polys/tests/test_polytools.py::test_Poly_has_only_gens", "sympy/polys/tests/test_polytools.py::test_Poly_to_ring", "sympy/polys/tests/test_polytools.py::test_Poly_to_field", "sympy/polys/tests/test_polytools.py::test_Poly_to_exact", "sympy/polys/tests/test_polytools.py::test_Poly_retract", "sympy/polys/tests/test_polytools.py::test_Poly_slice", "sympy/polys/tests/test_polytools.py::test_Poly_coeffs", "sympy/polys/tests/test_polytools.py::test_Poly_monoms", "sympy/polys/tests/test_polytools.py::test_Poly_terms", "sympy/polys/tests/test_polytools.py::test_Poly_all_coeffs", "sympy/polys/tests/test_polytools.py::test_Poly_all_monoms", "sympy/polys/tests/test_polytools.py::test_Poly_all_terms", "sympy/polys/tests/test_polytools.py::test_Poly_termwise", "sympy/polys/tests/test_polytools.py::test_Poly_length", "sympy/polys/tests/test_polytools.py::test_Poly_as_dict", "sympy/polys/tests/test_polytools.py::test_Poly_as_expr", "sympy/polys/tests/test_polytools.py::test_Poly_lift", 
"sympy/polys/tests/test_polytools.py::test_Poly_deflate", "sympy/polys/tests/test_polytools.py::test_Poly_inject", "sympy/polys/tests/test_polytools.py::test_Poly_eject", "sympy/polys/tests/test_polytools.py::test_Poly_exclude", "sympy/polys/tests/test_polytools.py::test_Poly__gen_to_level", "sympy/polys/tests/test_polytools.py::test_Poly_degree", "sympy/polys/tests/test_polytools.py::test_Poly_degree_list", "sympy/polys/tests/test_polytools.py::test_Poly_total_degree", "sympy/polys/tests/test_polytools.py::test_Poly_homogenize", "sympy/polys/tests/test_polytools.py::test_Poly_homogeneous_order", "sympy/polys/tests/test_polytools.py::test_Poly_LC", "sympy/polys/tests/test_polytools.py::test_Poly_TC", "sympy/polys/tests/test_polytools.py::test_Poly_EC", "sympy/polys/tests/test_polytools.py::test_Poly_coeff", "sympy/polys/tests/test_polytools.py::test_Poly_nth", "sympy/polys/tests/test_polytools.py::test_Poly_LM", "sympy/polys/tests/test_polytools.py::test_Poly_LM_custom_order", "sympy/polys/tests/test_polytools.py::test_Poly_EM", "sympy/polys/tests/test_polytools.py::test_Poly_LT", "sympy/polys/tests/test_polytools.py::test_Poly_ET", "sympy/polys/tests/test_polytools.py::test_Poly_max_norm", "sympy/polys/tests/test_polytools.py::test_Poly_l1_norm", "sympy/polys/tests/test_polytools.py::test_Poly_clear_denoms", "sympy/polys/tests/test_polytools.py::test_Poly_rat_clear_denoms", "sympy/polys/tests/test_polytools.py::test_Poly_integrate", "sympy/polys/tests/test_polytools.py::test_Poly_diff", "sympy/polys/tests/test_polytools.py::test_issue_9585", "sympy/polys/tests/test_polytools.py::test_Poly_eval", "sympy/polys/tests/test_polytools.py::test_Poly___call__", "sympy/polys/tests/test_polytools.py::test_parallel_poly_from_expr", "sympy/polys/tests/test_polytools.py::test_pdiv", "sympy/polys/tests/test_polytools.py::test_div", "sympy/polys/tests/test_polytools.py::test_gcdex", "sympy/polys/tests/test_polytools.py::test_revert", "sympy/polys/tests/test_polytools.py::test_subresultants", "sympy/polys/tests/test_polytools.py::test_resultant", "sympy/polys/tests/test_polytools.py::test_discriminant", "sympy/polys/tests/test_polytools.py::test_dispersion", "sympy/polys/tests/test_polytools.py::test_gcd_list", "sympy/polys/tests/test_polytools.py::test_lcm_list", "sympy/polys/tests/test_polytools.py::test_gcd", "sympy/polys/tests/test_polytools.py::test_gcd_numbers_vs_polys", "sympy/polys/tests/test_polytools.py::test_terms_gcd", "sympy/polys/tests/test_polytools.py::test_trunc", "sympy/polys/tests/test_polytools.py::test_monic", "sympy/polys/tests/test_polytools.py::test_content", "sympy/polys/tests/test_polytools.py::test_primitive", "sympy/polys/tests/test_polytools.py::test_compose", "sympy/polys/tests/test_polytools.py::test_shift", "sympy/polys/tests/test_polytools.py::test_sturm", "sympy/polys/tests/test_polytools.py::test_gff", "sympy/polys/tests/test_polytools.py::test_sqf_norm", "sympy/polys/tests/test_polytools.py::test_sqf", "sympy/polys/tests/test_polytools.py::test_factor", "sympy/polys/tests/test_polytools.py::test_factor_large", "sympy/polys/tests/test_polytools.py::test_intervals", "sympy/polys/tests/test_polytools.py::test_refine_root", "sympy/polys/tests/test_polytools.py::test_count_roots", "sympy/polys/tests/test_polytools.py::test_Poly_root", "sympy/polys/tests/test_polytools.py::test_real_roots", "sympy/polys/tests/test_polytools.py::test_all_roots", "sympy/polys/tests/test_polytools.py::test_nroots", "sympy/polys/tests/test_polytools.py::test_ground_roots", 
"sympy/polys/tests/test_polytools.py::test_nth_power_roots_poly", "sympy/polys/tests/test_polytools.py::test_torational_factor_list", "sympy/polys/tests/test_polytools.py::test_cancel", "sympy/polys/tests/test_polytools.py::test_reduced", "sympy/polys/tests/test_polytools.py::test_groebner", "sympy/polys/tests/test_polytools.py::test_fglm", "sympy/polys/tests/test_polytools.py::test_is_zero_dimensional", "sympy/polys/tests/test_polytools.py::test_GroebnerBasis", "sympy/polys/tests/test_polytools.py::test_poly", "sympy/polys/tests/test_polytools.py::test_keep_coeff", "sympy/polys/tests/test_polytools.py::test_noncommutative", "sympy/polys/tests/test_polytools.py::test_to_rational_coeffs", "sympy/sets/tests/test_fancysets.py::test_naturals", "sympy/sets/tests/test_fancysets.py::test_naturals0", "sympy/sets/tests/test_fancysets.py::test_integers", "sympy/sets/tests/test_fancysets.py::test_ImageSet", "sympy/sets/tests/test_fancysets.py::test_image_is_ImageSet", "sympy/sets/tests/test_fancysets.py::test_ImageSet_iterator_not_injective", "sympy/sets/tests/test_fancysets.py::test_inf_Range_len", "sympy/sets/tests/test_fancysets.py::test_Range_set", "sympy/sets/tests/test_fancysets.py::test_range_range_intersection", "sympy/sets/tests/test_fancysets.py::test_range_interval_intersection", "sympy/sets/tests/test_fancysets.py::test_fun", "sympy/sets/tests/test_fancysets.py::test_Reals", "sympy/sets/tests/test_fancysets.py::test_Complex", "sympy/sets/tests/test_fancysets.py::test_intersections", "sympy/sets/tests/test_fancysets.py::test_imageset_intersect_real", "sympy/sets/tests/test_fancysets.py::test_ImageSet_simplification", "sympy/sets/tests/test_fancysets.py::test_ComplexRegion_contains", "sympy/sets/tests/test_fancysets.py::test_ComplexRegion_intersect", "sympy/sets/tests/test_fancysets.py::test_ComplexRegion_union", "sympy/sets/tests/test_fancysets.py::test_ComplexRegion_measure", "sympy/sets/tests/test_fancysets.py::test_normalize_theta_set", "sympy/sets/tests/test_fancysets.py::test_ComplexRegion_FiniteSet", "sympy/sets/tests/test_fancysets.py::test_union_RealSubSet", "sympy/sets/tests/test_fancysets.py::test_issue_9980", "sympy/solvers/tests/test_diophantine.py::test_input_format", "sympy/solvers/tests/test_diophantine.py::test_univariate", "sympy/solvers/tests/test_diophantine.py::test_linear", "sympy/solvers/tests/test_diophantine.py::test_quadratic_simple_hyperbolic_case", "sympy/solvers/tests/test_diophantine.py::test_quadratic_elliptical_case", "sympy/solvers/tests/test_diophantine.py::test_quadratic_parabolic_case", "sympy/solvers/tests/test_diophantine.py::test_quadratic_perfect_square", "sympy/solvers/tests/test_diophantine.py::test_bf_pell", "sympy/solvers/tests/test_diophantine.py::test_length", "sympy/solvers/tests/test_diophantine.py::test_transformation_to_pell", "sympy/solvers/tests/test_diophantine.py::test_find_DN", "sympy/solvers/tests/test_diophantine.py::test_ldescent", "sympy/solvers/tests/test_diophantine.py::test_diop_ternary_quadratic_normal", "sympy/solvers/tests/test_diophantine.py::test_transformation_to_normal", "sympy/solvers/tests/test_diophantine.py::test_pairwise_prime", "sympy/solvers/tests/test_diophantine.py::test_square_factor", "sympy/solvers/tests/test_diophantine.py::test_parametrize_ternary_quadratic", "sympy/solvers/tests/test_diophantine.py::test_descent", "sympy/solvers/tests/test_diophantine.py::test_general_pythagorean", "sympy/solvers/tests/test_diophantine.py::test_diop_general_sum_of_squares", 
"sympy/solvers/tests/test_diophantine.py::test_partition", "sympy/solvers/tests/test_diophantine.py::test_prime_as_sum_of_two_squares", "sympy/solvers/tests/test_diophantine.py::test_sum_of_three_squares", "sympy/solvers/tests/test_diophantine.py::test_sum_of_four_squares", "sympy/solvers/tests/test_diophantine.py::test_power_representation", "sympy/solvers/tests/test_diophantine.py::test_assumptions", "sympy/solvers/tests/test_solveset.py::test_invert_complex", "sympy/solvers/tests/test_solveset.py::test_domain_check", "sympy/solvers/tests/test_solveset.py::test_is_function_class_equation", "sympy/solvers/tests/test_solveset.py::test_garbage_input", "sympy/solvers/tests/test_solveset.py::test_solve_mul", "sympy/solvers/tests/test_solveset.py::test_solve_invert", "sympy/solvers/tests/test_solveset.py::test_errorinverses", "sympy/solvers/tests/test_solveset.py::test_solve_polynomial", "sympy/solvers/tests/test_solveset.py::test_return_root_of", "sympy/solvers/tests/test_solveset.py::test__has_rational_power", "sympy/solvers/tests/test_solveset.py::test_solveset_sqrt_1", "sympy/solvers/tests/test_solveset.py::test_solveset_sqrt_2", "sympy/solvers/tests/test_solveset.py::test_solve_sqrt_3", "sympy/solvers/tests/test_solveset.py::test_solve_polynomial_symbolic_param", "sympy/solvers/tests/test_solveset.py::test_solve_rational", "sympy/solvers/tests/test_solveset.py::test_solveset_real_gen_is_pow", "sympy/solvers/tests/test_solveset.py::test_no_sol", "sympy/solvers/tests/test_solveset.py::test_sol_zero_real", "sympy/solvers/tests/test_solveset.py::test_no_sol_rational_extragenous", "sympy/solvers/tests/test_solveset.py::test_solve_polynomial_cv_1a", "sympy/solvers/tests/test_solveset.py::test_solveset_real_rational", "sympy/solvers/tests/test_solveset.py::test_solveset_real_log", "sympy/solvers/tests/test_solveset.py::test_poly_gens", "sympy/solvers/tests/test_solveset.py::test_solve_abs", "sympy/solvers/tests/test_solveset.py::test_real_imag_splitting", "sympy/solvers/tests/test_solveset.py::test_units", "sympy/solvers/tests/test_solveset.py::test_solve_only_exp_1", "sympy/solvers/tests/test_solveset.py::test_atan2", "sympy/solvers/tests/test_solveset.py::test_piecewise", "sympy/solvers/tests/test_solveset.py::test_solveset_complex_polynomial", "sympy/solvers/tests/test_solveset.py::test_sol_zero_complex", "sympy/solvers/tests/test_solveset.py::test_solveset_complex_rational", "sympy/solvers/tests/test_solveset.py::test_solveset_complex_exp", "sympy/solvers/tests/test_solveset.py::test_solve_complex_log", "sympy/solvers/tests/test_solveset.py::test_solve_complex_sqrt", "sympy/solvers/tests/test_solveset.py::test_solveset_complex_tan", "sympy/solvers/tests/test_solveset.py::test_solve_trig", "sympy/solvers/tests/test_solveset.py::test_solve_invalid_sol", "sympy/solvers/tests/test_solveset.py::test_solveset", "sympy/solvers/tests/test_solveset.py::test_conditionset", "sympy/solvers/tests/test_solveset.py::test_solveset_domain", "sympy/solvers/tests/test_solveset.py::test_improve_coverage", "sympy/solvers/tests/test_solveset.py::test_issue_9522", "sympy/solvers/tests/test_solveset.py::test_linear_eq_to_matrix", "sympy/solvers/tests/test_solveset.py::test_linsolve", "sympy/solvers/tests/test_solveset.py::test_issue_9556", "sympy/solvers/tests/test_solveset.py::test_issue_9611", "sympy/solvers/tests/test_solveset.py::test_issue_9557", "sympy/solvers/tests/test_solveset.py::test_issue_9778", "sympy/solvers/tests/test_solveset.py::test_issue_9849", 
"sympy/solvers/tests/test_solveset.py::test_issue_9953", "sympy/solvers/tests/test_solveset.py::test_issue_9913", "sympy/solvers/tests/test_solveset.py::test_issue_10397", "sympy/solvers/tests/test_solveset.py::test_simplification", "sympy/solvers/tests/test_solveset.py::test_issue_10555", "sympy/solvers/tests/test_solveset.py::test_issue_8715" ]
[]
BSD
480
box__box-python-sdk-125
162fc82c044a044284e01a403ecb8bf43da2d23d
2016-03-21 21:26:37
ded623f4b6de0530d8f983d3c3d2cafe646c126b
diff --git a/HISTORY.rst b/HISTORY.rst index 5ca5148..ec26cb8 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -6,6 +6,10 @@ Release History Upcoming ++++++++ +1.5.1 (2016-03-23) +++++++++++++++++++ + +- Added a ``revoke()`` method to the ``OAuth2`` class. Calling it will revoke the current access/refresh token pair. 1.5.0 (2016-03-17) diff --git a/boxsdk/auth/developer_token_auth.py b/boxsdk/auth/developer_token_auth.py index 33c7446..9fa9c4e 100644 --- a/boxsdk/auth/developer_token_auth.py +++ b/boxsdk/auth/developer_token_auth.py @@ -32,3 +32,9 @@ def _refresh(self, access_token): """ self._access_token = self._refresh_developer_token() return self._access_token, None + + def revoke(self): + """ + Base class override. + Do nothing; developer tokens can't be revoked without client ID and secret. + """ diff --git a/boxsdk/auth/oauth2.py b/boxsdk/auth/oauth2.py index 9daf9ad..a810e79 100644 --- a/boxsdk/auth/oauth2.py +++ b/boxsdk/auth/oauth2.py @@ -213,6 +213,8 @@ def _get_state_csrf_token(): return 'box_csrf_token_' + ''.join(ascii_alphabet[int(system_random.random() * ascii_len)] for _ in range(16)) def _store_tokens(self, access_token, refresh_token): + self._access_token = access_token + self._refresh_token = refresh_token if self._store_tokens_callback is not None: self._store_tokens_callback(access_token, refresh_token) @@ -240,17 +242,41 @@ def send_token_request(self, data, access_token, expect_refresh_token=True): url, data=data, headers=headers, - access_token=access_token + access_token=access_token, ) if not network_response.ok: raise BoxOAuthException(network_response.status_code, network_response.content, url, 'POST') try: response = network_response.json() - self._access_token = response['access_token'] - self._refresh_token = response.get('refresh_token', None) - if self._refresh_token is None and expect_refresh_token: + access_token = response['access_token'] + refresh_token = response.get('refresh_token', None) + if refresh_token is None and expect_refresh_token: raise BoxOAuthException(network_response.status_code, network_response.content, url, 'POST') except (ValueError, KeyError): raise BoxOAuthException(network_response.status_code, network_response.content, url, 'POST') - self._store_tokens(self._access_token, self._refresh_token) + self._store_tokens(access_token, refresh_token) return self._access_token, self._refresh_token + + def revoke(self): + """ + Revoke the authorization for the current access/refresh token pair. + """ + with self._refresh_lock: + access_token, refresh_token = self._get_tokens() + token_to_revoke = access_token or refresh_token + if token_to_revoke is None: + return + url = '{base_auth_url}/revoke'.format(base_auth_url=API.OAUTH2_API_URL) + network_response = self._network_layer.request( + 'POST', + url, + data={ + 'client_id': self._client_id, + 'client_secret': self._client_secret, + 'token': token_to_revoke, + }, + access_token=access_token, + ) + if not network_response.ok: + raise BoxOAuthException(network_response.status_code, network_response.content, url, 'POST') + self._store_tokens(None, None) diff --git a/boxsdk/version.py b/boxsdk/version.py index cfa1704..c64a173 100644 --- a/boxsdk/version.py +++ b/boxsdk/version.py @@ -3,4 +3,4 @@ from __future__ import unicode_literals, absolute_import -__version__ = '1.5.0' +__version__ = '1.5.1'
Add a way to revoke OAuth tokens Using the OAuth2 class you can easily authenticate and refresh tokens but there is not an easy way to revoke the token. Right now to revoke the token, we would have to do something like: ```python data = { 'client_id': CLIENT_ID, 'client_secret': CLIENT_SECRET, 'token': refresh_token } client.make_request( 'POST', '{0}/revoke'.format(API.OAUTH2_API_URL), data=data ) ``` It would be nice if I could do something like `oauth.revoke()` to revoke and clear the access token and the refresh token.
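Until a first-class `revoke()` exists, the manual request above can be wrapped in a small helper. This is a hedged sketch based purely on the snippet in the issue: the helper name `revoke_tokens` and its argument list are hypothetical, `client.make_request` and `API.OAUTH2_API_URL` come from the issue itself, and the import path for `API` is assumed to be the SDK's config module.

```python
from boxsdk.config import API

def revoke_tokens(client, client_id, client_secret, token):
    """POST a token to Box's /revoke endpoint, invalidating the access/refresh pair."""
    client.make_request(
        'POST',
        '{0}/revoke'.format(API.OAUTH2_API_URL),
        data={
            'client_id': client_id,
            'client_secret': client_secret,
            'token': token,
        },
    )
```

The patch shown earlier in this record takes the same request and makes it a method on `OAuth2`, additionally clearing the stored token pair afterwards.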
box/box-python-sdk
diff --git a/test/unit/auth/test_oauth2.py b/test/unit/auth/test_oauth2.py index 4e5b28b..af4d6ed 100644 --- a/test/unit/auth/test_oauth2.py +++ b/test/unit/auth/test_oauth2.py @@ -275,3 +275,42 @@ def test_token_request_allows_missing_refresh_token(mock_network_layer): network_layer=mock_network_layer, ) oauth.send_token_request({}, access_token=None, expect_refresh_token=False) + + [email protected]( + 'access_token,refresh_token,expected_token_to_revoke', + ( + ('fake_access_token', 'fake_refresh_token', 'fake_access_token'), + (None, 'fake_refresh_token', 'fake_refresh_token') + ) +) +def test_revoke_sends_revoke_request( + client_id, + client_secret, + mock_network_layer, + access_token, + refresh_token, + expected_token_to_revoke, +): + mock_network_response = Mock() + mock_network_response.ok = True + mock_network_layer.request.return_value = mock_network_response + oauth = OAuth2( + client_id=client_id, + client_secret=client_secret, + access_token=access_token, + refresh_token=refresh_token, + network_layer=mock_network_layer, + ) + oauth.revoke() + mock_network_layer.request.assert_called_once_with( + 'POST', + '{0}/revoke'.format(API.OAUTH2_API_URL), + data={ + 'client_id': client_id, + 'client_secret': client_secret, + 'token': expected_token_to_revoke, + }, + access_token=access_token, + ) + assert oauth.access_token is None
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 0, "test_score": 2 }, "num_modified_files": 4 }
1.5
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-xdist", "mock", "sqlalchemy", "bottle" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.7", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
async-timeout==4.0.3 bottle==0.13.2 -e git+https://github.com/box/box-python-sdk.git@162fc82c044a044284e01a403ecb8bf43da2d23d#egg=boxsdk certifi @ file:///croot/certifi_1671487769961/work/certifi cffi==1.15.1 charset-normalizer==3.4.1 cryptography==44.0.2 exceptiongroup==1.2.2 execnet==2.0.2 greenlet==3.1.1 idna==3.10 importlib-metadata==6.7.0 iniconfig==2.0.0 mock==5.2.0 packaging==24.0 pluggy==1.2.0 pycparser==2.21 PyJWT==2.8.0 pytest==7.4.4 pytest-xdist==3.5.0 redis==5.0.8 requests==2.31.0 requests-toolbelt==1.0.0 six==1.17.0 SQLAlchemy==2.0.40 tomli==2.0.1 typing_extensions==4.7.1 urllib3==2.0.7 zipp==3.15.0
name: box-python-sdk channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2022.12.7=py37h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=22.3.1=py37h06a4308_0 - python=3.7.16=h7a1cb2a_0 - readline=8.2=h5eee18b_0 - setuptools=65.6.3=py37h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.38.4=py37h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - async-timeout==4.0.3 - bottle==0.13.2 - cffi==1.15.1 - charset-normalizer==3.4.1 - cryptography==44.0.2 - exceptiongroup==1.2.2 - execnet==2.0.2 - greenlet==3.1.1 - idna==3.10 - importlib-metadata==6.7.0 - iniconfig==2.0.0 - mock==5.2.0 - packaging==24.0 - pluggy==1.2.0 - pycparser==2.21 - pyjwt==2.8.0 - pytest==7.4.4 - pytest-xdist==3.5.0 - redis==5.0.8 - requests==2.31.0 - requests-toolbelt==1.0.0 - six==1.17.0 - sqlalchemy==2.0.40 - tomli==2.0.1 - typing-extensions==4.7.1 - urllib3==2.0.7 - zipp==3.15.0 prefix: /opt/conda/envs/box-python-sdk
[ "test/unit/auth/test_oauth2.py::test_revoke_sends_revoke_request[fake_access_token-fake_refresh_token-fake_access_token]", "test/unit/auth/test_oauth2.py::test_revoke_sends_revoke_request[None-fake_refresh_token-fake_refresh_token]" ]
[]
[ "test/unit/auth/test_oauth2.py::test_get_correct_authorization_url[https://url.com/foo?bar=baz]", "test/unit/auth/test_oauth2.py::test_get_correct_authorization_url[https://\\u0215\\u0155\\u013e.com/\\u0192\\u0151\\u0151?\\u0184\\u0201\\u0155=\\u0184\\u0201\\u017c]", "test/unit/auth/test_oauth2.py::test_get_correct_authorization_url[None]", "test/unit/auth/test_oauth2.py::test_authenticate_send_post_request_with_correct_params", "test/unit/auth/test_oauth2.py::test_refresh_send_post_request_with_correct_params_and_handles_multiple_requests[0]", "test/unit/auth/test_oauth2.py::test_refresh_send_post_request_with_correct_params_and_handles_multiple_requests[1]", "test/unit/auth/test_oauth2.py::test_refresh_send_post_request_with_correct_params_and_handles_multiple_requests[2]", "test/unit/auth/test_oauth2.py::test_refresh_send_post_request_with_correct_params_and_handles_multiple_requests[3]", "test/unit/auth/test_oauth2.py::test_refresh_send_post_request_with_correct_params_and_handles_multiple_requests[4]", "test/unit/auth/test_oauth2.py::test_refresh_send_post_request_with_correct_params_and_handles_multiple_requests[5]", "test/unit/auth/test_oauth2.py::test_refresh_send_post_request_with_correct_params_and_handles_multiple_requests[6]", "test/unit/auth/test_oauth2.py::test_refresh_send_post_request_with_correct_params_and_handles_multiple_requests[7]", "test/unit/auth/test_oauth2.py::test_refresh_send_post_request_with_correct_params_and_handles_multiple_requests[8]", "test/unit/auth/test_oauth2.py::test_refresh_send_post_request_with_correct_params_and_handles_multiple_requests[9]", "test/unit/auth/test_oauth2.py::test_authenticate_stores_tokens_correctly", "test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens0-0]", "test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens0-1]", "test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens0-2]", "test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens0-3]", "test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens0-4]", "test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens0-5]", "test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens0-6]", "test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens0-7]", "test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens0-8]", "test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens0-9]", "test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens1-0]", "test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens1-1]", 
"test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens1-2]", "test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens1-3]", "test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens1-4]", "test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens1-5]", "test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens1-6]", "test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens1-7]", "test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens1-8]", "test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens1-9]", "test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens2-0]", "test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens2-1]", "test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens2-2]", "test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens2-3]", "test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens2-4]", "test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens2-5]", "test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens2-6]", "test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens2-7]", "test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens2-8]", "test/unit/auth/test_oauth2.py::test_refresh_gives_back_the_correct_response_and_handles_multiple_requests[network_response_with_missing_tokens2-9]", "test/unit/auth/test_oauth2.py::test_token_request_raises_box_oauth_exception_when_getting_bad_network_response[test_method0]", "test/unit/auth/test_oauth2.py::test_token_request_raises_box_oauth_exception_when_getting_bad_network_response[test_method1]", "test/unit/auth/test_oauth2.py::test_token_request_raises_box_oauth_exception_when_no_json_object_can_be_decoded[test_method0]", "test/unit/auth/test_oauth2.py::test_token_request_raises_box_oauth_exception_when_no_json_object_can_be_decoded[test_method1]", "test/unit/auth/test_oauth2.py::test_token_request_raises_box_oauth_exception_when_tokens_are_not_in_the_response[network_response_with_missing_tokens0-test_method0]", "test/unit/auth/test_oauth2.py::test_token_request_raises_box_oauth_exception_when_tokens_are_not_in_the_response[network_response_with_missing_tokens0-test_method1]", 
"test/unit/auth/test_oauth2.py::test_token_request_raises_box_oauth_exception_when_tokens_are_not_in_the_response[network_response_with_missing_tokens1-test_method0]", "test/unit/auth/test_oauth2.py::test_token_request_raises_box_oauth_exception_when_tokens_are_not_in_the_response[network_response_with_missing_tokens1-test_method1]", "test/unit/auth/test_oauth2.py::test_token_request_raises_box_oauth_exception_when_tokens_are_not_in_the_response[network_response_with_missing_tokens2-test_method0]", "test/unit/auth/test_oauth2.py::test_token_request_raises_box_oauth_exception_when_tokens_are_not_in_the_response[network_response_with_missing_tokens2-test_method1]", "test/unit/auth/test_oauth2.py::test_token_request_allows_missing_refresh_token" ]
[]
Apache License 2.0
481
pystorm__pystorm-21
ee14ec810de20d62929a418fff88285833b6afe5
2016-03-22 02:07:10
7f0d6b320e9943082bcdfd6de93d161a3b174e12
diff --git a/pystorm/serializers/serializer.py b/pystorm/serializers/serializer.py index 03a15de..58f6a0e 100644 --- a/pystorm/serializers/serializer.py +++ b/pystorm/serializers/serializer.py @@ -4,6 +4,8 @@ each serializer a Java counterpart needs to exist. from __future__ import absolute_import, print_function, unicode_literals +from ..exceptions import StormWentAwayError + class Serializer(object): @@ -21,9 +23,12 @@ class Serializer(object): def send_message(self, msg_dict): """Serialize a message dictionary and write it to the output stream.""" with self._writer_lock: - self.output_stream.flush() - self.output_stream.write(self.serialize_dict(msg_dict)) - self.output_stream.flush() + try: + self.output_stream.flush() + self.output_stream.write(self.serialize_dict(msg_dict)) + self.output_stream.flush() + except IOError: + raise StormWentAwayError() def serialize_dict(self, msg_dict): """Convert a message dictionary to bytes. Used by send_message"""
IOError is raised instead of StormWentAwayError if pipe breaks during send_message ``` IOError: [Errno 32] Broken pipe (2 additional frame(s) were not displayed) ... File "pystorm/component.py", line 471, in run self._handle_run_exception(e) File "pystorm/component.py", line 483, in _handle_run_exception self.raise_exception(exc) File "pystorm/component.py", line 321, in raise_exception self.send_message({'command': 'error', 'msg': str(message)}) File "pystorm/component.py", line 305, in send_message self.serializer.send_message(message) File "pystorm/serializers/serializer.py", line 26, in send_message self.output_stream.flush() ``` This should be a `StormWentAwayError`. Will fix ASAP.
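The fix in the patch above is a narrow exception translation inside `Serializer.send_message`: a broken-pipe `IOError` raised while flushing or writing the serialized message is re-raised as the SDK's own `StormWentAwayError`. A hedged, standalone sketch of the same idea (written as a free function rather than the real method):

```python
from pystorm.exceptions import StormWentAwayError

def send_message(output_stream, payload_bytes):
    """Write an already-serialized message, translating a broken pipe
    into pystorm's own signal that Storm has gone away."""
    try:
        output_stream.flush()
        output_stream.write(payload_bytes)
        output_stream.flush()
    except IOError:
        raise StormWentAwayError()
```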
pystorm/pystorm
diff --git a/test/pystorm/serializers/test_json_serializer.py b/test/pystorm/serializers/test_json_serializer.py index a18a9e8..f460f6e 100644 --- a/test/pystorm/serializers/test_json_serializer.py +++ b/test/pystorm/serializers/test_json_serializer.py @@ -7,7 +7,9 @@ except ImportError: import mock import simplejson as json +import pytest +from pystorm.exceptions import StormWentAwayError from pystorm.serializers.json_serializer import JSONSerializer from .serializer import SerializerTestCase @@ -33,3 +35,12 @@ class TestJSONSerializer(SerializerTestCase): self.instance.output_stream = StringIO() self.instance.send_message(msg_dict) assert self.instance.output_stream.getvalue() == expected_output + + def test_send_message_raises_stormwentaway(self): + string_io_mock = mock.MagicMock(autospec=True) + def raiser(): # lambdas can't raise + raise IOError() + string_io_mock.flush.side_effect = raiser + self.instance.output_stream = string_io_mock + with pytest.raises(StormWentAwayError): + self.instance.send_message({'hello': "world",}) diff --git a/test/pystorm/serializers/test_msgpack_serializer.py b/test/pystorm/serializers/test_msgpack_serializer.py index e36a1ec..d51c19f 100644 --- a/test/pystorm/serializers/test_msgpack_serializer.py +++ b/test/pystorm/serializers/test_msgpack_serializer.py @@ -7,7 +7,9 @@ except ImportError: import mock import msgpack +import pytest +from pystorm.exceptions import StormWentAwayError from pystorm.serializers.msgpack_serializer import MsgpackSerializer from .serializer import SerializerTestCase @@ -32,3 +34,12 @@ class TestMsgpackSerializer(SerializerTestCase): expected_output = msgpack.packb(msg_dict) self.instance.send_message(msg_dict) assert self.instance.output_stream.getvalue() == expected_output + + def test_send_message_raises_stormwentaway(self): + bytes_io_mock = mock.MagicMock(autospec=True) + def raiser(): # lambdas can't raise + raise IOError() + bytes_io_mock.flush.side_effect = raiser + self.instance.output_stream = bytes_io_mock + with pytest.raises(StormWentAwayError): + self.instance.send_message({'hello': "world",})
{ "commit_name": "head_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 0, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 1 }
2.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[all]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": null, "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.5", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 importlib-metadata==4.8.3 iniconfig==1.1.1 msgpack-python==0.5.6 packaging==21.3 pluggy==1.0.0 py==1.11.0 pyparsing==3.1.4 -e git+https://github.com/pystorm/pystorm.git@ee14ec810de20d62929a418fff88285833b6afe5#egg=pystorm pytest==7.0.1 pytest-timeout==2.1.0 simplejson==3.20.1 six==1.17.0 tomli==1.2.3 typing_extensions==4.1.1 zipp==3.6.0
name: pystorm channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - msgpack-python==0.5.6 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-timeout==2.1.0 - simplejson==3.20.1 - six==1.17.0 - tomli==1.2.3 - typing-extensions==4.1.1 - zipp==3.6.0 prefix: /opt/conda/envs/pystorm
[ "test/pystorm/serializers/test_json_serializer.py::TestJSONSerializer::test_send_message_raises_stormwentaway", "test/pystorm/serializers/test_msgpack_serializer.py::TestMsgpackSerializer::test_send_message_raises_stormwentaway" ]
[]
[ "test/pystorm/serializers/test_json_serializer.py::TestJSONSerializer::test_read_message_dict", "test/pystorm/serializers/test_json_serializer.py::TestJSONSerializer::test_read_message_list", "test/pystorm/serializers/test_json_serializer.py::TestJSONSerializer::test_send_message", "test/pystorm/serializers/test_msgpack_serializer.py::TestMsgpackSerializer::test_read_message_dict", "test/pystorm/serializers/test_msgpack_serializer.py::TestMsgpackSerializer::test_read_message_list", "test/pystorm/serializers/test_msgpack_serializer.py::TestMsgpackSerializer::test_send_message" ]
[]
Apache License 2.0
482
sympy__sympy-10883
f7a8dbec25b04767a3a6996c11a03781184d45d7
2016-03-22 13:32:56
8bb5814067cfa0348fb8b708848f35dba2b55ff4
leosartaj: You should use a more informative PR title. Please read [this](https://github.com/sympy/sympy/wiki/Development-workflow#writing-pull-request-title-and-description).
diff --git a/sympy/functions/combinatorial/factorials.py b/sympy/functions/combinatorial/factorials.py index 48ea175555..fc11ad04fa 100644 --- a/sympy/functions/combinatorial/factorials.py +++ b/sympy/functions/combinatorial/factorials.py @@ -853,6 +853,9 @@ def _eval_rewrite_as_gamma(self, n, k): from sympy import gamma return gamma(n + 1)/(gamma(k + 1)*gamma(n - k + 1)) + def _eval_rewrite_as_tractable(self, n, k): + return self._eval_rewrite_as_gamma(n, k).rewrite('tractable') + def _eval_rewrite_as_FallingFactorial(self, n, k): if k.is_integer: return ff(n, k) / factorial(k)
Make limit work with binomial limit can work with binomial by converting to factorial first. ``` In [214]: limit(16**k/(k*binomial(2*k,k)**2), k, oo) --------------------------------------------------------------------------- NotImplementedError Traceback (most recent call last) /Users/aaronmeurer/Documents/Python/sympy/sympy/sympy/series/limits.py in doit(self, **hints) 171 try: --> 172 r = gruntz(e, z, z0, dir) 173 if r is S.NaN: /Users/aaronmeurer/Documents/Python/sympy/sympy/sympy/series/gruntz.py in gruntz(e, z, z0, dir) 640 if z0 == oo: --> 641 r = limitinf(e, z) 642 elif z0 == -oo: /Users/aaronmeurer/Documents/Python/sympy/sympy/sympy/series/gruntz.py in limitinf(e, x) 423 x = p --> 424 c0, e0 = mrv_leadterm(e, x) 425 sig = sign(e0, x) /Users/aaronmeurer/Documents/Python/sympy/sympy/sympy/series/gruntz.py in mrv_leadterm(e, x) 479 if Omega == SubsSet(): --> 480 Omega, exps = mrv(e, x) 481 if not Omega: /Users/aaronmeurer/Documents/Python/sympy/sympy/sympy/series/gruntz.py in mrv(e, x) 258 s1, e1 = mrv(a, x) --> 259 s2, e2 = mrv(b, x) 260 return mrv_max1(s1, s2, e.func(i, e1, e2), x) /Users/aaronmeurer/Documents/Python/sympy/sympy/sympy/series/gruntz.py in mrv(e, x) 257 a, b = d.as_two_terms() --> 258 s1, e1 = mrv(a, x) 259 s2, e2 = mrv(b, x) /Users/aaronmeurer/Documents/Python/sympy/sympy/sympy/series/gruntz.py in mrv(e, x) 265 else: --> 266 s, expr = mrv(b, x) 267 return s, expr**e /Users/aaronmeurer/Documents/Python/sympy/sympy/sympy/series/gruntz.py in mrv(e, x) 294 # e.g. something like BesselJ(x, x) --> 295 raise NotImplementedError("MRV set computation for functions in" 296 " several variables not implemented.") NotImplementedError: MRV set computation for functions in several variables not implemented. During handling of the above exception, another exception occurred: NotImplementedError Traceback (most recent call last) <ipython-input-214-830bdb8908f4> in <module>() ----> 1 limit(16**k/(k*binomial(2*k,k)**2), k, oo) /Users/aaronmeurer/Documents/Python/sympy/sympy/sympy/series/limits.py in limit(e, z, z0, dir) 43 """ 44 ---> 45 return Limit(e, z, z0, dir).doit(deep=False) 46 47 /Users/aaronmeurer/Documents/Python/sympy/sympy/sympy/series/limits.py in doit(self, **hints) 183 r = limit_seq(e, z, trials) 184 if r is None: --> 185 raise NotImplementedError() 186 else: 187 raise NotImplementedError() NotImplementedError: In [215]: limit(16**k/(k*binomial(2*k,k)**2).rewrite(factorial), k, oo) Out[215]: π ```
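As a minimal illustration of the workaround described in the issue and of the behaviour the accompanying test asserts (the symbol name is illustrative):

```python
from sympy import binomial, factorial, limit, oo, symbols

k = symbols('k')
expr = 16**k / (k * binomial(2*k, k)**2)

# Workaround from the issue: rewrite binomial in terms of factorial first,
# which lets the Gruntz algorithm handle the limit.
print(limit(expr.rewrite(factorial), k, oo))  # pi

# With the patch above (binomial gains a 'tractable' rewrite through gamma),
# the added test expects limit(expr, k, oo) to return pi directly.
```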
sympy/sympy
diff --git a/sympy/series/tests/test_limits.py b/sympy/series/tests/test_limits.py index 174814b450..09f972fcc2 100644 --- a/sympy/series/tests/test_limits.py +++ b/sympy/series/tests/test_limits.py @@ -456,6 +456,11 @@ def test_issue_8730(): assert limit(subfactorial(x), x, oo) == oo +def test_issue_10801(): + # make sure limits work with binomial + assert limit(16**k / (k * binomial(2*k, k)**2), k, oo) == pi + + def test_issue_9205(): x, y, a = symbols('x, y, a') assert Limit(x, x, a).free_symbols == {a}
{ "commit_name": "head_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 1 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "mpmath>=0.19", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.7", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi @ file:///croot/certifi_1671487769961/work/certifi exceptiongroup==1.2.2 importlib-metadata==6.7.0 iniconfig==2.0.0 mpmath==1.3.0 packaging==24.0 pluggy==1.2.0 pytest==7.4.4 -e git+https://github.com/sympy/sympy.git@f7a8dbec25b04767a3a6996c11a03781184d45d7#egg=sympy tomli==2.0.1 typing_extensions==4.7.1 zipp==3.15.0
name: sympy channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2022.12.7=py37h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=22.3.1=py37h06a4308_0 - python=3.7.16=h7a1cb2a_0 - readline=8.2=h5eee18b_0 - setuptools=65.6.3=py37h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.38.4=py37h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - exceptiongroup==1.2.2 - importlib-metadata==6.7.0 - iniconfig==2.0.0 - mpmath==1.3.0 - packaging==24.0 - pluggy==1.2.0 - pytest==7.4.4 - tomli==2.0.1 - typing-extensions==4.7.1 - zipp==3.15.0 prefix: /opt/conda/envs/sympy
[ "sympy/series/tests/test_limits.py::test_issue_10801" ]
[ "sympy/series/tests/test_limits.py::test_exponential2", "sympy/series/tests/test_limits.py::test_doit2", "sympy/series/tests/test_limits.py::test_order_oo" ]
[ "sympy/series/tests/test_limits.py::test_basic1", "sympy/series/tests/test_limits.py::test_basic2", "sympy/series/tests/test_limits.py::test_basic3", "sympy/series/tests/test_limits.py::test_basic4", "sympy/series/tests/test_limits.py::test_basic5", "sympy/series/tests/test_limits.py::test_issue_3885", "sympy/series/tests/test_limits.py::test_Limit", "sympy/series/tests/test_limits.py::test_floor", "sympy/series/tests/test_limits.py::test_floor_requires_robust_assumptions", "sympy/series/tests/test_limits.py::test_ceiling", "sympy/series/tests/test_limits.py::test_ceiling_requires_robust_assumptions", "sympy/series/tests/test_limits.py::test_atan", "sympy/series/tests/test_limits.py::test_abs", "sympy/series/tests/test_limits.py::test_heuristic", "sympy/series/tests/test_limits.py::test_issue_3871", "sympy/series/tests/test_limits.py::test_exponential", "sympy/series/tests/test_limits.py::test_doit", "sympy/series/tests/test_limits.py::test_AccumBounds", "sympy/series/tests/test_limits.py::test_issue_3792", "sympy/series/tests/test_limits.py::test_issue_4090", "sympy/series/tests/test_limits.py::test_issue_4547", "sympy/series/tests/test_limits.py::test_issue_5164", "sympy/series/tests/test_limits.py::test_issue_5183", "sympy/series/tests/test_limits.py::test_issue_5184", "sympy/series/tests/test_limits.py::test_issue_5229", "sympy/series/tests/test_limits.py::test_issue_4546", "sympy/series/tests/test_limits.py::test_issue_3934", "sympy/series/tests/test_limits.py::test_calculate_series", "sympy/series/tests/test_limits.py::test_issue_5955", "sympy/series/tests/test_limits.py::test_newissue", "sympy/series/tests/test_limits.py::test_extended_real_line", "sympy/series/tests/test_limits.py::test_issue_5436", "sympy/series/tests/test_limits.py::test_Limit_dir", "sympy/series/tests/test_limits.py::test_polynomial", "sympy/series/tests/test_limits.py::test_rational", "sympy/series/tests/test_limits.py::test_issue_5740", "sympy/series/tests/test_limits.py::test_issue_6366", "sympy/series/tests/test_limits.py::test_factorial", "sympy/series/tests/test_limits.py::test_issue_6560", "sympy/series/tests/test_limits.py::test_issue_5172", "sympy/series/tests/test_limits.py::test_issue_7088", "sympy/series/tests/test_limits.py::test_issue_6364", "sympy/series/tests/test_limits.py::test_issue_4099", "sympy/series/tests/test_limits.py::test_issue_4503", "sympy/series/tests/test_limits.py::test_issue_8730", "sympy/series/tests/test_limits.py::test_issue_9205", "sympy/series/tests/test_limits.py::test_limit_seq" ]
[]
BSD
483
sympy__sympy-10889
f7a8dbec25b04767a3a6996c11a03781184d45d7
2016-03-23 03:34:56
8bb5814067cfa0348fb8b708848f35dba2b55ff4
diff --git a/sympy/printing/latex.py b/sympy/printing/latex.py index ba22308b1e..ddbc887714 100644 --- a/sympy/printing/latex.py +++ b/sympy/printing/latex.py @@ -322,9 +322,13 @@ def _print_Float(self, expr): return str_real def _print_Mul(self, expr): + include_parens = False if _coeff_isneg(expr): expr = -expr tex = "- " + if expr.is_Add: + tex += "(" + include_parens = True else: tex = "" @@ -405,6 +409,8 @@ def convert(expr): else: tex += r"\frac{%s}{%s}" % (snumer, sdenom) + if include_parens: + tex += ")" return tex def _print_Pow(self, expr):
latex bug for commutator output There is a latex bug in the output of the function sympy.physics.quantum.commutator.doit that gives an incorrect result if there is an overall negative sign involved. For example, the following code does not give the correct expression for the latex output. ```python from sympy.physics.quantum import Commutator, Operator import sympy A = Operator('A') B = Operator('B') comm = Commutator(B, A) print comm.doit() sympy.latex(comm.doit()) ```
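A short sketch of the expected behaviour, mirroring the test added with this patch:

```python
from sympy import latex
from sympy.physics.quantum import Commutator, Operator

A = Operator('A')
B = Operator('B')
comm = Commutator(B, A)

# [B, A] expands to a negated sum of operator products; before the fix the
# LaTeX printer dropped the parentheses around it.  The added test expects:
print(latex(comm.doit()))   # '- (A B - B A)'
```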
sympy/sympy
diff --git a/sympy/printing/tests/test_latex.py b/sympy/printing/tests/test_latex.py index 05eac5e4ae..c663785e4c 100644 --- a/sympy/printing/tests/test_latex.py +++ b/sympy/printing/tests/test_latex.py @@ -27,6 +27,7 @@ from sympy.functions import DiracDelta, Heaviside, KroneckerDelta, LeviCivita from sympy.logic import Implies from sympy.logic.boolalg import And, Or, Xor +from sympy.physics.quantum import Commutator, Operator from sympy.core.trace import Tr from sympy.core.compatibility import range from sympy.combinatorics.permutations import Cycle, Permutation @@ -601,6 +602,12 @@ def test_latex_AccumuBounds(): def test_latex_emptyset(): assert latex(S.EmptySet) == r"\emptyset" +def test_latex_commutator(): + A = Operator('A') + B = Operator('B') + comm = Commutator(B, A) + assert latex(comm.doit()) == r"- (A B - B A)" + def test_latex_union(): assert latex(Union(Interval(0, 1), Interval(2, 3))) == \
{ "commit_name": "head_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 1 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "mpmath>=0.19", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.7", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi @ file:///croot/certifi_1671487769961/work/certifi exceptiongroup==1.2.2 importlib-metadata==6.7.0 iniconfig==2.0.0 mpmath==1.3.0 packaging==24.0 pluggy==1.2.0 pytest==7.4.4 -e git+https://github.com/sympy/sympy.git@f7a8dbec25b04767a3a6996c11a03781184d45d7#egg=sympy tomli==2.0.1 typing_extensions==4.7.1 zipp==3.15.0
name: sympy channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2022.12.7=py37h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=22.3.1=py37h06a4308_0 - python=3.7.16=h7a1cb2a_0 - readline=8.2=h5eee18b_0 - setuptools=65.6.3=py37h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.38.4=py37h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - exceptiongroup==1.2.2 - importlib-metadata==6.7.0 - iniconfig==2.0.0 - mpmath==1.3.0 - packaging==24.0 - pluggy==1.2.0 - pytest==7.4.4 - tomli==2.0.1 - typing-extensions==4.7.1 - zipp==3.15.0 prefix: /opt/conda/envs/sympy
[ "sympy/printing/tests/test_latex.py::test_latex_commutator" ]
[ "sympy/printing/tests/test_latex.py::test_latex_symbols_failing", "sympy/printing/tests/test_latex.py::test_builtin_without_args_mismatched_names" ]
[ "sympy/printing/tests/test_latex.py::test_printmethod", "sympy/printing/tests/test_latex.py::test_latex_basic", "sympy/printing/tests/test_latex.py::test_latex_builtins", "sympy/printing/tests/test_latex.py::test_latex_cycle", "sympy/printing/tests/test_latex.py::test_latex_permutation", "sympy/printing/tests/test_latex.py::test_latex_Float", "sympy/printing/tests/test_latex.py::test_latex_symbols", "sympy/printing/tests/test_latex.py::test_latex_functions", "sympy/printing/tests/test_latex.py::test_hyper_printing", "sympy/printing/tests/test_latex.py::test_latex_bessel", "sympy/printing/tests/test_latex.py::test_latex_fresnel", "sympy/printing/tests/test_latex.py::test_latex_brackets", "sympy/printing/tests/test_latex.py::test_latex_indexed", "sympy/printing/tests/test_latex.py::test_latex_derivatives", "sympy/printing/tests/test_latex.py::test_latex_subs", "sympy/printing/tests/test_latex.py::test_latex_integrals", "sympy/printing/tests/test_latex.py::test_latex_sets", "sympy/printing/tests/test_latex.py::test_latex_Range", "sympy/printing/tests/test_latex.py::test_latex_sequences", "sympy/printing/tests/test_latex.py::test_latex_FourierSeries", "sympy/printing/tests/test_latex.py::test_latex_FormalPowerSeries", "sympy/printing/tests/test_latex.py::test_latex_intervals", "sympy/printing/tests/test_latex.py::test_latex_AccumuBounds", "sympy/printing/tests/test_latex.py::test_latex_emptyset", "sympy/printing/tests/test_latex.py::test_latex_union", "sympy/printing/tests/test_latex.py::test_latex_symmetric_difference", "sympy/printing/tests/test_latex.py::test_latex_Complement", "sympy/printing/tests/test_latex.py::test_latex_Complexes", "sympy/printing/tests/test_latex.py::test_latex_productset", "sympy/printing/tests/test_latex.py::test_latex_Naturals", "sympy/printing/tests/test_latex.py::test_latex_Naturals0", "sympy/printing/tests/test_latex.py::test_latex_Integers", "sympy/printing/tests/test_latex.py::test_latex_ImageSet", "sympy/printing/tests/test_latex.py::test_latex_ConditionSet", "sympy/printing/tests/test_latex.py::test_latex_ComplexRegion", "sympy/printing/tests/test_latex.py::test_latex_Contains", "sympy/printing/tests/test_latex.py::test_latex_sum", "sympy/printing/tests/test_latex.py::test_latex_product", "sympy/printing/tests/test_latex.py::test_latex_limits", "sympy/printing/tests/test_latex.py::test_issue_3568", "sympy/printing/tests/test_latex.py::test_latex", "sympy/printing/tests/test_latex.py::test_latex_dict", "sympy/printing/tests/test_latex.py::test_latex_list", "sympy/printing/tests/test_latex.py::test_latex_rational", "sympy/printing/tests/test_latex.py::test_latex_inverse", "sympy/printing/tests/test_latex.py::test_latex_DiracDelta", "sympy/printing/tests/test_latex.py::test_latex_Heaviside", "sympy/printing/tests/test_latex.py::test_latex_KroneckerDelta", "sympy/printing/tests/test_latex.py::test_latex_LeviCivita", "sympy/printing/tests/test_latex.py::test_mode", "sympy/printing/tests/test_latex.py::test_latex_Piecewise", "sympy/printing/tests/test_latex.py::test_latex_Matrix", "sympy/printing/tests/test_latex.py::test_latex_matrix_with_functions", "sympy/printing/tests/test_latex.py::test_latex_mul_symbol", "sympy/printing/tests/test_latex.py::test_latex_issue_4381", "sympy/printing/tests/test_latex.py::test_latex_issue_4576", "sympy/printing/tests/test_latex.py::test_latex_pow_fraction", "sympy/printing/tests/test_latex.py::test_noncommutative", "sympy/printing/tests/test_latex.py::test_latex_order", "sympy/printing/tests/test_latex.py::test_latex_Lambda", 
"sympy/printing/tests/test_latex.py::test_latex_PolyElement", "sympy/printing/tests/test_latex.py::test_latex_FracElement", "sympy/printing/tests/test_latex.py::test_latex_Poly", "sympy/printing/tests/test_latex.py::test_latex_ComplexRootOf", "sympy/printing/tests/test_latex.py::test_latex_RootSum", "sympy/printing/tests/test_latex.py::test_settings", "sympy/printing/tests/test_latex.py::test_latex_numbers", "sympy/printing/tests/test_latex.py::test_lamda", "sympy/printing/tests/test_latex.py::test_custom_symbol_names", "sympy/printing/tests/test_latex.py::test_matAdd", "sympy/printing/tests/test_latex.py::test_matMul", "sympy/printing/tests/test_latex.py::test_latex_MatrixSlice", "sympy/printing/tests/test_latex.py::test_latex_RandomDomain", "sympy/printing/tests/test_latex.py::test_PrettyPoly", "sympy/printing/tests/test_latex.py::test_integral_transforms", "sympy/printing/tests/test_latex.py::test_PolynomialRingBase", "sympy/printing/tests/test_latex.py::test_categories", "sympy/printing/tests/test_latex.py::test_Modules", "sympy/printing/tests/test_latex.py::test_QuotientRing", "sympy/printing/tests/test_latex.py::test_Tr", "sympy/printing/tests/test_latex.py::test_Adjoint", "sympy/printing/tests/test_latex.py::test_Hadamard", "sympy/printing/tests/test_latex.py::test_ZeroMatrix", "sympy/printing/tests/test_latex.py::test_boolean_args_order", "sympy/printing/tests/test_latex.py::test_imaginary", "sympy/printing/tests/test_latex.py::test_builtins_without_args", "sympy/printing/tests/test_latex.py::test_latex_greek_functions", "sympy/printing/tests/test_latex.py::test_translate", "sympy/printing/tests/test_latex.py::test_other_symbols", "sympy/printing/tests/test_latex.py::test_modifiers", "sympy/printing/tests/test_latex.py::test_greek_symbols", "sympy/printing/tests/test_latex.py::test_builtin_no_args", "sympy/printing/tests/test_latex.py::test_issue_6853", "sympy/printing/tests/test_latex.py::test_Mul", "sympy/printing/tests/test_latex.py::test_Pow", "sympy/printing/tests/test_latex.py::test_issue_7180", "sympy/printing/tests/test_latex.py::test_issue_8409", "sympy/printing/tests/test_latex.py::test_issue_8470", "sympy/printing/tests/test_latex.py::test_issue_7117", "sympy/printing/tests/test_latex.py::test_issue_2934", "sympy/printing/tests/test_latex.py::test_issue_10489" ]
[]
BSD
484
dpkp__kafka-python-611
d81963a919fa8161c94b5bef5e6de0697b91c4a6
2016-03-23 17:29:58
810f08b7996a15e65cdd8af6c1a7167c28f94646
coveralls: [![Coverage Status](https://coveralls.io/builds/5522917/badge)](https://coveralls.io/builds/5522917) Changes Unknown when pulling **bb2548705a3be822be9e17ea6eb824061fc9fb8f on sock_send_bytes** into ** on master**. coveralls: [![Coverage Status](https://coveralls.io/builds/5664292/badge)](https://coveralls.io/builds/5664292) Changes Unknown when pulling **7af174fe0a6bcc5962a8c8008d66e0b3b05e5fc2 on sock_send_bytes** into ** on master**.
diff --git a/kafka/conn.py b/kafka/conn.py index 2b82b6d..ffc839e 100644 --- a/kafka/conn.py +++ b/kafka/conn.py @@ -188,10 +188,12 @@ class BrokerConnection(object): # and send bytes asynchronously. For now, just block # sending each request payload self._sock.setblocking(True) - sent_bytes = self._sock.send(size) - assert sent_bytes == len(size) - sent_bytes = self._sock.send(message) - assert sent_bytes == len(message) + for data in (size, message): + total_sent = 0 + while total_sent < len(data): + sent_bytes = self._sock.send(data[total_sent:]) + total_sent += sent_bytes + assert total_sent == len(data) self._sock.setblocking(False) except (AssertionError, ConnectionError) as e: log.exception("Error sending %s to %s", request, self) diff --git a/kafka/future.py b/kafka/future.py index 06b8c3a..c7e0b14 100644 --- a/kafka/future.py +++ b/kafka/future.py @@ -15,10 +15,10 @@ class Future(object): self._errbacks = [] def succeeded(self): - return self.is_done and not self.exception + return self.is_done and not bool(self.exception) def failed(self): - return self.is_done and self.exception + return self.is_done and bool(self.exception) def retriable(self): try:
kafka.common.ConnectionError on big messages + gevent i'm getting kafka.common.ConnectionError trying to send big message. Code below ```python from gevent.monkey import patch_all; patch_all() from kafka import KafkaProducer producer = KafkaProducer(bootstrap_servers=xxxxxxxx, buffer_memory=10 * 1024 * 1024, max_request_size=10 * 1024 * 1024,) producer.send('test', 'a' * 1024 * 1024 * 3, ).get(timeout=60) producer.flush() ``` causing this ``` 2016-03-23 11:50:58,147 - kafka.conn - ERROR - Error sending ProduceRequest(required_acks=1, timeout=30000, topics=[(topic='test2', partitions=[(1, <_io.BytesIO object at 0x7fe64edc2950>)])]) to <BrokerConnection host=127.0.0.1 port=9093> Traceback (most recent call last): File "/usr/local/lib/python2.7/dist-packages/kafka/conn.py", line 187, in send assert sent_bytes == len(message) AssertionError 2016-03-23 11:50:58,150 - kafka.producer.sender - DEBUG - Error sending produce request to node 2: 2016-03-23 11:50:58,150 - kafka.producer.record_accumulator - DEBUG - Produced messages to topic-partition TopicPartition(topic='test2', partition=1) with base offset -1 and error . 2016-03-23 11:50:58,150 - kafka.client - DEBUG - Initializing connection to node 2 for metadata request Traceback (most recent call last): File "test_producer.py", line 15, in <module> producer.send('test2', 'a' * 1024 * 1024 * 3, ).get(timeout=60) File "/usr/local/lib/python2.7/dist-packages/kafka/producer/future.py", line 50, in get raise self.exception # pylint: disable-msg=raising-bad-type kafka.common.ConnectionError ``` this works well if i comment out **patch_all()** line Fixing kafka.conn.BrokerConnection.send() method solved it, but i'm not sure about side effects. ```python def send() ..... ..... size = Int32.encode(len(message)) try: self._sock.setblocking(True) sent_bytes = self._sock.send(size) assert sent_bytes == len(size) total_sent = 0 while total_sent < len(message): # sending in loop sent_bytes = self._sock.send(message[total_sent:]) assert sent_bytes total_sent += sent_bytes self._sock.setblocking(False) except (AssertionError, ConnectionError) as e: log.exception("Error sending %s to %s", request, self) .... ``` Any chances to have similar fix in master branch?
dpkp/kafka-python
diff --git a/test/test_conn.py b/test/test_conn.py index d394f74..5432ebd 100644 --- a/test/test_conn.py +++ b/test/test_conn.py @@ -2,12 +2,15 @@ from __future__ import absolute_import from errno import EALREADY, EINPROGRESS, EISCONN, ECONNRESET -import socket import time import pytest from kafka.conn import BrokerConnection, ConnectionStates +from kafka.protocol.api import RequestHeader +from kafka.protocol.metadata import MetadataRequest + +import kafka.common as Errors @pytest.fixture @@ -20,6 +23,7 @@ def socket(mocker): @pytest.fixture def conn(socket): + from socket import AF_INET conn = BrokerConnection('localhost', 9092, socket.AF_INET) return conn @@ -61,22 +65,111 @@ def test_connect_timeout(socket, conn): def test_blacked_out(conn): - assert not conn.blacked_out() + assert conn.blacked_out() is False conn.last_attempt = time.time() - assert conn.blacked_out() + assert conn.blacked_out() is True def test_connected(conn): - assert not conn.connected() + assert conn.connected() is False conn.state = ConnectionStates.CONNECTED - assert conn.connected() + assert conn.connected() is True def test_connecting(conn): - assert not conn.connecting() + assert conn.connecting() is False + conn.state = ConnectionStates.CONNECTING + assert conn.connecting() is True + conn.state = ConnectionStates.CONNECTED + assert conn.connecting() is False + + +def test_send_disconnected(conn): + conn.state = ConnectionStates.DISCONNECTED + f = conn.send('foobar') + assert f.failed() is True + assert isinstance(f.exception, Errors.ConnectionError) + + +def test_send_connecting(conn): conn.state = ConnectionStates.CONNECTING - assert conn.connecting() + f = conn.send('foobar') + assert f.failed() is True + assert isinstance(f.exception, Errors.NodeNotReadyError) + + +def test_send_max_ifr(conn): conn.state = ConnectionStates.CONNECTED - assert not conn.connecting() + max_ifrs = conn.config['max_in_flight_requests_per_connection'] + for _ in range(max_ifrs): + conn.in_flight_requests.append('foo') + f = conn.send('foobar') + assert f.failed() is True + assert isinstance(f.exception, Errors.TooManyInFlightRequests) + + +def test_send_no_response(socket, conn): + conn.connect() + assert conn.state is ConnectionStates.CONNECTED + req = MetadataRequest([]) + header = RequestHeader(req, client_id=conn.config['client_id']) + payload_bytes = len(header.encode()) + len(req.encode()) + third = payload_bytes // 3 + remainder = payload_bytes % 3 + socket.send.side_effect = [4, third, third, third, remainder] + + assert len(conn.in_flight_requests) == 0 + f = conn.send(req, expect_response=False) + assert f.succeeded() is True + assert f.value is None + assert len(conn.in_flight_requests) == 0 + + +def test_send_response(socket, conn): + conn.connect() + assert conn.state is ConnectionStates.CONNECTED + req = MetadataRequest([]) + header = RequestHeader(req, client_id=conn.config['client_id']) + payload_bytes = len(header.encode()) + len(req.encode()) + third = payload_bytes // 3 + remainder = payload_bytes % 3 + socket.send.side_effect = [4, third, third, third, remainder] + + assert len(conn.in_flight_requests) == 0 + f = conn.send(req) + assert f.is_done is False + assert len(conn.in_flight_requests) == 1 + + +def test_send_error(socket, conn): + conn.connect() + assert conn.state is ConnectionStates.CONNECTED + req = MetadataRequest([]) + header = RequestHeader(req, client_id=conn.config['client_id']) + try: + error = ConnectionError + except NameError: + from socket import error + socket.send.side_effect = error + f 
= conn.send(req) + assert f.failed() is True + assert isinstance(f.exception, Errors.ConnectionError) + assert socket.close.call_count == 1 + assert conn.state is ConnectionStates.DISCONNECTED + + +def test_can_send_more(conn): + assert conn.can_send_more() is True + max_ifrs = conn.config['max_in_flight_requests_per_connection'] + for _ in range(max_ifrs): + assert conn.can_send_more() is True + conn.in_flight_requests.append('foo') + assert conn.can_send_more() is False + + +def test_recv(socket, conn): + pass # TODO + -# TODO: test_send, test_recv, test_can_send_more, test_close +def test_close(conn): + pass # TODO
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 2, "test_score": 2 }, "num_modified_files": 2 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "pytest-catchlog", "pytest-pylint", "pytest-sugar", "pytest-mock", "mock", "python-snappy", "lz4tools", "xxhash" ], "pre_install": [ "apt-get update", "apt-get install -y libsnappy-dev" ], "python": "3.5", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
astroid==2.11.7 attrs==22.2.0 certifi==2021.5.30 coverage==6.2 cramjam==2.5.0 dill==0.3.4 importlib-metadata==4.8.3 iniconfig==1.1.1 isort==5.10.1 -e git+https://github.com/dpkp/kafka-python.git@d81963a919fa8161c94b5bef5e6de0697b91c4a6#egg=kafka_python lazy-object-proxy==1.7.1 lz4tools==1.3.1.2 mccabe==0.7.0 mock==5.2.0 packaging==21.3 platformdirs==2.4.0 pluggy==1.0.0 py==1.11.0 pylint==2.13.9 pyparsing==3.1.4 pytest==7.0.1 pytest-catchlog==1.2.2 pytest-cov==4.0.0 pytest-mock==3.6.1 pytest-pylint==0.18.0 pytest-sugar==0.9.6 python-snappy==0.7.3 six==1.17.0 termcolor==1.1.0 toml==0.10.2 tomli==1.2.3 typed-ast==1.5.5 typing_extensions==4.1.1 wrapt==1.16.0 xxhash==3.2.0 zipp==3.6.0
name: kafka-python channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - astroid==2.11.7 - attrs==22.2.0 - coverage==6.2 - cramjam==2.5.0 - dill==0.3.4 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - isort==5.10.1 - lazy-object-proxy==1.7.1 - lz4tools==1.3.1.2 - mccabe==0.7.0 - mock==5.2.0 - packaging==21.3 - platformdirs==2.4.0 - pluggy==1.0.0 - py==1.11.0 - pylint==2.13.9 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-catchlog==1.2.2 - pytest-cov==4.0.0 - pytest-mock==3.6.1 - pytest-pylint==0.18.0 - pytest-sugar==0.9.6 - python-snappy==0.7.3 - six==1.17.0 - termcolor==1.1.0 - toml==0.10.2 - tomli==1.2.3 - typed-ast==1.5.5 - typing-extensions==4.1.1 - wrapt==1.16.0 - xxhash==3.2.0 - zipp==3.6.0 prefix: /opt/conda/envs/kafka-python
[ "test/test_conn.py::test_send_disconnected", "test/test_conn.py::test_send_connecting", "test/test_conn.py::test_send_max_ifr", "test/test_conn.py::test_send_no_response", "test/test_conn.py::test_send_response", "test/test_conn.py::test_send_error" ]
[]
[ "test/test_conn.py::test_connect[states0]", "test/test_conn.py::test_connect[states1]", "test/test_conn.py::test_connect[states2]", "test/test_conn.py::test_connect[states3]", "test/test_conn.py::test_connect[states4]", "test/test_conn.py::test_connect_timeout", "test/test_conn.py::test_blacked_out", "test/test_conn.py::test_connected", "test/test_conn.py::test_connecting", "test/test_conn.py::test_can_send_more", "test/test_conn.py::test_recv", "test/test_conn.py::test_close" ]
[]
Apache License 2.0
485
sympy__sympy-10895
6fa2cf207e942dbbca1cb22fc92f43febc50fec8
2016-03-24 04:48:30
8bb5814067cfa0348fb8b708848f35dba2b55ff4
diff --git a/sympy/physics/vector/functions.py b/sympy/physics/vector/functions.py index a5cdabfc25..ba5a963c32 100644 --- a/sympy/physics/vector/functions.py +++ b/sympy/physics/vector/functions.py @@ -517,12 +517,10 @@ def _process_vector_differential(vectdiff, condition, \ return (acc, vel, kwargs['position']) -def partial_velocity(vel_list, u_list, frame): - """Returns a list of partial velocities. - - For a list of velocity or angular velocity vectors the partial derivatives - with respect to the supplied generalized speeds are computed, in the - specified ReferenceFrame. +def partial_velocity(vel_vecs, gen_speeds, frame): + """Returns a list of partial velocities with respect to the provided + generalized speeds in the given reference frame for each of the supplied + velocity vectors. The output is a list of lists. The outer list has a number of elements equal to the number of supplied velocity vectors. The inner lists are, for @@ -532,12 +530,13 @@ def partial_velocity(vel_list, u_list, frame): Parameters ========== - vel_list : list - List of velocities of Point's and angular velocities of ReferenceFrame's - u_list : list - List of independent generalized speeds. + vel_vecs : iterable + An iterable of velocity vectors (angular or linear). + gen_speeds : iterable + An iterable of generalized speeds. frame : ReferenceFrame - The ReferenceFrame the partial derivatives are going to be taken in. + The reference frame that the partial derivatives are going to be taken + in. Examples ======== @@ -549,24 +548,27 @@ def partial_velocity(vel_list, u_list, frame): >>> N = ReferenceFrame('N') >>> P = Point('P') >>> P.set_vel(N, u * N.x) - >>> vel_list = [P.vel(N)] - >>> u_list = [u] - >>> partial_velocity(vel_list, u_list, N) + >>> vel_vecs = [P.vel(N)] + >>> gen_speeds = [u] + >>> partial_velocity(vel_vecs, gen_speeds, N) [[N.x]] """ - if not iterable(vel_list): - raise TypeError('Provide velocities in an iterable') - if not iterable(u_list): - raise TypeError('Provide speeds in an iterable') - list_of_pvlists = [] - for i in vel_list: - pvlist = [] - for j in u_list: - vel = i.diff(j, frame) - pvlist += [vel] - list_of_pvlists += [pvlist] - return list_of_pvlists + + if not iterable(vel_vecs): + raise TypeError('Velocity vectors must be contained in an iterable.') + + if not iterable(gen_speeds): + raise TypeError('Generalized speeds must be contained in an iterable') + + vec_partials = [] + for vec in vel_vecs: + partials = [] + for speed in gen_speeds: + partials.append(vec.diff(speed, frame, var_in_dcm=False)) + vec_partials.append(partials) + + return vec_partials def dynamicsymbols(names, level=0): diff --git a/sympy/physics/vector/vector.py b/sympy/physics/vector/vector.py index e7577df17b..4fbef9f85f 100644 --- a/sympy/physics/vector/vector.py +++ b/sympy/physics/vector/vector.py @@ -485,24 +485,30 @@ def outer(self, other): return self | other outer.__doc__ = __or__.__doc__ - def diff(self, wrt, otherframe): - """Takes the partial derivative, with respect to a value, in a frame. - - Returns a Vector. + def diff(self, var, frame, var_in_dcm=True): + """Returns the partial derivative of the vector with respect to a + variable in the provided reference frame. Parameters ========== - - wrt : Symbol + var : Symbol What the partial derivative is taken with respect to. - otherframe : ReferenceFrame - The ReferenceFrame that the partial derivative is taken in. + frame : ReferenceFrame + The reference frame that the partial derivative is taken in. 
+ var_in_dcm : boolean + If true, the differentiation algorithm assumes that the variable + may be present in any of the direction cosine matrices that relate + the frame to the frames of any component of the vector. But if it + is known that the variable is not present in the direction cosine + matrices, false can be set to skip full reexpression in the desired + frame. Examples ======== - >>> from sympy.physics.vector import ReferenceFrame, Vector, dynamicsymbols >>> from sympy import Symbol + >>> from sympy.physics.vector import dynamicsymbols, ReferenceFrame + >>> from sympy.physics.vector import Vector >>> Vector.simp = True >>> t = Symbol('t') >>> q1 = dynamicsymbols('q1') @@ -510,24 +516,39 @@ def diff(self, wrt, otherframe): >>> A = N.orientnew('A', 'Axis', [q1, N.y]) >>> A.x.diff(t, N) - q1'*A.z + >>> B = ReferenceFrame('B') + >>> u1, u2 = dynamicsymbols('u1, u2') + >>> v = u1 * A.x + u2 * B.y + >>> v.diff(u2, N, var_in_dcm=False) + B.y """ from sympy.physics.vector.frame import _check_frame - wrt = sympify(wrt) - _check_frame(otherframe) - outvec = Vector(0) - for i, v in enumerate(self.args): - if v[1] == otherframe: - outvec += Vector([(v[0].diff(wrt), otherframe)]) + + var = sympify(var) + _check_frame(frame) + + partial = Vector(0) + + for vector_component in self.args: + measure_number = vector_component[0] + component_frame = vector_component[1] + if component_frame == frame: + partial += Vector([(measure_number.diff(var), frame)]) else: - if otherframe.dcm(v[1]).diff(wrt) == zeros(3, 3): - d = v[0].diff(wrt) - outvec += Vector([(d, v[1])]) - else: - d = (Vector([v]).express(otherframe)).args[0][0].diff(wrt) - outvec += Vector([(d, otherframe)]).express(v[1]) - return outvec + # If the direction cosine matrix relating the component frame + # with the derivative frame does not contain the variable. + if not var_in_dcm or (frame.dcm(component_frame).diff(var) == + zeros(3, 3)): + partial += Vector([(measure_number.diff(var), + component_frame)]) + else: # else express in the frame + reexp_vec_comp = Vector([vector_component]).express(frame) + deriv = reexp_vec_comp.args[0][0].diff(var) + partial += Vector([(deriv, frame)]).express(component_frame) + + return partial def express(self, otherframe, variables=False): """
partial_velocity should not require a rexpression in the provided reference frame I think that we may have a fundamental design flaw with some things about physics.vector. To illustrate, let's say that we have defined a vector that describes the linear velocity of point P in the reference frame N in terms of the generalized speeds: u1, u2, and u3. ```python N_v_P = u1 * A.x + u2 * A.y + u3 * A.z ``` Note that we have this velocity vector is expressed in a frame A which has an arbitrary relationship with the frame N. If point P happens to be an essential point to form the equations of motion with Kane's method, then we will need to take the partial velocity of the vector with respect to the generalized speeds in an intertial reference frame. If N happens to be that inertial reference frame, then we should be able to do: ```python N_v_P.diff(u1, N) ``` and this should return: ```python A.x ``` The partial velocity of N_v_P in N with respect to u1 is simply A.x. Of course, if A is oriented with respect to N in some way, then A.x can be expressed in N's unit vectors, but it isn't necessary to do that to compute the partial velocity. The following code shows that this does not work: ``` moorepants@garuda:pydy(error-messages)$ ipython Python 3.5.1 |Continuum Analytics, Inc.| (default, Dec 7 2015, 11:16:01) Type "copyright", "credits" or "license" for more information. IPython 4.1.2 -- An enhanced Interactive Python. ? -> Introduction and overview of IPython's features. %quickref -> Quick reference. help -> Python's own help system. object? -> Details about 'object', use 'object??' for extra details. In [1]: import sympy.physics.mechanics as me In [2]: u1, u2, u3 = me.dynamicsymbols('u1, u2, u3') In [3]: N = me.ReferenceFrame('N') In [4]: A = me.ReferenceFrame('A') In [5]: N_v_P = u1 * A.x + u2 * A.y + u3 * A.z In [7]: N_v_P.diff(u1, N) --------------------------------------------------------------------------- ValueError Traceback (most recent call last) <ipython-input-7-7a11f17c841e> in <module>() ----> 1 N_v_P.diff(u1, N) /home/moorepants/miniconda3/lib/python3.5/site-packages/sympy/physics/vector/vector.py in diff(self, wrt, otherframe) 519 outvec += Vector([(v[0].diff(wrt), otherframe)]) 520 else: --> 521 if otherframe.dcm(v[1]).diff(wrt) == zeros(3, 3): 522 d = v[0].diff(wrt) 523 outvec += Vector([(d, v[1])]) /home/moorepants/miniconda3/lib/python3.5/site-packages/sympy/physics/vector/frame.py in dcm(self, otherframe) 405 if otherframe in self._dcm_cache: 406 return self._dcm_cache[otherframe] --> 407 flist = self._dict_list(otherframe, 0) 408 outdcm = eye(3) 409 for i in range(len(flist) - 1): /home/moorepants/miniconda3/lib/python3.5/site-packages/sympy/physics/vector/frame.py in _dict_list(self, other, num) 250 return outlist[0] 251 raise ValueError('No Connecting Path found between ' + self.name + --> 252 ' and ' + other.name) 253 254 def _w_diff_dcm(self, otherframe): ValueError: No Connecting Path found between N and A In [9]: me.partial_velocity([N_v_P], [u1, u2, u3], N) --------------------------------------------------------------------------- ValueError Traceback (most recent call last) <ipython-input-9-56e053551919> in <module>() ----> 1 me.partial_velocity([N_v_P], [u1, u2, u3], N) /home/moorepants/miniconda3/lib/python3.5/site-packages/sympy/physics/vector/functions.py in partial_velocity(vel_list, u_list, frame) 564 pvlist = [] 565 for j in u_list: --> 566 vel = i.diff(j, frame) 567 pvlist += [vel] 568 list_of_pvlists += [pvlist] 
/home/moorepants/miniconda3/lib/python3.5/site-packages/sympy/physics/vector/vector.py in diff(self, wrt, otherframe) 519 outvec += Vector([(v[0].diff(wrt), otherframe)]) 520 else: --> 521 if otherframe.dcm(v[1]).diff(wrt) == zeros(3, 3): 522 d = v[0].diff(wrt) 523 outvec += Vector([(d, v[1])]) /home/moorepants/miniconda3/lib/python3.5/site-packages/sympy/physics/vector/frame.py in dcm(self, otherframe) 405 if otherframe in self._dcm_cache: 406 return self._dcm_cache[otherframe] --> 407 flist = self._dict_list(otherframe, 0) 408 outdcm = eye(3) 409 for i in range(len(flist) - 1): /home/moorepants/miniconda3/lib/python3.5/site-packages/sympy/physics/vector/frame.py in _dict_list(self, other, num) 250 return outlist[0] 251 raise ValueError('No Connecting Path found between ' + self.name + --> 252 ' and ' + other.name) 253 254 def _w_diff_dcm(self, otherframe): ValueError: No Connecting Path found between N and A ``` The problem is that the Vector object has no awareness of that it is a velocity of some point in some reference frame. Because of this, it doesn't really make sense to ask it to differentiate itself in the reference frame N. But our vector differentiation in frames does work. It works because we *require* that there be a per-specified orientation between reference frames A and N. If this orientation is specified the correct answer is returned: ``` In [10]: q1, q2, q3 = me.dynamicsymbols('q1, q2, q3') In [11]: A.orient(N, 'Body', (q1, q2, q3), 'XYZ') In [12]: N_v_P.diff(u1, N) Out[12]: A.x ``` This requires re-expression of the vector in the N frame so that the measure numbers in the N frame of N_v_P can be differentiated in that frame. But we know that re-expression is not necessary to get the correct result. This comes up when you have ignorable coordinates. For example, if you want to study a satellite that is moving in inertial reference frame N, you can often ignore the 6 coordinates associated with the primary degrees of freedom up front. Currently, our software does not allow us to easily do this because of the above mentioned limitation. Some thoughts: - Maybe we need to define differentiation of the position/orientatoins, velocity, and accelerations at only on the Point and RefernceFrame objects because these have the full information about a given velocity vector, i.e. those objects know what frame the vector is with respect to. The vector by itself is not aware of what frame it is wrt. - Maybe we need a "null" orientation option that lets you connect frames A and N but not specify the generalized coordinates and direction cosine matrix that relates the two. Then the vector.diff() function can deal with this properly. - The fact that Vectors can differentiate themselves without know what frame they are defined wrt, seems mathematically incorrect. - It is important to note that re-expression of a vector in another frame is different that knowing what frame a velocity vector is associated with. - Do we need a VelocityVector object that knows more info like the frame the velocity is wrt or the associated points? - I think Autolev properly tracks this with their syntax, for example in `N_v_P> = u1 * A1> + u2 * A2> + u3 * A3>` both the left and right hand sides have semantic meaning. This defines a velocity vector in N of P with that vector expression. All of that info must be stored in a data structure behind the scenes. They must have different types of vectors. @sympy/mechanics What do you all think about this? 
I feel like we have an odd hole here that may take more than a hack to properly fix. tl;dr Differentiation of vectors with respect to generalized speeds should not require that orientations be set between the frames that are associated with the unit vectors in the velocity vector expression and the frame that the velocity is defined in.
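A minimal sketch of the behaviour the patch enables, modelled on the tests added with it (symbol names are illustrative):

```python
import sympy.physics.mechanics as me

u1, u2 = me.dynamicsymbols('u1, u2')
N = me.ReferenceFrame('N')
A = me.ReferenceFrame('A')   # no orientation defined between A and N

v = u1 * A.x + u2 * N.y

# New keyword introduced by the patch: skip the DCM lookup when the variable
# is known not to appear in any direction cosine matrix.
print(v.diff(u1, N, var_in_dcm=False))        # A.x

# partial_velocity now passes var_in_dcm=False internally, so this works
# even though A and N are not connected by an orientation.
print(me.partial_velocity([v], [u1, u2], N))  # [[A.x, N.y]]
```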
sympy/sympy
diff --git a/sympy/physics/vector/tests/test_functions.py b/sympy/physics/vector/tests/test_functions.py index 9a30180ac0..f6e561b5c0 100644 --- a/sympy/physics/vector/tests/test_functions.py +++ b/sympy/physics/vector/tests/test_functions.py @@ -1,11 +1,10 @@ from sympy import S, Integral, sin, cos, pi, sqrt, symbols -from sympy.physics.vector import (Dyadic, Point, ReferenceFrame, \ - Vector) -from sympy.physics.vector import (cross, dot, express, \ - time_derivative, kinematic_equations, \ - outer, partial_velocity, \ - get_motion_params) -from sympy.physics.vector.functions import dynamicsymbols +from sympy.physics.vector import Dyadic, Point, ReferenceFrame, Vector +from sympy.physics.vector.functions import (cross, dot, express, + time_derivative, + kinematic_equations, outer, + partial_velocity, + get_motion_params, dynamicsymbols) from sympy.utilities.pytest import raises Vector.simp = True @@ -465,3 +464,10 @@ def test_partial_velocity(): [[- r*L.y, r*L.x, 0, L.x, cos(q2)*L.y - sin(q2)*L.z], [0, 0, 0, L.x, cos(q2)*L.y - sin(q2)*L.z], [L.x, L.y, L.z, 0, 0]]) + + # Make sure that partial velocities can be computed regardless if the + # orientation between frames is defined or not. + A = ReferenceFrame('A') + B = ReferenceFrame('B') + v = u4 * A.x + u5 * B.y + assert partial_velocity((v, ), (u4, u5), A) == [[A.x, B.y]] diff --git a/sympy/physics/vector/tests/test_vector.py b/sympy/physics/vector/tests/test_vector.py index 602872d9ad..5079c8fac4 100644 --- a/sympy/physics/vector/tests/test_vector.py +++ b/sympy/physics/vector/tests/test_vector.py @@ -1,7 +1,7 @@ from sympy import symbols, pi, sin, cos, ImmutableMatrix as Matrix -from sympy.physics.vector import ReferenceFrame, Vector, \ - dynamicsymbols, dot +from sympy.physics.vector import ReferenceFrame, Vector, dynamicsymbols, dot from sympy.abc import x, y, z +from sympy.utilities.pytest import raises Vector.simp = True @@ -54,6 +54,7 @@ def test_Vector(): assert v1.separate() == {A: v1} assert v5.separate() == {A: x*A.x + y*A.y, B: z*B.z} + def test_Vector_diffs(): q1, q2, q3, q4 = dynamicsymbols('q1 q2 q3 q4') q1d, q2d, q3d, q4d = dynamicsymbols('q1 q2 q3 q4', 1) @@ -121,6 +122,25 @@ def test_Vector_diffs(): assert v4.diff(q3d, B) == B.x + q3 * N.x + N.y +def test_vector_var_in_dcm(): + + N = ReferenceFrame('N') + A = ReferenceFrame('A') + B = ReferenceFrame('B') + u1, u2, u3, u4 = dynamicsymbols('u1 u2 u3 u4') + + v = u1 * u2 * A.x + u3 * N.y + u4**2 * N.z + + assert v.diff(u1, N, var_in_dcm=False) == u2 * A.x + assert v.diff(u1, A, var_in_dcm=False) == u2 * A.x + assert v.diff(u3, N, var_in_dcm=False) == N.y + assert v.diff(u3, A, var_in_dcm=False) == N.y + assert v.diff(u3, B, var_in_dcm=False) == N.y + assert v.diff(u4, N, var_in_dcm=False) == 2 * u4 * N.z + + raises(ValueError, lambda: v.diff(u1, N)) + + def test_vector_simplify(): x, y, z, k, n, m, w, f, s, A = symbols('x, y, z, k, n, m, w, f, s, A') N = ReferenceFrame('N')
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 0, "test_score": 3 }, "num_modified_files": 2 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "mpmath>=0.19", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.7", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi @ file:///croot/certifi_1671487769961/work/certifi exceptiongroup==1.2.2 importlib-metadata==6.7.0 iniconfig==2.0.0 mpmath==1.3.0 packaging==24.0 pluggy==1.2.0 pytest==7.4.4 -e git+https://github.com/sympy/sympy.git@6fa2cf207e942dbbca1cb22fc92f43febc50fec8#egg=sympy tomli==2.0.1 typing_extensions==4.7.1 zipp==3.15.0
name: sympy channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2022.12.7=py37h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=22.3.1=py37h06a4308_0 - python=3.7.16=h7a1cb2a_0 - readline=8.2=h5eee18b_0 - setuptools=65.6.3=py37h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.38.4=py37h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - exceptiongroup==1.2.2 - importlib-metadata==6.7.0 - iniconfig==2.0.0 - mpmath==1.3.0 - packaging==24.0 - pluggy==1.2.0 - pytest==7.4.4 - tomli==2.0.1 - typing-extensions==4.7.1 - zipp==3.15.0 prefix: /opt/conda/envs/sympy
[ "sympy/physics/vector/tests/test_functions.py::test_partial_velocity", "sympy/physics/vector/tests/test_vector.py::test_vector_var_in_dcm" ]
[]
[ "sympy/physics/vector/tests/test_functions.py::test_dot", "sympy/physics/vector/tests/test_functions.py::test_dot_different_frames", "sympy/physics/vector/tests/test_functions.py::test_cross", "sympy/physics/vector/tests/test_functions.py::test_cross_different_frames", "sympy/physics/vector/tests/test_functions.py::test_operator_match", "sympy/physics/vector/tests/test_functions.py::test_express", "sympy/physics/vector/tests/test_functions.py::test_time_derivative", "sympy/physics/vector/tests/test_functions.py::test_get_motion_methods", "sympy/physics/vector/tests/test_functions.py::test_kin_eqs", "sympy/physics/vector/tests/test_vector.py::test_Vector", "sympy/physics/vector/tests/test_vector.py::test_Vector_diffs", "sympy/physics/vector/tests/test_vector.py::test_vector_simplify" ]
[]
BSD
486
sympy__sympy-10899
f32d8386dbab72c56575afa19f329912a7849c75
2016-03-24 10:18:36
8bb5814067cfa0348fb8b708848f35dba2b55ff4
Nitin216: Someone review this PR thisch: You need to fix the existing tests in https://github.com/sympy/sympy/pull/8359/files Nitin216: @thisch Got that working on it now.
diff --git a/doc/src/modules/solvers/diophantine.rst b/doc/src/modules/solvers/diophantine.rst index a738bdc433..743a28f1d4 100644 --- a/doc/src/modules/solvers/diophantine.rst +++ b/doc/src/modules/solvers/diophantine.rst @@ -135,7 +135,7 @@ For linear Diophantine equations, the customized parameter is the prefix used for each free variable in the solution. Consider the following example: >>> diop_solve(2*x + 3*y - 5*z + 7, m) -(m_0, 9*m_0 + 5*m_1 + 14, 5*m_0 + 3*m_1 + 7) +(m_0, -9*m_0 - 5*m_1 - 14, -5*m_0 - 3*m_1 - 7) In the solution above, m_0 and m_1 are independent free variables. @@ -164,7 +164,7 @@ set() >>> diophantine(x**2 - 4*x*y + 4*y**2 - 3*x + 7*y - 5) set([(-2*t**2 - 7*t + 10, -t**2 - 3*t + 5)]) >>> diophantine(x**2 + 2*x*y + y**2 - 3*x - 3*y) -set([(t_0, -t_0 + 1), (t_0, -t_0 + 3)]) +set([(t_0, -t_0), (t_0, -t_0 + 3)]) The most interesting case is when `\Delta > 0` and it is not a perfect square. In this case, the equation has either no solutions or an infinte number of diff --git a/examples/beginner/basic.py b/examples/beginner/basic.py index 9d5f7d0565..e2651f7615 100755 --- a/examples/beginner/basic.py +++ b/examples/beginner/basic.py @@ -5,18 +5,19 @@ Demonstrates how to create symbols and print some algebra operations. """ -from sympy import Symbol, pprint +import sympy +from sympy import pprint def main(): - a = Symbol('a') - b = Symbol('b') - c = Symbol('c') + a = sympy.Symbol('a') + b = sympy.Symbol('b') + c = sympy.Symbol('c') e = ( a*b*b + 2*b*a*b )**c - print('') + print pprint(e) - print('') + print if __name__ == "__main__": main() diff --git a/examples/beginner/series.py b/examples/beginner/series.py index 6ea50692fe..dc3a9d353f 100755 --- a/examples/beginner/series.py +++ b/examples/beginner/series.py @@ -12,17 +12,17 @@ def main(): x = Symbol('x') e = 1/cos(x) - print('') + print() print("Series for sec(x):") - print('') + print() pprint(e.series(x, 0, 10)) print("\n") e = 1/sin(x) print("Series for csc(x):") - print('') + print() pprint(e.series(x, 0, 4)) - print('') + print() if __name__ == "__main__": main() diff --git a/sympy/core/function.py b/sympy/core/function.py index 94eb5128ff..3570998820 100644 --- a/sympy/core/function.py +++ b/sympy/core/function.py @@ -137,22 +137,6 @@ def __init__(cls, *args, **kwargs): nargs = (as_int(nargs),) cls._nargs = nargs - @property - def __signature__(self): - """ - Allow Python 3's inspect.signature to give a useful signature for - Function subclasses. - """ - # Python 3 only, but backports (like the one in IPython) still might - # call this. - try: - from inspect import signature - except ImportError: - return None - - # TODO: Look at nargs - return signature(self.eval) - @property def nargs(self): """Return a set of the allowed number of arguments for the function. 
@@ -482,7 +466,7 @@ def _eval_evalf(self, prec): except (AttributeError, KeyError): try: return Float(self._imp_(*self.args), prec) - except (AttributeError, TypeError, ValueError): + except (AttributeError, TypeError): return # Convert all args to mpf or mpc diff --git a/sympy/functions/combinatorial/factorials.py b/sympy/functions/combinatorial/factorials.py index fc11ad04fa..48ea175555 100644 --- a/sympy/functions/combinatorial/factorials.py +++ b/sympy/functions/combinatorial/factorials.py @@ -853,9 +853,6 @@ def _eval_rewrite_as_gamma(self, n, k): from sympy import gamma return gamma(n + 1)/(gamma(k + 1)*gamma(n - k + 1)) - def _eval_rewrite_as_tractable(self, n, k): - return self._eval_rewrite_as_gamma(n, k).rewrite('tractable') - def _eval_rewrite_as_FallingFactorial(self, n, k): if k.is_integer: return ff(n, k) / factorial(k) diff --git a/sympy/functions/elementary/complexes.py b/sympy/functions/elementary/complexes.py index cc1c4d8d7b..fcec176d1d 100644 --- a/sympy/functions/elementary/complexes.py +++ b/sympy/functions/elementary/complexes.py @@ -100,7 +100,7 @@ def _eval_derivative(self, x): * im(Derivative(self.args[0], x, evaluate=True)) def _eval_rewrite_as_im(self, arg): - return self.args[0] - im(self.args[0]) + return self.args[0] - S.ImaginaryUnit*im(self.args[0]) def _eval_is_algebraic(self): return self.args[0].is_algebraic @@ -203,7 +203,7 @@ def _sage_(self): return sage.imag_part(self.args[0]._sage_()) def _eval_rewrite_as_re(self, arg): - return self.args[0] - re(self.args[0]) + return -S.ImaginaryUnit*(self.args[0] - re(self.args[0])) def _eval_is_algebraic(self): return self.args[0].is_algebraic diff --git a/sympy/printing/latex.py b/sympy/printing/latex.py index 50a464f915..ba22308b1e 100644 --- a/sympy/printing/latex.py +++ b/sympy/printing/latex.py @@ -322,13 +322,9 @@ def _print_Float(self, expr): return str_real def _print_Mul(self, expr): - include_parens = False if _coeff_isneg(expr): expr = -expr tex = "- " - if expr.is_Add: - tex += "(" - include_parens = True else: tex = "" @@ -409,8 +405,6 @@ def convert(expr): else: tex += r"\frac{%s}{%s}" % (snumer, sdenom) - if include_parens: - tex += ")" return tex def _print_Pow(self, expr): @@ -1231,7 +1225,7 @@ def _print_Order(self, expr): s += self._print(expr.variables) elif len(expr.variables): s += self._print(expr.variables[0]) - s += r'\rightarrow ' + s += r'\rightarrow' if len(expr.point) > 1: s += self._print(expr.point) else:
rewrite im() in terms of re() and vice versa There is a bug in the rewrite logic in ``complexes.py`` (see https://github.com/sympy/sympy/pull/8359#issuecomment-200560081 and https://github.com/sympy/sympy/pull/8359#issuecomment-200713532). The expected output should be: ```py re(x).rewrite(im) == x - I*im(x) im(x).rewrite(re) == -I*(x - re(x)) ```
sympy/sympy
diff --git a/sympy/functions/elementary/tests/test_complexes.py b/sympy/functions/elementary/tests/test_complexes.py index d74e2e317e..896197d190 100644 --- a/sympy/functions/elementary/tests/test_complexes.py +++ b/sympy/functions/elementary/tests/test_complexes.py @@ -68,8 +68,8 @@ def test_re(): assert re((1 + sqrt(a + b*I))/2) == \ (a**2 + b**2)**Rational(1, 4)*cos(atan2(b, a)/2)/2 + Rational(1, 2) - assert re(x).rewrite(im) == x - im(x) - assert (x + re(y)).rewrite(re, im) == x + y - im(y) + assert re(x).rewrite(im) == x - S.ImaginaryUnit*im(x) + assert (x + re(y)).rewrite(re, im) == x + y - S.ImaginaryUnit*im(y) a = Symbol('a', algebraic=True) t = Symbol('t', transcendental=True) @@ -137,8 +137,8 @@ def test_im(): assert im((1 + sqrt(a + b*I))/2) == \ (a**2 + b**2)**Rational(1, 4)*sin(atan2(b, a)/2)/2 - assert im(x).rewrite(re) == x - re(x) - assert (x + im(y)).rewrite(im, re) == x + y - re(y) + assert im(x).rewrite(re) == -S.ImaginaryUnit * (x - re(x)) + assert (x + im(y)).rewrite(im, re) == x - S.ImaginaryUnit * (y - re(y)) a = Symbol('a', algebraic=True) t = Symbol('t', transcendental=True) diff --git a/sympy/printing/tests/test_latex.py b/sympy/printing/tests/test_latex.py index 2bb6b15ce0..05eac5e4ae 100644 --- a/sympy/printing/tests/test_latex.py +++ b/sympy/printing/tests/test_latex.py @@ -27,7 +27,6 @@ from sympy.functions import DiracDelta, Heaviside, KroneckerDelta, LeviCivita from sympy.logic import Implies from sympy.logic.boolalg import And, Or, Xor -from sympy.physics.quantum import Commutator, Operator from sympy.core.trace import Tr from sympy.core.compatibility import range from sympy.combinatorics.permutations import Cycle, Permutation @@ -264,11 +263,10 @@ def test_latex_functions(): assert latex(Order(x)) == r"\mathcal{O}\left(x\right)" assert latex(Order(x, x)) == r"\mathcal{O}\left(x\right)" assert latex(Order(x, (x, 0))) == r"\mathcal{O}\left(x\right)" - assert latex(Order(x, (x, oo))) == r"\mathcal{O}\left(x; x\rightarrow \infty\right)" - assert latex(Order(x - y, (x, y))) == r"\mathcal{O}\left(x - y; x\rightarrow y\right)" - assert latex(Order(x, x, y)) == r"\mathcal{O}\left(x; \left ( x, \quad y\right )\rightarrow \left ( 0, \quad 0\right )\right)" - assert latex(Order(x, x, y)) == r"\mathcal{O}\left(x; \left ( x, \quad y\right )\rightarrow \left ( 0, \quad 0\right )\right)" - assert latex(Order(x, (x, oo), (y, oo))) == r"\mathcal{O}\left(x; \left ( x, \quad y\right )\rightarrow \left ( \infty, \quad \infty\right )\right)" + assert latex(Order(x, (x, oo))) == r"\mathcal{O}\left(x; x\rightarrow\infty\right)" + assert latex(Order(x, x, y)) == r"\mathcal{O}\left(x; \left ( x, \quad y\right )\rightarrow\left ( 0, \quad 0\right )\right)" + assert latex(Order(x, x, y)) == r"\mathcal{O}\left(x; \left ( x, \quad y\right )\rightarrow\left ( 0, \quad 0\right )\right)" + assert latex(Order(x, (x, oo), (y, oo))) == r"\mathcal{O}\left(x; \left ( x, \quad y\right )\rightarrow\left ( \infty, \quad \infty\right )\right)" assert latex(lowergamma(x, y)) == r'\gamma\left(x, y\right)' assert latex(uppergamma(x, y)) == r'\Gamma\left(x, y\right)' @@ -603,12 +601,6 @@ def test_latex_AccumuBounds(): def test_latex_emptyset(): assert latex(S.EmptySet) == r"\emptyset" -def test_latex_commutator(): - A = Operator('A') - B = Operator('B') - comm = Commutator(B, A) - assert latex(comm.doit()) == r"- (A B - B A)" - def test_latex_union(): assert latex(Union(Interval(0, 1), Interval(2, 3))) == \ diff --git a/sympy/series/tests/test_limits.py b/sympy/series/tests/test_limits.py index 
09f972fcc2..174814b450 100644 --- a/sympy/series/tests/test_limits.py +++ b/sympy/series/tests/test_limits.py @@ -456,11 +456,6 @@ def test_issue_8730(): assert limit(subfactorial(x), x, oo) == oo -def test_issue_10801(): - # make sure limits work with binomial - assert limit(16**k / (k * binomial(2*k, k)**2), k, oo) == pi - - def test_issue_9205(): x, y, a = symbols('x, y, a') assert Limit(x, x, a).free_symbols == {a} diff --git a/sympy/utilities/tests/test_lambdify.py b/sympy/utilities/tests/test_lambdify.py index a6926c6dca..199a160b08 100644 --- a/sympy/utilities/tests/test_lambdify.py +++ b/sympy/utilities/tests/test_lambdify.py @@ -502,23 +502,6 @@ def test_imps(): raises(ValueError, lambda: lambdify(x, f(f2(x)))) -def test_imps_errors(): - # Test errors that implemented functions can return, and still be able to - # form expressions. - # See: https://github.com/sympy/sympy/issues/10810 - for val, error_class in product((0, 0., 2, 2.0), - (AttributeError, TypeError, ValueError)): - - def myfunc(a): - if a == 0: - raise error_class - return 1 - - f = implemented_function('f', myfunc) - expr = f(val) - assert expr == f(val) - - def test_imps_wrong_args(): raises(ValueError, lambda: implemented_function(sin, lambda x: x))
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 7 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "mpmath>=0.19", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.7", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi @ file:///croot/certifi_1671487769961/work/certifi exceptiongroup==1.2.2 importlib-metadata==6.7.0 iniconfig==2.0.0 mpmath==1.3.0 packaging==24.0 pluggy==1.2.0 pytest==7.4.4 -e git+https://github.com/sympy/sympy.git@f32d8386dbab72c56575afa19f329912a7849c75#egg=sympy tomli==2.0.1 typing_extensions==4.7.1 zipp==3.15.0
name: sympy channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2022.12.7=py37h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=22.3.1=py37h06a4308_0 - python=3.7.16=h7a1cb2a_0 - readline=8.2=h5eee18b_0 - setuptools=65.6.3=py37h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.38.4=py37h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - exceptiongroup==1.2.2 - importlib-metadata==6.7.0 - iniconfig==2.0.0 - mpmath==1.3.0 - packaging==24.0 - pluggy==1.2.0 - pytest==7.4.4 - tomli==2.0.1 - typing-extensions==4.7.1 - zipp==3.15.0 prefix: /opt/conda/envs/sympy
[ "sympy/functions/elementary/tests/test_complexes.py::test_re", "sympy/functions/elementary/tests/test_complexes.py::test_im", "sympy/printing/tests/test_latex.py::test_latex_functions" ]
[ "sympy/functions/elementary/tests/test_complexes.py::test_sign_issue_3068", "sympy/functions/elementary/tests/test_complexes.py::test_principal_branch_fail", "sympy/functions/elementary/tests/test_complexes.py::test_issue_6167_6151", "sympy/printing/tests/test_latex.py::test_latex_symbols_failing", "sympy/printing/tests/test_latex.py::test_builtin_without_args_mismatched_names", "sympy/series/tests/test_limits.py::test_exponential2", "sympy/series/tests/test_limits.py::test_doit2", "sympy/series/tests/test_limits.py::test_order_oo", "sympy/utilities/tests/test_lambdify.py::test_number_precision", "sympy/utilities/tests/test_lambdify.py::test_numpy_transl", "sympy/utilities/tests/test_lambdify.py::test_numpy_translation_abs", "sympy/utilities/tests/test_lambdify.py::test_numexpr_printer", "sympy/utilities/tests/test_lambdify.py::test_issue_9334", "sympy/utilities/tests/test_lambdify.py::test_numpy_matrix", "sympy/utilities/tests/test_lambdify.py::test_numpy_transpose", "sympy/utilities/tests/test_lambdify.py::test_numpy_inverse", "sympy/utilities/tests/test_lambdify.py::test_numpy_old_matrix", "sympy/utilities/tests/test_lambdify.py::test_issue_9871", "sympy/utilities/tests/test_lambdify.py::test_numpy_piecewise", "sympy/utilities/tests/test_lambdify.py::test_numpy_logical_ops", "sympy/utilities/tests/test_lambdify.py::test_numpy_matmul", "sympy/utilities/tests/test_lambdify.py::test_numpy_numexpr", "sympy/utilities/tests/test_lambdify.py::test_numexpr_userfunctions" ]
[ "sympy/functions/elementary/tests/test_complexes.py::test_sign", "sympy/functions/elementary/tests/test_complexes.py::test_as_real_imag", "sympy/functions/elementary/tests/test_complexes.py::test_Abs", "sympy/functions/elementary/tests/test_complexes.py::test_Abs_rewrite", "sympy/functions/elementary/tests/test_complexes.py::test_Abs_real", "sympy/functions/elementary/tests/test_complexes.py::test_Abs_properties", "sympy/functions/elementary/tests/test_complexes.py::test_abs", "sympy/functions/elementary/tests/test_complexes.py::test_arg", "sympy/functions/elementary/tests/test_complexes.py::test_arg_rewrite", "sympy/functions/elementary/tests/test_complexes.py::test_adjoint", "sympy/functions/elementary/tests/test_complexes.py::test_conjugate", "sympy/functions/elementary/tests/test_complexes.py::test_conjugate_transpose", "sympy/functions/elementary/tests/test_complexes.py::test_transpose", "sympy/functions/elementary/tests/test_complexes.py::test_polarify", "sympy/functions/elementary/tests/test_complexes.py::test_unpolarify", "sympy/functions/elementary/tests/test_complexes.py::test_issue_4035", "sympy/functions/elementary/tests/test_complexes.py::test_issue_3206", "sympy/functions/elementary/tests/test_complexes.py::test_issue_4754_derivative_conjugate", "sympy/functions/elementary/tests/test_complexes.py::test_derivatives_issue_4757", "sympy/functions/elementary/tests/test_complexes.py::test_periodic_argument", "sympy/functions/elementary/tests/test_complexes.py::test_principal_branch", "sympy/printing/tests/test_latex.py::test_printmethod", "sympy/printing/tests/test_latex.py::test_latex_basic", "sympy/printing/tests/test_latex.py::test_latex_builtins", "sympy/printing/tests/test_latex.py::test_latex_cycle", "sympy/printing/tests/test_latex.py::test_latex_permutation", "sympy/printing/tests/test_latex.py::test_latex_Float", "sympy/printing/tests/test_latex.py::test_latex_symbols", "sympy/printing/tests/test_latex.py::test_hyper_printing", "sympy/printing/tests/test_latex.py::test_latex_bessel", "sympy/printing/tests/test_latex.py::test_latex_fresnel", "sympy/printing/tests/test_latex.py::test_latex_brackets", "sympy/printing/tests/test_latex.py::test_latex_indexed", "sympy/printing/tests/test_latex.py::test_latex_derivatives", "sympy/printing/tests/test_latex.py::test_latex_subs", "sympy/printing/tests/test_latex.py::test_latex_integrals", "sympy/printing/tests/test_latex.py::test_latex_sets", "sympy/printing/tests/test_latex.py::test_latex_Range", "sympy/printing/tests/test_latex.py::test_latex_sequences", "sympy/printing/tests/test_latex.py::test_latex_FourierSeries", "sympy/printing/tests/test_latex.py::test_latex_FormalPowerSeries", "sympy/printing/tests/test_latex.py::test_latex_intervals", "sympy/printing/tests/test_latex.py::test_latex_AccumuBounds", "sympy/printing/tests/test_latex.py::test_latex_emptyset", "sympy/printing/tests/test_latex.py::test_latex_union", "sympy/printing/tests/test_latex.py::test_latex_symmetric_difference", "sympy/printing/tests/test_latex.py::test_latex_Complement", "sympy/printing/tests/test_latex.py::test_latex_Complexes", "sympy/printing/tests/test_latex.py::test_latex_productset", "sympy/printing/tests/test_latex.py::test_latex_Naturals", "sympy/printing/tests/test_latex.py::test_latex_Naturals0", "sympy/printing/tests/test_latex.py::test_latex_Integers", "sympy/printing/tests/test_latex.py::test_latex_ImageSet", "sympy/printing/tests/test_latex.py::test_latex_ConditionSet", "sympy/printing/tests/test_latex.py::test_latex_ComplexRegion", 
"sympy/printing/tests/test_latex.py::test_latex_Contains", "sympy/printing/tests/test_latex.py::test_latex_sum", "sympy/printing/tests/test_latex.py::test_latex_product", "sympy/printing/tests/test_latex.py::test_latex_limits", "sympy/printing/tests/test_latex.py::test_issue_3568", "sympy/printing/tests/test_latex.py::test_latex", "sympy/printing/tests/test_latex.py::test_latex_dict", "sympy/printing/tests/test_latex.py::test_latex_list", "sympy/printing/tests/test_latex.py::test_latex_rational", "sympy/printing/tests/test_latex.py::test_latex_inverse", "sympy/printing/tests/test_latex.py::test_latex_DiracDelta", "sympy/printing/tests/test_latex.py::test_latex_Heaviside", "sympy/printing/tests/test_latex.py::test_latex_KroneckerDelta", "sympy/printing/tests/test_latex.py::test_latex_LeviCivita", "sympy/printing/tests/test_latex.py::test_mode", "sympy/printing/tests/test_latex.py::test_latex_Piecewise", "sympy/printing/tests/test_latex.py::test_latex_Matrix", "sympy/printing/tests/test_latex.py::test_latex_matrix_with_functions", "sympy/printing/tests/test_latex.py::test_latex_mul_symbol", "sympy/printing/tests/test_latex.py::test_latex_issue_4381", "sympy/printing/tests/test_latex.py::test_latex_issue_4576", "sympy/printing/tests/test_latex.py::test_latex_pow_fraction", "sympy/printing/tests/test_latex.py::test_noncommutative", "sympy/printing/tests/test_latex.py::test_latex_order", "sympy/printing/tests/test_latex.py::test_latex_Lambda", "sympy/printing/tests/test_latex.py::test_latex_PolyElement", "sympy/printing/tests/test_latex.py::test_latex_FracElement", "sympy/printing/tests/test_latex.py::test_latex_Poly", "sympy/printing/tests/test_latex.py::test_latex_ComplexRootOf", "sympy/printing/tests/test_latex.py::test_latex_RootSum", "sympy/printing/tests/test_latex.py::test_settings", "sympy/printing/tests/test_latex.py::test_latex_numbers", "sympy/printing/tests/test_latex.py::test_lamda", "sympy/printing/tests/test_latex.py::test_custom_symbol_names", "sympy/printing/tests/test_latex.py::test_matAdd", "sympy/printing/tests/test_latex.py::test_matMul", "sympy/printing/tests/test_latex.py::test_latex_MatrixSlice", "sympy/printing/tests/test_latex.py::test_latex_RandomDomain", "sympy/printing/tests/test_latex.py::test_PrettyPoly", "sympy/printing/tests/test_latex.py::test_integral_transforms", "sympy/printing/tests/test_latex.py::test_PolynomialRingBase", "sympy/printing/tests/test_latex.py::test_categories", "sympy/printing/tests/test_latex.py::test_Modules", "sympy/printing/tests/test_latex.py::test_QuotientRing", "sympy/printing/tests/test_latex.py::test_Tr", "sympy/printing/tests/test_latex.py::test_Adjoint", "sympy/printing/tests/test_latex.py::test_Hadamard", "sympy/printing/tests/test_latex.py::test_ZeroMatrix", "sympy/printing/tests/test_latex.py::test_boolean_args_order", "sympy/printing/tests/test_latex.py::test_imaginary", "sympy/printing/tests/test_latex.py::test_builtins_without_args", "sympy/printing/tests/test_latex.py::test_latex_greek_functions", "sympy/printing/tests/test_latex.py::test_translate", "sympy/printing/tests/test_latex.py::test_other_symbols", "sympy/printing/tests/test_latex.py::test_modifiers", "sympy/printing/tests/test_latex.py::test_greek_symbols", "sympy/printing/tests/test_latex.py::test_builtin_no_args", "sympy/printing/tests/test_latex.py::test_issue_6853", "sympy/printing/tests/test_latex.py::test_Mul", "sympy/printing/tests/test_latex.py::test_Pow", "sympy/printing/tests/test_latex.py::test_issue_7180", 
"sympy/printing/tests/test_latex.py::test_issue_8409", "sympy/printing/tests/test_latex.py::test_issue_8470", "sympy/printing/tests/test_latex.py::test_issue_7117", "sympy/printing/tests/test_latex.py::test_issue_2934", "sympy/printing/tests/test_latex.py::test_issue_10489", "sympy/series/tests/test_limits.py::test_basic1", "sympy/series/tests/test_limits.py::test_basic2", "sympy/series/tests/test_limits.py::test_basic3", "sympy/series/tests/test_limits.py::test_basic4", "sympy/series/tests/test_limits.py::test_basic5", "sympy/series/tests/test_limits.py::test_issue_3885", "sympy/series/tests/test_limits.py::test_Limit", "sympy/series/tests/test_limits.py::test_floor", "sympy/series/tests/test_limits.py::test_floor_requires_robust_assumptions", "sympy/series/tests/test_limits.py::test_ceiling", "sympy/series/tests/test_limits.py::test_ceiling_requires_robust_assumptions", "sympy/series/tests/test_limits.py::test_atan", "sympy/series/tests/test_limits.py::test_abs", "sympy/series/tests/test_limits.py::test_heuristic", "sympy/series/tests/test_limits.py::test_issue_3871", "sympy/series/tests/test_limits.py::test_exponential", "sympy/series/tests/test_limits.py::test_doit", "sympy/series/tests/test_limits.py::test_AccumBounds", "sympy/series/tests/test_limits.py::test_issue_3792", "sympy/series/tests/test_limits.py::test_issue_4090", "sympy/series/tests/test_limits.py::test_issue_4547", "sympy/series/tests/test_limits.py::test_issue_5164", "sympy/series/tests/test_limits.py::test_issue_5183", "sympy/series/tests/test_limits.py::test_issue_5184", "sympy/series/tests/test_limits.py::test_issue_5229", "sympy/series/tests/test_limits.py::test_issue_4546", "sympy/series/tests/test_limits.py::test_issue_3934", "sympy/series/tests/test_limits.py::test_calculate_series", "sympy/series/tests/test_limits.py::test_issue_5955", "sympy/series/tests/test_limits.py::test_newissue", "sympy/series/tests/test_limits.py::test_extended_real_line", "sympy/series/tests/test_limits.py::test_issue_5436", "sympy/series/tests/test_limits.py::test_Limit_dir", "sympy/series/tests/test_limits.py::test_polynomial", "sympy/series/tests/test_limits.py::test_rational", "sympy/series/tests/test_limits.py::test_issue_5740", "sympy/series/tests/test_limits.py::test_issue_6366", "sympy/series/tests/test_limits.py::test_factorial", "sympy/series/tests/test_limits.py::test_issue_6560", "sympy/series/tests/test_limits.py::test_issue_5172", "sympy/series/tests/test_limits.py::test_issue_7088", "sympy/series/tests/test_limits.py::test_issue_6364", "sympy/series/tests/test_limits.py::test_issue_4099", "sympy/series/tests/test_limits.py::test_issue_4503", "sympy/series/tests/test_limits.py::test_issue_8730", "sympy/series/tests/test_limits.py::test_issue_9205", "sympy/series/tests/test_limits.py::test_limit_seq", "sympy/utilities/tests/test_lambdify.py::test_no_args", "sympy/utilities/tests/test_lambdify.py::test_single_arg", "sympy/utilities/tests/test_lambdify.py::test_list_args", "sympy/utilities/tests/test_lambdify.py::test_str_args", "sympy/utilities/tests/test_lambdify.py::test_own_namespace", "sympy/utilities/tests/test_lambdify.py::test_own_module", "sympy/utilities/tests/test_lambdify.py::test_bad_args", "sympy/utilities/tests/test_lambdify.py::test_atoms", "sympy/utilities/tests/test_lambdify.py::test_sympy_lambda", "sympy/utilities/tests/test_lambdify.py::test_math_lambda", "sympy/utilities/tests/test_lambdify.py::test_mpmath_lambda", "sympy/utilities/tests/test_lambdify.py::test_math_transl", 
"sympy/utilities/tests/test_lambdify.py::test_mpmath_transl", "sympy/utilities/tests/test_lambdify.py::test_exponentiation", "sympy/utilities/tests/test_lambdify.py::test_sqrt", "sympy/utilities/tests/test_lambdify.py::test_trig", "sympy/utilities/tests/test_lambdify.py::test_vector_simple", "sympy/utilities/tests/test_lambdify.py::test_vector_discontinuous", "sympy/utilities/tests/test_lambdify.py::test_trig_symbolic", "sympy/utilities/tests/test_lambdify.py::test_trig_float", "sympy/utilities/tests/test_lambdify.py::test_docs", "sympy/utilities/tests/test_lambdify.py::test_math", "sympy/utilities/tests/test_lambdify.py::test_sin", "sympy/utilities/tests/test_lambdify.py::test_matrix", "sympy/utilities/tests/test_lambdify.py::test_issue9474", "sympy/utilities/tests/test_lambdify.py::test_integral", "sympy/utilities/tests/test_lambdify.py::test_sym_single_arg", "sympy/utilities/tests/test_lambdify.py::test_sym_list_args", "sympy/utilities/tests/test_lambdify.py::test_sym_integral", "sympy/utilities/tests/test_lambdify.py::test_namespace_order", "sympy/utilities/tests/test_lambdify.py::test_imps", "sympy/utilities/tests/test_lambdify.py::test_imps_wrong_args", "sympy/utilities/tests/test_lambdify.py::test_lambdify_imps", "sympy/utilities/tests/test_lambdify.py::test_dummification", "sympy/utilities/tests/test_lambdify.py::test_python_keywords", "sympy/utilities/tests/test_lambdify.py::test_lambdify_docstring", "sympy/utilities/tests/test_lambdify.py::test_special_printers", "sympy/utilities/tests/test_lambdify.py::test_true_false", "sympy/utilities/tests/test_lambdify.py::test_issue_2790", "sympy/utilities/tests/test_lambdify.py::test_ITE", "sympy/utilities/tests/test_lambdify.py::test_Min_Max" ]
[]
BSD
487
nickstenning__honcho-171
824775779ddf30606e7514b4639e81a2d6f25393
2016-03-28 14:30:24
824775779ddf30606e7514b4639e81a2d6f25393
nickstenning: @migurski Could you check this out and let me know if it correctly addresses the issue for you? migurski: Thanks Nick! I checked again with Python 2.7.6 on Ubuntu 14.04. At 84a1f7d it generated a correct script, at d8be8f4 it generated a bad one, and with your recent efd6292 it generated this improved working script: ``` start on starting honcho-stuff stop on stopping honcho-stuff respawn env VAR='foo bar' env PORT=5000 exec su - migurski -m -s /bin/sh -c 'cd /home/migurski/honcho; exec python /home/migurski/honcho/stuff.py >> /var/log/honcho/stuff-1.log 2>&1' ``` migurski: I’ve updated https://github.com/nickstenning/honcho/pull/154, though with the file `honcho/test/integration/test_export.py` gone I’m not sure it will have an effect.
diff --git a/honcho/export/templates/upstart/process.conf b/honcho/export/templates/upstart/process.conf index 8a05378..fe6b451 100644 --- a/honcho/export/templates/upstart/process.conf +++ b/honcho/export/templates/upstart/process.conf @@ -2,8 +2,7 @@ start on starting {{ group_name }} stop on stopping {{ group_name }} respawn -exec su - {{ user }} -s {{ shell }} -c 'cd {{ app_root }}; -{%- for k, v in process.env.items() -%} - export {{ k }}={{ v | shellquote }}; -{%- endfor -%} -exec {{ process.cmd }} >> {{ log }}/{{ process.name|dashrepl }}.log 2>&1' +{% for k, v in process.env.items() -%} +env {{ k }}={{ v | shellquote }} +{% endfor %} +exec su - {{ user }} -m -s {{ shell }} -c 'cd {{ app_root }}; exec {{ process.cmd }} >> {{ log }}/{{ process.name|dashrepl }}.log 2>&1' diff --git a/honcho/manager.py b/honcho/manager.py index 218f2b4..ff31a8e 100644 --- a/honcho/manager.py +++ b/honcho/manager.py @@ -53,7 +53,7 @@ class Manager(object): self._terminating = False - def add_process(self, name, cmd, quiet=False, env=None): + def add_process(self, name, cmd, quiet=False, env=None, cwd=None): """ Add a process to this manager instance. The process will not be started until #loop() is called. @@ -63,7 +63,8 @@ class Manager(object): name=name, quiet=quiet, colour=next(self._colours), - env=env) + env=env, + cwd=cwd) self._processes[name] = {} self._processes[name]['obj'] = proc diff --git a/honcho/process.py b/honcho/process.py index 669c8ef..c211af1 100644 --- a/honcho/process.py +++ b/honcho/process.py @@ -21,12 +21,14 @@ class Process(object): name=None, colour=None, quiet=False, - env=None): + env=None, + cwd=None): self.cmd = cmd self.colour = colour self.quiet = quiet self.name = name self.env = os.environ.copy() if env is None else env + self.cwd = cwd # This is a honcho.environ.Env object, to allow for stubbing of # external calls, not the operating system environment. @@ -36,7 +38,7 @@ class Process(object): def run(self, events=None, ignore_signals=False): self._events = events - self._child = self._child_ctor(self.cmd, env=self.env) + self._child = self._child_ctor(self.cmd, env=self.env, cwd=self.cwd) self._send_message({'pid': self._child.pid}, type='start') # Don't pay attention to SIGINT/SIGTERM. The process itself is
Exported upstart configuration silently fails with quoted variables At or near d8be8f4a8 (version 0.5.0), quoted variables in exported upstart scripts became invalid and fail silently. Previously, quoted `.env` variables with spaces generated upstart configurations with correctly-nested double and single quotes. This is an example generated by 84a1f7d (also version 0.5.0): ``` VAR="foo bar" ``` ``` start on starting things-stuff stop on stopping things-stuff respawn exec su - migurski -s /bin/sh -c 'cd /home/migurski/things; export PORT=5000; export VAR="foo bar"; python stuff.py >> /var/log/things/stuff-1.log 2>&1' ``` Starting at d8be8f4a8 and still in 0.6.6, the exported configuration from the configuration above began producing this invalid and failing upstart configuration, due to the single quotes: ``` start on starting things-stuff stop on stopping things-stuff respawn exec su - migurski -s /bin/sh -c 'cd /home/migurski/things;export VAR='foo bar';export PORT=5000;python stuff.py >> /var/log/things/stuff-1.log 2>&1' ``` Here are my Procfile and python script for testing: ``` stuff: python stuff.py ``` ```python from sys import stderr from os import environ from time import sleep while True: print >> stderr, repr(environ['VAR']) sleep(5) ```
nickstenning/honcho
diff --git a/tests/integration/test_export.py b/tests/integration/test_export.py index bbb2397..09543b7 100644 --- a/tests/integration/test_export.py +++ b/tests/integration/test_export.py @@ -37,3 +37,37 @@ def test_export_upstart(testenv): 'trunk-web-1.conf'): expected = testenv.path('elephant', filename) assert os.path.exists(expected) + + [email protected]('testenv', [{ + 'Procfile': "web: python web.py", + '.env': """ +NORMAL=ok +SQ_SPACES='sqspace sqspace' +DQ_SPACES="dqspace dqspace" +SQ="it's got single quotes" +DQ='it has "double" quotes' +EXCL='an exclamation mark!' +SQ_DOLLAR='costs $UNINTERPOLATED amount' +DQ_DOLLAR="costs $UNINTERPOLATED amount" +""" +}], indirect=True) +def test_export_upstart_environment(testenv): + ret, out, err = testenv.run_honcho([ + 'export', + 'upstart', + testenv.path('test'), + '-a', 'envvars', + ]) + + assert ret == 0 + + lines = open(testenv.path('test', 'envvars-web-1.conf')).readlines() + assert 'env NORMAL=ok\n' in lines + assert "env SQ_SPACES='sqspace sqspace'\n" in lines + assert "env DQ_SPACES='dqspace dqspace'\n" in lines + assert "env SQ='it'\"'\"'s got single quotes'\n" in lines + assert "env DQ='it has \"double\" quotes'\n" in lines + assert "env EXCL='an exclamation mark!'\n" in lines + assert "env SQ_DOLLAR='costs $UNINTERPOLATED amount'\n" in lines + assert "env DQ_DOLLAR='costs $UNINTERPOLATED amount'\n" in lines diff --git a/tests/test_manager.py b/tests/test_manager.py index abee53c..ae028c6 100644 --- a/tests/test_manager.py +++ b/tests/test_manager.py @@ -59,12 +59,13 @@ class FakeEnv(object): class FakeProcess(object): - def __init__(self, cmd, name=None, colour=None, quiet=None, env=None): + def __init__(self, cmd, name=None, colour=None, quiet=None, env=None, cwd=None): self.cmd = cmd self.name = name self.colour = colour self.quiet = quiet self.env = env + self.cwd = cwd self._events = None self._options = {} @@ -229,6 +230,10 @@ class TestManager(object): with pytest.raises(AssertionError): self.m.add_process('foo', 'another command') + def test_add_process_sets_cwd(self): + proc = self.m.add_process('foo', 'ruby server.rb', cwd='foo-dir') + assert proc.cwd == 'foo-dir' + def test_loop_with_empty_manager_returns_immediately(self): self.m.loop() diff --git a/tests/test_process.py b/tests/test_process.py index 59e1538..9002212 100644 --- a/tests/test_process.py +++ b/tests/test_process.py @@ -184,3 +184,9 @@ class TestProcess(object): proc.run(self.q) msg = self.q.find_message({'returncode': 42}) assert msg.type == 'stop' + + def test_cwd_passed_along(self): + proc = Process('echo 123', cwd='fake-dir') + proc._child_ctor = FakePopen + proc.run(self.q) + assert proc._child.kwargs['cwd'] == 'fake-dir'
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 3 }, "num_modified_files": 3 }
0.6
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[export]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.9", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work -e git+https://github.com/nickstenning/honcho.git@824775779ddf30606e7514b4639e81a2d6f25393#egg=honcho iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work Jinja2==2.7.3 MarkupSafe==3.0.2 packaging @ file:///croot/packaging_1734472117206/work pluggy @ file:///croot/pluggy_1733169602837/work pytest @ file:///croot/pytest_1738938843180/work tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
name: honcho channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - exceptiongroup=1.2.0=py39h06a4308_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - packaging=24.2=py39h06a4308_0 - pip=25.0=py39h06a4308_0 - pluggy=1.5.0=py39h06a4308_0 - pytest=8.3.4=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tomli=2.0.1=py39h06a4308_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - jinja2==2.7.3 - markupsafe==3.0.2 prefix: /opt/conda/envs/honcho
[ "tests/test_manager.py::TestManager::test_add_process_sets_cwd", "tests/test_process.py::TestProcess::test_cwd_passed_along" ]
[ "tests/integration/test_export.py::test_export_supervisord[testenv0]", "tests/integration/test_export.py::test_export_upstart[testenv0]", "tests/integration/test_export.py::test_export_upstart_environment[testenv0]" ]
[ "tests/test_manager.py::TestManager::test_init_sets_default_printer_width", "tests/test_manager.py::TestManager::test_add_process_updates_printer_width", "tests/test_manager.py::TestManager::test_add_process_sets_name", "tests/test_manager.py::TestManager::test_add_process_sets_cmd", "tests/test_manager.py::TestManager::test_add_process_sets_colour", "tests/test_manager.py::TestManager::test_add_process_sets_unique_colours", "tests/test_manager.py::TestManager::test_add_process_sets_quiet", "tests/test_manager.py::TestManager::test_add_process_name_must_be_unique", "tests/test_manager.py::TestManager::test_loop_with_empty_manager_returns_immediately", "tests/test_manager.py::TestManager::test_loop_calls_process_run", "tests/test_manager.py::TestManager::test_printer_receives_messages_in_correct_order", "tests/test_manager.py::TestManager::test_printer_receives_lines_multi_process", "tests/test_manager.py::TestManager::test_returncode_set_by_first_exiting_process", "tests/test_manager.py::TestManager::test_printer_receives_lines_after_stop", "tests/test_process.py::TestProcess::test_ctor_cmd", "tests/test_process.py::TestProcess::test_ctor_name", "tests/test_process.py::TestProcess::test_ctor_colour", "tests/test_process.py::TestProcess::test_ctor_quiet", "tests/test_process.py::TestProcess::test_output_receives_start_with_pid", "tests/test_process.py::TestProcess::test_message_contains_name", "tests/test_process.py::TestProcess::test_message_contains_time", "tests/test_process.py::TestProcess::test_message_contains_colour", "tests/test_process.py::TestProcess::test_output_receives_lines", "tests/test_process.py::TestProcess::test_output_receives_lines_invalid_utf8", "tests/test_process.py::TestProcess::test_output_does_not_receive_lines_when_quiet", "tests/test_process.py::TestProcess::test_output_receives_stop", "tests/test_process.py::TestProcess::test_output_receives_stop_with_returncode" ]
[]
MIT License
488
alecthomas__voluptuous-155
32aeeec65d77940655a2905c9f15114586eb785e
2016-03-29 07:26:15
17204b79ca56b65aa3e4cfb20e7ef4555c2e8592
diff --git a/README.md b/README.md index d991125..74490c2 100644 --- a/README.md +++ b/README.md @@ -522,7 +522,7 @@ backtracking is attempted: ... raise AssertionError('MultipleInvalid not raised') ... except MultipleInvalid as e: ... exc = e ->>> str(exc) == "invalid list value @ data[0][0]" +>>> str(exc) == "not a valid value @ data[0][0]" True ``` diff --git a/voluptuous.py b/voluptuous.py index 3ab4de4..4b1215e 100644 --- a/voluptuous.py +++ b/voluptuous.py @@ -227,10 +227,6 @@ class InclusiveInvalid(Invalid): """Not all values found in inclusion group.""" -class SequenceItemInvalid(Invalid): - """One of the values found in a sequence was invalid.""" - - class SequenceTypeInvalid(Invalid): """The type found is not a sequence type.""" @@ -664,7 +660,7 @@ class Schema(object): >>> validator = Schema(['one', 'two', int]) >>> validator(['one']) ['one'] - >>> with raises(MultipleInvalid, 'invalid list value @ data[0]'): + >>> with raises(MultipleInvalid, 'expected int @ data[0]'): ... validator([3.5]) >>> validator([1]) [1] @@ -698,8 +694,6 @@ class Schema(object): raise invalid = e else: - if len(invalid.path) <= len(index_path): - invalid = SequenceItemInvalid('invalid %s value' % seq_type_name, index_path) errors.append(invalid) if errors: raise MultipleInvalid(errors) @@ -714,7 +708,7 @@ class Schema(object): >>> validator = Schema(('one', 'two', int)) >>> validator(('one',)) ('one',) - >>> with raises(MultipleInvalid, 'invalid tuple value @ data[0]'): + >>> with raises(MultipleInvalid, 'expected int @ data[0]'): ... validator((3.5,)) >>> validator((1,)) (1,) @@ -729,7 +723,7 @@ class Schema(object): >>> validator = Schema(['one', 'two', int]) >>> validator(['one']) ['one'] - >>> with raises(MultipleInvalid, 'invalid list value @ data[0]'): + >>> with raises(MultipleInvalid, 'expected int @ data[0]'): ... validator([3.5]) >>> validator([1]) [1] @@ -1095,7 +1089,7 @@ class Msg(object): Messages are only applied to invalid direct descendants of the schema: >>> validate = Schema(Msg([['one', 'two', int]], 'not okay!')) - >>> with raises(MultipleInvalid, 'invalid list value @ data[0][0]'): + >>> with raises(MultipleInvalid, 'expected int @ data[0][0]'): ... validate([['three']]) The type which is thrown can be overridden but needs to be a subclass of Invalid
list validation swallows more useful/explicit error messages Take this validator and schema: ```python def is_even(value): if value % 2: raise Invalid('%i is not even' % value) return value schema = Schema(dict(even_number=All(int, is_even))) schema(dict(even_number=3)) ``` We get a useful error message: ``` voluptuous.MultipleInvalid: 3 is not even for dictionary value @ data['even_number'] ``` Sadly, as soon as we want a list of these, we lose the useful error message: ```python schema = Schema(dict(even_numbers=[All(int, is_even)])) schema(dict(even_numbers=[3])) ```` Instead, we get a much less useful message: ``` voluptuous.MultipleInvalid: invalid list value @ data['even_numbers'][0] ``` Sadly, nowhere in that `MultipleInvalid` exception is the original error stored.
alecthomas/voluptuous
diff --git a/tests.md b/tests.md index f098c1b..18f6fba 100644 --- a/tests.md +++ b/tests.md @@ -16,7 +16,7 @@ value: ... raise AssertionError('MultipleInvalid not raised') ... except MultipleInvalid as e: ... exc = e - >>> str(exc) == 'invalid list value @ data[1]' + >>> str(exc) == 'expected a dictionary @ data[1]' True It should also be accurate for nested values: @@ -35,7 +35,7 @@ It should also be accurate for nested values: ... except MultipleInvalid as e: ... exc = e >>> str(exc) - "invalid list value @ data[0]['four'][0]" + "not a valid value @ data[0]['four'][0]" >>> try: ... schema([{'six': {'seven': 'nine'}}]) @@ -116,9 +116,9 @@ Multiple errors are reported: ... schema([1, 2, 3]) ... except MultipleInvalid as e: ... print([str(i) for i in e.errors]) # doctest: +NORMALIZE_WHITESPACE - ['invalid list value @ data[0]', - 'invalid list value @ data[1]', - 'invalid list value @ data[2]'] + ['expected a list @ data[0]', + 'expected a list @ data[1]', + 'expected a list @ data[2]'] Required fields in dictionary which are invalid should not have required : diff --git a/tests.py b/tests.py index 0dd4e9d..32e6694 100644 --- a/tests.py +++ b/tests.py @@ -112,7 +112,7 @@ def test_literal(): try: schema([{"c": 1}]) except Invalid as e: - assert_equal(str(e), 'invalid list value @ data[0]') + assert_equal(str(e), "{'c': 1} not match for {'b': 1} @ data[0]") else: assert False, "Did not raise Invalid" @@ -242,3 +242,23 @@ def test_repr(): ) assert_equal(repr(coerce_), "Coerce(int, msg='moo')") assert_equal(repr(all_), "All('10', Coerce(int, msg=None), msg='all msg')") + + +def test_list_validation_messages(): + """ Make sure useful error messages are available """ + + def is_even(value): + if value % 2: + raise Invalid('%i is not even' % value) + return value + + schema = Schema(dict(even_numbers=[All(int, is_even)])) + + try: + schema(dict(even_numbers=[3])) + except Invalid as e: + assert_equal(len(e.errors), 1, e.errors) + assert_equal(str(e.errors[0]), "3 is not even @ data['even_numbers'][0]") + assert_equal(str(e), "3 is not even @ data['even_numbers'][0]") + else: + assert False, "Did not raise Invalid"
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 2 }
0.8
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "nose", "coverage", "pytest" ], "pre_install": null, "python": "3.5", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 coverage==6.2 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work nose==1.3.7 packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work py @ file:///opt/conda/conda-bld/py_1644396412707/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 toml @ file:///tmp/build/80754af9/toml_1616166611790/work typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work -e git+https://github.com/alecthomas/voluptuous.git@32aeeec65d77940655a2905c9f15114586eb785e#egg=voluptuous zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: voluptuous channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - coverage==6.2 - nose==1.3.7 prefix: /opt/conda/envs/voluptuous
[ "tests.py::test_literal", "tests.py::test_list_validation_messages" ]
[]
[ "tests.py::test_required", "tests.py::test_extra_with_required", "tests.py::test_iterate_candidates", "tests.py::test_in", "tests.py::test_not_in", "tests.py::test_remove", "tests.py::test_extra_empty_errors", "tests.py::test_url_validation", "tests.py::test_url_validation_with_none", "tests.py::test_url_validation_with_empty_string", "tests.py::test_url_validation_without_host", "tests.py::test_copy_dict_undefined", "tests.py::test_sorting", "tests.py::test_schema_extend", "tests.py::test_schema_extend_overrides", "tests.py::test_repr" ]
[]
BSD 3-Clause "New" or "Revised" License
489
SciLifeLab__genologics-154
33b08db9bc8d85427c79f5e10792b51f10b27a47
2016-04-02 05:57:33
dc79dc55dc1218582f30a5e76e9a0fff428cbf30
diff --git a/genologics/entities.py b/genologics/entities.py index 22e0625..4b7bcf7 100644 --- a/genologics/entities.py +++ b/genologics/entities.py @@ -271,7 +271,6 @@ class BaseDescriptor(object): def __get__(self, instance, cls): raise NotImplementedError - class TagDescriptor(BaseDescriptor): """Abstract base descriptor for an instance attribute represented by an XML element. @@ -280,7 +279,6 @@ class TagDescriptor(BaseDescriptor): def __init__(self, tag): self.tag = tag - class StringDescriptor(TagDescriptor): """An instance attribute containing a string value represented by an XML element. @@ -307,7 +305,6 @@ class StringDescriptor(TagDescriptor): else: return instance.root - class StringAttributeDescriptor(TagDescriptor): """An instance attribute containing a string value represented by an XML attribute. @@ -317,7 +314,6 @@ class StringAttributeDescriptor(TagDescriptor): instance.get() return instance.root.attrib[self.tag] - class StringListDescriptor(TagDescriptor): """An instance attribute containing a list of strings represented by multiple XML elements. @@ -330,7 +326,6 @@ class StringListDescriptor(TagDescriptor): result.append(node.text) return result - class StringDictionaryDescriptor(TagDescriptor): """An instance attribute containing a dictionary of string key/values represented by a hierarchical XML element. @@ -345,7 +340,6 @@ class StringDictionaryDescriptor(TagDescriptor): result[node2.tag] = node2.text return result - class IntegerDescriptor(StringDescriptor): """An instance attribute containing an integer value represented by an XMl element. @@ -359,7 +353,6 @@ class IntegerDescriptor(StringDescriptor): else: return int(node.text) - class BooleanDescriptor(StringDescriptor): """An instance attribute containing a boolean value represented by an XMl element. @@ -373,7 +366,6 @@ class BooleanDescriptor(StringDescriptor): else: return node.text.lower() == 'true' - class UdfDictionary(object): "Dictionary-like container of UDFs, optionally within a UDT." @@ -536,8 +528,6 @@ class UdfDictionary(object): def get(self, key, default=None): return self._lookup.get(key, default) - - class UdfDictionaryDescriptor(BaseDescriptor): """An instance attribute containing a dictionary of UDF values represented by multiple XML elements. @@ -557,7 +547,6 @@ class UdtDictionaryDescriptor(UdfDictionaryDescriptor): _UDT = True - class PlacementDictionaryDescriptor(TagDescriptor): """An instance attribute containing a dictionary of locations keys and artifact values represented by multiple XML elements. @@ -571,7 +560,6 @@ class PlacementDictionaryDescriptor(TagDescriptor): self.value[key] = Artifact(instance.lims,uri=node.attrib['uri']) return self.value - class ExternalidListDescriptor(BaseDescriptor): """An instance attribute yielding a list of tuples (id, uri) for external identifiers represented by multiple XML elements. @@ -584,7 +572,6 @@ class ExternalidListDescriptor(BaseDescriptor): result.append((node.attrib.get('id'), node.attrib.get('uri'))) return result - class EntityDescriptor(TagDescriptor): "An instance attribute referencing another entity instance." @@ -600,7 +587,6 @@ class EntityDescriptor(TagDescriptor): else: return self.klass(instance.lims, uri=node.attrib['uri']) - class EntityListDescriptor(EntityDescriptor): """An instance attribute yielding a list of entity instances represented by multiple XML elements. 
@@ -668,6 +654,7 @@ class NestedEntityListDescriptor(EntityListDescriptor): for node in rootnode.findall(self.tag): result.append(self.klass(instance.lims, uri=node.attrib['uri'])) return result + class DimensionDescriptor(TagDescriptor): """An instance attribute containing a dictionary specifying the properties of a dimension of a container type. @@ -680,7 +667,6 @@ class DimensionDescriptor(TagDescriptor): offset = int(node.find('offset').text), size = int(node.find('size').text)) - class LocationDescriptor(TagDescriptor): """An instance attribute containing a tuple (container, value) specifying the location of an analyte in a container. @@ -750,7 +736,6 @@ class Entity(object): raise ValueError("Entity uri and id can't be both None") else: uri = lims.get_uri(cls._URI, id) - try: return lims.cache[uri] except KeyError: @@ -950,6 +935,7 @@ class Process(Entity): udf = UdfDictionaryDescriptor() udt = UdtDictionaryDescriptor() files = EntityListDescriptor(nsmap('file:file'), File) + # instrument XXX # process_parameters XXX @@ -1042,6 +1028,11 @@ class Process(Entity): cs.append(o_a.container) return list(frozenset(cs)) + @property + def step(self): + """Retrive the Step coresponding to this process. They share the same id""" + return Step(self.lims, id=self.id) + class Artifact(Entity): "Any process input or output; analyte or file." @@ -1105,28 +1096,76 @@ class Artifact(Entity): stateless = property(stateless) class StepActions(Entity): - """Small hack to be able to query the actions subentity of - the Step entity. Right now, only the escalation is parsed.""" + """Actions associated with a step""" + _escalation = None - def __init__(self, lims, uri=None, id=None): - super(StepActions, self).__init__(lims,uri,id) - self.escalation={} - self.lims=lims - self.root=self.lims.get(self.uri) - for node in self.root.findall('escalation'): - self.escalation['artifacts']=[] - self.escalation['author']=Researcher(lims,uri=node.find('request').find('author').attrib.get('uri')) - self.escalation['request']=uri=node.find('request').find('comment').text - if node.find('review') is not None: #recommended by the Etree doc - self.escalation['status']='Reviewed' - self.escalation['reviewer']= Researcher(lims,uri=node.find('review').find('author').attrib.get('uri')) - self.escalation['answer']=uri=node.find('review').find('comment').text - else: - self.escalation['status']='Pending' + @property + def escalation(self): + if not self._escalation: + self.get() + self._escalation={} + for node in self.root.findall('escalation'): + self._escalation['artifacts']=[] + self._escalation['author']=Researcher(self.lims,uri=node.find('request').find('author').attrib.get('uri')) + self._escalation['request']=uri=node.find('request').find('comment').text + if node.find('review') is not None: #recommended by the Etree doc + self._escalation['status']='Reviewed' + self._escalation['reviewer']= Researcher(self.lims,uri=node.find('review').find('author').attrib.get('uri')) + self._escalation['answer']=uri=node.find('review').find('comment').text + else: + self._escalation['status']='Pending' + + for node2 in node.findall('escalated-artifacts'): + art= self.lims.get_batch([Artifact(self.lims, uri=ch.attrib.get('uri')) for ch in node2]) + self._escalation['artifacts'].extend(art) + return self._escalation + + @property + def next_actions(self): + actions = [] + self.get() + if self.root.find('next-actions') is not None: + for node in self.root.find('next-actions').findall('next-action'): + action = { + 'artifact': 
Artifact(self.lims, node.attrib.get('artifact-uri')), + 'action': node.attrib.get('action'), + } + if node.attrib.get('step-uri'): + action['step']=Step(self.lims, uri=node.attrib.get('step-uri')) + if node.attrib.get('rework-step-uri'): + action['rework-step']=Step(self.lims, uri=node.attrib.get('rework-step-uri')) + actions.append(action) + return actions + +class ReagentKit(Entity): + """Type of Reagent with information about the provider""" + _URI="reagenttypes" + _TAG="reagent-kit" + + name = StringDescriptor('name') + supplier = StringDescriptor('supplier') + website = StringDescriptor('website') + archived = BooleanDescriptor('archived') - for node2 in node.findall('escalated-artifacts'): - art= lims.get_batch([Artifact(lims,uri=ch.attrib.get('uri')) for ch in node2]) - self.escalation['artifacts'].extend(art) +class ReagentLot(Entity): + """Reagent Lots contain information about a particualr lot of reagent used in a step""" + _URI="reagentlot" + _TAG="reagent-lot" + + reagent_kit = EntityDescriptor('reagent-kit', ReagentKit) + name = StringDescriptor('name') + lot_number = StringDescriptor('lot-number') + created_date = StringDescriptor('created-date') + last_modified_date = StringDescriptor('last-modified-date') + expiry_date = StringDescriptor('expiry-date') + created_by = EntityDescriptor('created-by', Researcher) + last_modified_by = EntityDescriptor('last-modified-by', Researcher) + status = StringDescriptor('status') + usage_count = IntegerDescriptor('usage-count') + + +class StepReagentLots(Entity): + reagent_lots = NestedEntityListDescriptor('reagent-lot', ReagentLot, 'reagent-lots') class Step(Entity): @@ -1134,16 +1173,15 @@ class Step(Entity): _URI = 'steps' - def __init__(self, lims, uri=None, id=None): - super(Step, self).__init__(lims,uri,id) - assert self.uri is not None - actionsuri="{0}/actions".format(self.uri) - self.actions= StepActions(lims,uri=actionsuri) - + _reagent_lots = EntityDescriptor('reagent-lots', StepReagentLots) + actions = EntityDescriptor('actions', StepActions) #placements = EntityDescriptor('placements', StepPlacements) #program_status = EntityDescriptor('program-status',StepProgramStatus) #details = EntityListDescriptor(nsmap('file:file'), StepDetails) + @property + def reagent_lots(self): + return self._reagent_lots.reagent_lots class ProtocolStep(Entity): """Steps key in the Protocol object""" @@ -1173,6 +1211,7 @@ class Stage(Entity): """Holds Protocol/Workflow""" protocol = EntityDescriptor('protocol', Protocol) + class Workflow(Entity): """ Workflow, introduced in 3.5""" _URI="configuration/workflows" @@ -1200,10 +1239,10 @@ class ReagentType(Entity): if child.attrib.get("name") == "Sequence": self.sequence=child.attrib.get("value") + Sample.artifact = EntityDescriptor('artifact', Artifact) StepActions.step = EntityDescriptor('step', Step) Stage.workflow = EntityDescriptor('workflow', Workflow) Artifact.workflow_stages = NestedEntityListDescriptor('workflow-stage', Stage, 'workflow-stages') Step.configuration = EntityDescriptor('configuration', ProtocolStep) - diff --git a/genologics/lims.py b/genologics/lims.py index 3ee432a..316e9b1 100644 --- a/genologics/lims.py +++ b/genologics/lims.py @@ -489,9 +489,7 @@ class Lims(object): root = self.post(uri, data) def route_artifacts(self, artifact_list, workflow_uri=None, stage_uri=None, unassign=False): - root = ElementTree.Element(nsmap('rt:routing')) - if unassign: s = ElementTree.SubElement(root, 'unassign') else: @@ -509,7 +507,6 @@ class Lims(object): auth=(self.username, self.password), 
headers={'content-type': 'application/xml', 'accept': 'application/xml'}) - self.validate_response(r) diff --git a/genologics/version.py b/genologics/version.py index 18a95ec..b4cd250 100644 --- a/genologics/version.py +++ b/genologics/version.py @@ -1,1 +1,1 @@ -__version__="0.3.1" +__version__="0.3.2" diff --git a/setup.py b/setup.py index 7aa61db..772f4e8 100644 --- a/setup.py +++ b/setup.py @@ -11,7 +11,8 @@ version_py = os.path.join(os.path.dirname(__file__), 'genologics', 'version.py') version = subprocess.Popen(["git", "describe", "--abbrev=0"],stdout=subprocess.PIPE, universal_newlines=True).communicate()[0].rstrip() if not version: version = __version__ - +else: + version = version.decode("utf-8") setup(name='genologics',
Change to StepActions Hi, I need to rework the way Step stores actions, and that might result in backward-incompatible changes to the API. I was wondering how much this would affect you and the potential users of the API, as it seems to still be a work in progress. Cheers Tim
SciLifeLab/genologics
diff --git a/tests/test_entities.py b/tests/test_entities.py index dbe16c6..2163bce 100644 --- a/tests/test_entities.py +++ b/tests/test_entities.py @@ -3,16 +3,18 @@ from xml.etree import ElementTree from sys import version_info from io import BytesIO +from genologics.lims import Lims +from genologics.entities import StringDescriptor, StringAttributeDescriptor, StringListDescriptor, \ + StringDictionaryDescriptor, IntegerDescriptor, BooleanDescriptor, UdfDictionary, StepActions, Researcher, Artifact, \ + Step + if version_info.major == 2: from mock import patch, Mock - import __builtin__ as builtins else: from unittest.mock import patch, Mock - import builtins -from genologics.entities import StringDescriptor, StringAttributeDescriptor, StringListDescriptor, \ - StringDictionaryDescriptor, IntegerDescriptor, BooleanDescriptor, UdfDictionary + class TestEntities(TestCase): @@ -214,3 +216,86 @@ class TestUdfDictionary(TestCase): def test_get(self): pass + + +class TestEntities(TestCase): + url = 'http://testgenologics.com:4040' + dummy_xml="""<?xml version="1.0" encoding="UTF-8" standalone="yes"?> + <dummy></dummy>""" + + def setUp(self): + self.lims = Lims(self.url, username='test', password='password') + +class TestStepActions(TestEntities): + url = 'http://testgenologics.com:4040' + step_actions_xml = """<stp:actions xmlns:stp="http://genologics.com/ri/step" uri="..."> + <step rel="..." uri="{url}/steps/s1"> + </step> + <configuration uri="{url}/config/1">...</configuration> + <next-actions> + <next-action artifact-uri="{url}/artifacts/a1" action="requeue" step-uri="..." rework-step-uri="..."> + </next-action> + </next-actions> + <escalation> + <request> + <author uri="{url}/researchers/r1"> + <first-name>foo</first-name> + <last-name>bar</last-name> + </author> + <reviewer uri="{url}/researchers/r1"> + <first-name>foo</first-name> + <last-name>bar</last-name> + </reviewer> + <date>01-01-1970</date> + <comment>no comments</comment> + </request> + <review> + <author uri="{url}/researchers/r1"> + <first-name>foo</first-name> + <last-name>bar</last-name> + </author> + <date>01-01-1970</date> + <comment>no comments</comment> + </review> + <escalated-artifacts> + <escalated-artifact uri="{url}/artifacts/r1"> + </escalated-artifact> + </escalated-artifacts> + </escalation> +</stp:actions>""".format(url=url) + + step_actions_no_escalation_xml = """<stp:actions xmlns:stp="http://genologics.com/ri/step" uri="..."> + <step rel="..." 
uri="{url}/steps/s1"> + </step> + <configuration uri="{url}/config/1">...</configuration> + <next-actions> + <next-action artifact-uri="{url}/artifacts/a1" action="requeue" step-uri="{url}/steps/s1" rework-step-uri="{url}/steps/s2"> + </next-action> + </next-actions> +</stp:actions>""".format(url=url) + + def test_escalation(self): + s = StepActions(uri=self.lims.get_uri('steps', 'step_id', 'actions'), lims=self.lims) + with patch('requests.Session.get',return_value=Mock(content = self.step_actions_xml, status_code=200)),\ + patch('requests.post', return_value=Mock(content = self.dummy_xml, status_code=200)): + r = Researcher(uri='http://testgenologics.com:4040/researchers/r1', lims=self.lims) + a = Artifact(uri='http://testgenologics.com:4040/artifacts/r1', lims=self.lims) + expected_escalation = { + 'status': 'Reviewed', + 'author': r, + 'artifacts': [a], 'request': 'no comments', + 'answer': 'no comments', + 'reviewer': r} + + assert s.escalation == expected_escalation + + def test_next_actions(self): + s = StepActions(uri=self.lims.get_uri('steps', 'step_id', 'actions'), lims=self.lims) + with patch('requests.Session.get',return_value=Mock(content = self.step_actions_no_escalation_xml, status_code=200)): + step1 = Step(self.lims, uri='http://testgenologics.com:4040/steps/s1') + step2 = Step(self.lims, uri='http://testgenologics.com:4040/steps/s2') + artifact = Artifact(self.lims, uri='http://testgenologics.com:4040/artifacts/a1') + expected_next_actions = [{'artifact': artifact, 'action': 'requeue', + 'step': step1, 'rework-step': step2}] + assert s.next_actions == expected_next_actions +
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 3, "issue_text_score": 3, "test_score": 3 }, "num_modified_files": 4 }
0.3
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi==2025.1.31 charset-normalizer==3.4.1 exceptiongroup==1.2.2 -e git+https://github.com/SciLifeLab/genologics.git@33b08db9bc8d85427c79f5e10792b51f10b27a47#egg=genologics idna==3.10 iniconfig==2.1.0 packaging==24.2 pluggy==1.5.0 pytest==8.3.5 requests==2.32.3 tomli==2.2.1 urllib3==2.3.0
name: genologics channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - certifi==2025.1.31 - charset-normalizer==3.4.1 - exceptiongroup==1.2.2 - idna==3.10 - iniconfig==2.1.0 - packaging==24.2 - pluggy==1.5.0 - pytest==8.3.5 - requests==2.32.3 - tomli==2.2.1 - urllib3==2.3.0 prefix: /opt/conda/envs/genologics
[ "tests/test_entities.py::TestStepActions::test_next_actions" ]
[ "tests/test_entities.py::TestStringDictionaryDescriptor::test__get__", "tests/test_entities.py::TestUdfDictionary::test___contains__", "tests/test_entities.py::TestUdfDictionary::test___delitem__", "tests/test_entities.py::TestUdfDictionary::test___getitem__", "tests/test_entities.py::TestUdfDictionary::test___iter__", "tests/test_entities.py::TestUdfDictionary::test___next__", "tests/test_entities.py::TestUdfDictionary::test___setitem__", "tests/test_entities.py::TestUdfDictionary::test__prepare_lookup", "tests/test_entities.py::TestUdfDictionary::test__update_elems", "tests/test_entities.py::TestUdfDictionary::test_clear", "tests/test_entities.py::TestUdfDictionary::test_get", "tests/test_entities.py::TestUdfDictionary::test_get_udt", "tests/test_entities.py::TestUdfDictionary::test_items", "tests/test_entities.py::TestUdfDictionary::test_set_udt", "tests/test_entities.py::TestStepActions::test_escalation" ]
[ "tests/test_entities.py::TestStringDescriptor::test__get__", "tests/test_entities.py::TestStringDescriptor::test__set__", "tests/test_entities.py::TestStringAttributeDescriptor::test__get__", "tests/test_entities.py::TestStringListDescriptor::test__get__", "tests/test_entities.py::TestIntegerDescriptor::test__get__", "tests/test_entities.py::TestIntegerDescriptor::test__set__", "tests/test_entities.py::TestBooleanDescriptor::test__get__", "tests/test_entities.py::TestBooleanDescriptor::test__set__" ]
[]
MIT License
490
dpkp__kafka-python-620
b96f4ccf070109a022deb98b569e61d23e4e75b9
2016-04-03 16:29:40
810f08b7996a15e65cdd8af6c1a7167c28f94646
diff --git a/kafka/coordinator/consumer.py b/kafka/coordinator/consumer.py index a5e3067..b2ef1ea 100644 --- a/kafka/coordinator/consumer.py +++ b/kafka/coordinator/consumer.py @@ -91,8 +91,10 @@ class ConsumerCoordinator(BaseCoordinator): log.warning('Broker version (%s) does not support offset' ' commits; disabling auto-commit.', self.config['api_version']) + self.config['enable_auto_commit'] = False elif self.config['group_id'] is None: log.warning('group_id is None: disabling auto-commit.') + self.config['enable_auto_commit'] = False else: interval = self.config['auto_commit_interval_ms'] / 1000.0 self._auto_commit_task = AutoCommitTask(weakref.proxy(self), interval) @@ -192,7 +194,7 @@ class ConsumerCoordinator(BaseCoordinator): assignor.on_assignment(assignment) # restart the autocommit task if needed - if self.config['enable_auto_commit']: + if self._auto_commit_task: self._auto_commit_task.enable() assigned = set(self._subscription.assigned_partitions()) @@ -364,27 +366,27 @@ class ConsumerCoordinator(BaseCoordinator): time.sleep(self.config['retry_backoff_ms'] / 1000.0) def _maybe_auto_commit_offsets_sync(self): - if self.config['api_version'] < (0, 8, 1): + if self._auto_commit_task is None: return - if self.config['enable_auto_commit']: - # disable periodic commits prior to committing synchronously. note that they will - # be re-enabled after a rebalance completes - self._auto_commit_task.disable() - try: - self.commit_offsets_sync(self._subscription.all_consumed_offsets()) - - # The three main group membership errors are known and should not - # require a stacktrace -- just a warning - except (Errors.UnknownMemberIdError, - Errors.IllegalGenerationError, - Errors.RebalanceInProgressError): - log.warning("Offset commit failed: group membership out of date" - " This is likely to cause duplicate message" - " delivery.") - except Exception: - log.exception("Offset commit failed: This is likely to cause" - " duplicate message delivery") + # disable periodic commits prior to committing synchronously. note that they will + # be re-enabled after a rebalance completes + self._auto_commit_task.disable() + + try: + self.commit_offsets_sync(self._subscription.all_consumed_offsets()) + + # The three main group membership errors are known and should not + # require a stacktrace -- just a warning + except (Errors.UnknownMemberIdError, + Errors.IllegalGenerationError, + Errors.RebalanceInProgressError): + log.warning("Offset commit failed: group membership out of date" + " This is likely to cause duplicate message" + " delivery.") + except Exception: + log.exception("Offset commit failed: This is likely to cause" + " duplicate message delivery") def _send_offset_commit_request(self, offsets): """Commit offsets for the specified list of topics and partitions.
Consumer exception on close when group id is None Following the conversation in #601, setting the `group_id` to `None` in a Consumer causes an exception to be raised when the consumer is closed. ``` >>> from kafka import KafkaConsumer >>> k = KafkaConsumer('example', bootstrap_servers=['server'], group_id=None) >>> k.close() Traceback (most recent call last): File "<stdin>", line 1, in <module> File "/Users/madisonb/.local/share/virtualenvs/scrapy-cluster/lib/python2.7/site-packages/kafka/consumer/group.py", line 257, in close self._coordinator.close() File "/Users/madisonb/.local/share/virtualenvs/scrapy-cluster/lib/python2.7/site-packages/kafka/coordinator/consumer.py", line 306, in close self._maybe_auto_commit_offsets_sync() File "/Users/madisonb/.local/share/virtualenvs/scrapy-cluster/lib/python2.7/site-packages/kafka/coordinator/consumer.py", line 372, in _maybe_auto_commit_offsets_sync self._auto_commit_task.disable() AttributeError: 'NoneType' object has no attribute 'disable' >>> k = KafkaConsumer('example', bootstrap_servers=['server'], group_id='stuff') >>> k.close() >>> ```
dpkp/kafka-python
diff --git a/test/test_coordinator.py b/test/test_coordinator.py index 847cbc1..44db808 100644 --- a/test/test_coordinator.py +++ b/test/test_coordinator.py @@ -52,12 +52,16 @@ def test_init(conn): @pytest.mark.parametrize("api_version", [(0, 8, 0), (0, 8, 1), (0, 8, 2), (0, 9)]) def test_autocommit_enable_api_version(conn, api_version): - coordinator = ConsumerCoordinator( - KafkaClient(), SubscriptionState(), api_version=api_version) + coordinator = ConsumerCoordinator(KafkaClient(), SubscriptionState(), + enable_auto_commit=True, + group_id='foobar', + api_version=api_version) if api_version < (0, 8, 1): assert coordinator._auto_commit_task is None + assert coordinator.config['enable_auto_commit'] is False else: assert coordinator._auto_commit_task is not None + assert coordinator.config['enable_auto_commit'] is True def test_protocol_type(coordinator): @@ -349,28 +353,40 @@ def test_commit_offsets_sync(mocker, coordinator, offsets): @pytest.mark.parametrize( - 'api_version,enable,error,task_disable,commit_offsets,warn,exc', [ - ((0, 8), True, None, False, False, False, False), - ((0, 9), False, None, False, False, False, False), - ((0, 9), True, Errors.UnknownMemberIdError(), True, True, True, False), - ((0, 9), True, Errors.IllegalGenerationError(), True, True, True, False), - ((0, 9), True, Errors.RebalanceInProgressError(), True, True, True, False), - ((0, 9), True, Exception(), True, True, False, True), - ((0, 9), True, None, True, True, False, False), + 'api_version,group_id,enable,error,has_auto_commit,commit_offsets,warn,exc', [ + ((0, 8), 'foobar', True, None, False, False, True, False), + ((0, 9), 'foobar', False, None, False, False, False, False), + ((0, 9), 'foobar', True, Errors.UnknownMemberIdError(), True, True, True, False), + ((0, 9), 'foobar', True, Errors.IllegalGenerationError(), True, True, True, False), + ((0, 9), 'foobar', True, Errors.RebalanceInProgressError(), True, True, True, False), + ((0, 9), 'foobar', True, Exception(), True, True, False, True), + ((0, 9), 'foobar', True, None, True, True, False, False), + ((0, 9), None, True, None, False, False, True, False), ]) -def test_maybe_auto_commit_offsets_sync(mocker, coordinator, - api_version, enable, error, task_disable, - commit_offsets, warn, exc): - auto_commit_task = mocker.patch.object(coordinator, '_auto_commit_task') - commit_sync = mocker.patch.object(coordinator, 'commit_offsets_sync', - side_effect=error) +def test_maybe_auto_commit_offsets_sync(mocker, api_version, group_id, enable, + error, has_auto_commit, commit_offsets, + warn, exc): mock_warn = mocker.patch('kafka.coordinator.consumer.log.warning') mock_exc = mocker.patch('kafka.coordinator.consumer.log.exception') + coordinator = ConsumerCoordinator(KafkaClient(), SubscriptionState(), + api_version=api_version, + enable_auto_commit=enable, + group_id=group_id) + commit_sync = mocker.patch.object(coordinator, 'commit_offsets_sync', + side_effect=error) + if has_auto_commit: + assert coordinator._auto_commit_task is not None + coordinator._auto_commit_task.enable() + assert coordinator._auto_commit_task._enabled is True + else: + assert coordinator._auto_commit_task is None - coordinator.config['api_version'] = api_version - coordinator.config['enable_auto_commit'] = enable assert coordinator._maybe_auto_commit_offsets_sync() is None - assert auto_commit_task.disable.call_count == (1 if task_disable else 0) + + if has_auto_commit: + assert coordinator._auto_commit_task is not None + assert coordinator._auto_commit_task._enabled is False + assert 
commit_sync.call_count == (1 if commit_offsets else 0) assert mock_warn.call_count == (1 if warn else 0) assert mock_exc.call_count == (1 if exc else 0)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_issue_reference" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 2, "test_score": 0 }, "num_modified_files": 1 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "pytest-catchlog", "pytest-pylint", "pytest-sugar", "pytest-mock", "mock", "python-snappy", "lz4tools", "xxhash" ], "pre_install": [ "apt-get update", "apt-get install -y libsnappy-dev" ], "python": "3.5", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
astroid==2.11.7 attrs==22.2.0 certifi==2021.5.30 coverage==6.2 cramjam==2.5.0 dill==0.3.4 importlib-metadata==4.8.3 iniconfig==1.1.1 isort==5.10.1 -e git+https://github.com/dpkp/kafka-python.git@b96f4ccf070109a022deb98b569e61d23e4e75b9#egg=kafka_python lazy-object-proxy==1.7.1 lz4tools==1.3.1.2 mccabe==0.7.0 mock==5.2.0 packaging==21.3 platformdirs==2.4.0 pluggy==1.0.0 py==1.11.0 pylint==2.13.9 pyparsing==3.1.4 pytest==7.0.1 pytest-catchlog==1.2.2 pytest-cov==4.0.0 pytest-mock==3.6.1 pytest-pylint==0.18.0 pytest-sugar==0.9.6 python-snappy==0.7.3 six==1.17.0 termcolor==1.1.0 toml==0.10.2 tomli==1.2.3 typed-ast==1.5.5 typing_extensions==4.1.1 wrapt==1.16.0 xxhash==3.2.0 zipp==3.6.0
name: kafka-python channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - astroid==2.11.7 - attrs==22.2.0 - coverage==6.2 - cramjam==2.5.0 - dill==0.3.4 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - isort==5.10.1 - lazy-object-proxy==1.7.1 - lz4tools==1.3.1.2 - mccabe==0.7.0 - mock==5.2.0 - packaging==21.3 - platformdirs==2.4.0 - pluggy==1.0.0 - py==1.11.0 - pylint==2.13.9 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-catchlog==1.2.2 - pytest-cov==4.0.0 - pytest-mock==3.6.1 - pytest-pylint==0.18.0 - pytest-sugar==0.9.6 - python-snappy==0.7.3 - six==1.17.0 - termcolor==1.1.0 - toml==0.10.2 - tomli==1.2.3 - typed-ast==1.5.5 - typing-extensions==4.1.1 - wrapt==1.16.0 - xxhash==3.2.0 - zipp==3.6.0 prefix: /opt/conda/envs/kafka-python
[ "test/test_coordinator.py::test_autocommit_enable_api_version[api_version0]", "test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version7-None-True-None-False-False-True-False]" ]
[ "test/test_coordinator.py::test_handle_offset_commit_response[response10-InvalidTopicError-False-False]" ]
[ "test/test_coordinator.py::test_init", "test/test_coordinator.py::test_autocommit_enable_api_version[api_version1]", "test/test_coordinator.py::test_autocommit_enable_api_version[api_version2]", "test/test_coordinator.py::test_autocommit_enable_api_version[api_version3]", "test/test_coordinator.py::test_protocol_type", "test/test_coordinator.py::test_group_protocols", "test/test_coordinator.py::test_pattern_subscription[api_version0]", "test/test_coordinator.py::test_pattern_subscription[api_version1]", "test/test_coordinator.py::test_pattern_subscription[api_version2]", "test/test_coordinator.py::test_pattern_subscription[api_version3]", "test/test_coordinator.py::test_lookup_assignor", "test/test_coordinator.py::test_join_complete", "test/test_coordinator.py::test_subscription_listener", "test/test_coordinator.py::test_subscription_listener_failure", "test/test_coordinator.py::test_perform_assignment", "test/test_coordinator.py::test_on_join_prepare", "test/test_coordinator.py::test_need_rejoin", "test/test_coordinator.py::test_refresh_committed_offsets_if_needed", "test/test_coordinator.py::test_fetch_committed_offsets", "test/test_coordinator.py::test_close", "test/test_coordinator.py::test_commit_offsets_async", "test/test_coordinator.py::test_commit_offsets_sync", "test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version0-foobar-True-None-False-False-True-False]", "test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version1-foobar-False-None-False-False-False-False]", "test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version2-foobar-True-error2-True-True-True-False]", "test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version3-foobar-True-error3-True-True-True-False]", "test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version4-foobar-True-error4-True-True-True-False]", "test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version5-foobar-True-error5-True-True-False-True]", "test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version6-foobar-True-None-True-True-False-False]", "test/test_coordinator.py::test_send_offset_commit_request_fail", "test/test_coordinator.py::test_send_offset_commit_request_versions[api_version0-OffsetCommitRequest_v0]", "test/test_coordinator.py::test_send_offset_commit_request_versions[api_version1-OffsetCommitRequest_v1]", "test/test_coordinator.py::test_send_offset_commit_request_versions[api_version2-OffsetCommitRequest_v2]", "test/test_coordinator.py::test_send_offset_commit_request_failure", "test/test_coordinator.py::test_send_offset_commit_request_success", "test/test_coordinator.py::test_handle_offset_commit_response[response0-GroupAuthorizationFailedError-False-False]", "test/test_coordinator.py::test_handle_offset_commit_response[response1-OffsetMetadataTooLargeError-False-False]", "test/test_coordinator.py::test_handle_offset_commit_response[response2-InvalidCommitOffsetSizeError-False-False]", "test/test_coordinator.py::test_handle_offset_commit_response[response3-GroupLoadInProgressError-False-False]", "test/test_coordinator.py::test_handle_offset_commit_response[response4-GroupCoordinatorNotAvailableError-True-False]", "test/test_coordinator.py::test_handle_offset_commit_response[response5-NotCoordinatorForGroupError-True-False]", "test/test_coordinator.py::test_handle_offset_commit_response[response6-RequestTimedOutError-True-False]", 
"test/test_coordinator.py::test_handle_offset_commit_response[response7-UnknownMemberIdError-False-True]", "test/test_coordinator.py::test_handle_offset_commit_response[response8-IllegalGenerationError-False-True]", "test/test_coordinator.py::test_handle_offset_commit_response[response9-RebalanceInProgressError-False-True]", "test/test_coordinator.py::test_handle_offset_commit_response[response11-TopicAuthorizationFailedError-False-False]", "test/test_coordinator.py::test_send_offset_fetch_request_fail", "test/test_coordinator.py::test_send_offset_fetch_request_versions[api_version0-OffsetFetchRequest_v0]", "test/test_coordinator.py::test_send_offset_fetch_request_versions[api_version1-OffsetFetchRequest_v1]", "test/test_coordinator.py::test_send_offset_fetch_request_versions[api_version2-OffsetFetchRequest_v1]", "test/test_coordinator.py::test_send_offset_fetch_request_failure", "test/test_coordinator.py::test_send_offset_fetch_request_success", "test/test_coordinator.py::test_handle_offset_fetch_response[response0-GroupLoadInProgressError-False-False]", "test/test_coordinator.py::test_handle_offset_fetch_response[response1-NotCoordinatorForGroupError-True-False]", "test/test_coordinator.py::test_handle_offset_fetch_response[response2-UnknownMemberIdError-False-True]", "test/test_coordinator.py::test_handle_offset_fetch_response[response3-IllegalGenerationError-False-True]", "test/test_coordinator.py::test_handle_offset_fetch_response[response4-TopicAuthorizationFailedError-False-False]", "test/test_coordinator.py::test_handle_offset_fetch_response[response5-None-False-False]", "test/test_coordinator.py::test_heartbeat" ]
[]
Apache License 2.0
491
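The kafka-python record above traces the close() crash to code that consulted the enable_auto_commit flag instead of the auto-commit task object. A minimal sketch of that guard pattern, using a toy coordinator class rather than kafka-python's real ConsumerCoordinator (all names here are illustrative, not the library's API):

```python
import logging

log = logging.getLogger(__name__)


class MiniCoordinator:
    """Toy stand-in for ConsumerCoordinator, reduced to the auto-commit guard."""

    def __init__(self, group_id, enable_auto_commit, api_version=(0, 9)):
        self._auto_commit_task = None
        # Mirror the patched behaviour: only create the task when offset
        # commits are actually possible for this configuration.
        if enable_auto_commit and api_version >= (0, 8, 1) and group_id is not None:
            self._auto_commit_task = object()  # placeholder for AutoCommitTask

    def _maybe_auto_commit_offsets_sync(self):
        # Guard on the task itself, not on the config flag, so the close()
        # path is safe when group_id is None and no task was ever created.
        if self._auto_commit_task is None:
            return
        log.info("would disable the task and commit offsets synchronously here")


# No AttributeError on close when group_id is None:
MiniCoordinator(group_id=None, enable_auto_commit=True)._maybe_auto_commit_offsets_sync()
```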
andialbrecht__sqlparse-231
ee5799fbb60e9739e42922861cd9f24990fc52dd
2016-04-05 08:28:29
058f6fdcfdb0c84bedbaea0745d9b6b92cb20fe4
diff --git a/sqlparse/engine/grouping.py b/sqlparse/engine/grouping.py index 4e45f65..68960d5 100644 --- a/sqlparse/engine/grouping.py +++ b/sqlparse/engine/grouping.py @@ -135,7 +135,8 @@ def group_comparison(tlist): T.Name, T.Number, T.Number.Float, T.Number.Integer, T.Literal, T.Literal.Number.Integer, T.Name.Placeholder) - or isinstance(token, (sql.Identifier, sql.Parenthesis)) + or isinstance(token, (sql.Identifier, sql.Parenthesis, + sql.Function)) or (token.ttype is T.Keyword and token.value.upper() in ['NULL', ])) _group_left_right(tlist, T.Operator.Comparison, None, sql.Comparison,
Functions are not grouped into a Comparison I.e. `foo = DATE(bar.baz)` is not grouped.
andialbrecht/sqlparse
diff --git a/tests/test_grouping.py b/tests/test_grouping.py index e846176..a6c4028 100644 --- a/tests/test_grouping.py +++ b/tests/test_grouping.py @@ -325,6 +325,29 @@ def test_comparison_with_strings(): # issue148 assert p.tokens[0].right.ttype == T.String.Single +def test_comparison_with_functions(): # issue230 + p = sqlparse.parse('foo = DATE(bar.baz)')[0] + assert len(p.tokens) == 1 + assert isinstance(p.tokens[0], sql.Comparison) + assert len(p.tokens[0].tokens) == 5 + assert p.tokens[0].left.value == 'foo' + assert p.tokens[0].right.value == 'DATE(bar.baz)' + + p = sqlparse.parse('DATE(foo.bar) = DATE(bar.baz)')[0] + assert len(p.tokens) == 1 + assert isinstance(p.tokens[0], sql.Comparison) + assert len(p.tokens[0].tokens) == 5 + assert p.tokens[0].left.value == 'DATE(foo.bar)' + assert p.tokens[0].right.value == 'DATE(bar.baz)' + + p = sqlparse.parse('DATE(foo.bar) = bar.baz')[0] + assert len(p.tokens) == 1 + assert isinstance(p.tokens[0], sql.Comparison) + assert len(p.tokens[0].tokens) == 5 + assert p.tokens[0].left.value == 'DATE(foo.bar)' + assert p.tokens[0].right.value == 'bar.baz' + + @pytest.mark.parametrize('start', ['FOR', 'FOREACH']) def test_forloops(start): p = sqlparse.parse('%s foo in bar LOOP foobar END LOOP' % start)[0]
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 2, "test_score": 0 }, "num_modified_files": 1 }
0.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest", "pytest-cov" ], "pre_install": null, "python": "3.9", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
coverage==7.8.0 exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work packaging @ file:///croot/packaging_1734472117206/work pluggy @ file:///croot/pluggy_1733169602837/work pytest @ file:///croot/pytest_1738938843180/work pytest-cov==6.0.0 -e git+https://github.com/andialbrecht/sqlparse.git@ee5799fbb60e9739e42922861cd9f24990fc52dd#egg=sqlparse tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
name: sqlparse channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - exceptiongroup=1.2.0=py39h06a4308_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - packaging=24.2=py39h06a4308_0 - pip=25.0=py39h06a4308_0 - pluggy=1.5.0=py39h06a4308_0 - pytest=8.3.4=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tomli=2.0.1=py39h06a4308_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - coverage==7.8.0 - pytest-cov==6.0.0 prefix: /opt/conda/envs/sqlparse
[ "tests/test_grouping.py::test_comparison_with_functions" ]
[]
[ "tests/test_grouping.py::TestGrouping::test_alias", "tests/test_grouping.py::TestGrouping::test_alias_case", "tests/test_grouping.py::TestGrouping::test_alias_returns_none", "tests/test_grouping.py::TestGrouping::test_assignment", "tests/test_grouping.py::TestGrouping::test_comments", "tests/test_grouping.py::TestGrouping::test_comparison_exclude", "tests/test_grouping.py::TestGrouping::test_function", "tests/test_grouping.py::TestGrouping::test_function_not_in", "tests/test_grouping.py::TestGrouping::test_identifier_as_invalid", "tests/test_grouping.py::TestGrouping::test_identifier_extended", "tests/test_grouping.py::TestGrouping::test_identifier_function", "tests/test_grouping.py::TestGrouping::test_identifier_invalid", "tests/test_grouping.py::TestGrouping::test_identifier_list", "tests/test_grouping.py::TestGrouping::test_identifier_list_case", "tests/test_grouping.py::TestGrouping::test_identifier_list_other", "tests/test_grouping.py::TestGrouping::test_identifier_list_with_inline_comments", "tests/test_grouping.py::TestGrouping::test_identifier_name_wildcard", "tests/test_grouping.py::TestGrouping::test_identifier_wildcard", "tests/test_grouping.py::TestGrouping::test_identifiers", "tests/test_grouping.py::TestGrouping::test_idlist_function", "tests/test_grouping.py::TestGrouping::test_parenthesis", "tests/test_grouping.py::TestGrouping::test_typecast", "tests/test_grouping.py::TestGrouping::test_varchar", "tests/test_grouping.py::TestGrouping::test_where", "tests/test_grouping.py::TestStatement::test_get_type", "tests/test_grouping.py::test_identifier_with_operators", "tests/test_grouping.py::test_identifier_with_op_trailing_ws", "tests/test_grouping.py::test_identifier_with_string_literals", "tests/test_grouping.py::test_identifier_consumes_ordering", "tests/test_grouping.py::test_comparison_with_keywords", "tests/test_grouping.py::test_comparison_with_floats", "tests/test_grouping.py::test_comparison_with_parenthesis", "tests/test_grouping.py::test_comparison_with_strings", "tests/test_grouping.py::test_forloops[FOR]", "tests/test_grouping.py::test_forloops[FOREACH]", "tests/test_grouping.py::test_nested_for", "tests/test_grouping.py::test_begin", "tests/test_grouping.py::test_nested_begin", "tests/test_grouping.py::test_aliased_column_without_as", "tests/test_grouping.py::test_qualified_function", "tests/test_grouping.py::test_aliased_function_without_as", "tests/test_grouping.py::test_aliased_literal_without_as" ]
[]
BSD 3-Clause "New" or "Revised" License
492
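For the sqlparse record above, the grouping change is easy to observe from the public API. A short check (assuming a sqlparse release that includes the fix) that the expression from the issue now parses into a single Comparison whose right-hand side is the function call:

```python
import sqlparse
from sqlparse import sql

statement = sqlparse.parse('foo = DATE(bar.baz)')[0]
comparison = statement.tokens[0]

print(isinstance(comparison, sql.Comparison))               # True once functions are grouped
print(comparison.left.value, '|', comparison.right.value)   # foo | DATE(bar.baz)
```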
docker__docker-py-1022
e743254b42080e6d199fc10f4812a42ecb8d536f
2016-04-05 19:54:10
299ffadb95c90eb7134b9cee2648fb683912c303
dnephin: LGTM when build is green. Not sure why they all failed, maybe just a jenkins issue?
diff --git a/docker/auth/auth.py b/docker/auth/auth.py index eedb7944..d23e6f3c 100644 --- a/docker/auth/auth.py +++ b/docker/auth/auth.py @@ -117,7 +117,7 @@ def parse_auth(entries, raise_on_error=False): conf = {} for registry, entry in six.iteritems(entries): - if not (isinstance(entry, dict) and 'auth' in entry): + if not isinstance(entry, dict): log.debug( 'Config entry for key {0} is not auth config'.format(registry) ) @@ -130,6 +130,16 @@ def parse_auth(entries, raise_on_error=False): 'Invalid configuration for registry {0}'.format(registry) ) return {} + if 'auth' not in entry: + # Starting with engine v1.11 (API 1.23), an empty dictionary is + # a valid value in the auths config. + # https://github.com/docker/compose/issues/3265 + log.debug( + 'Auth data for {0} is absent. Client might be using a ' + 'credentials store instead.' + ) + return {} + username, password = decode_auth(entry['auth']) log.debug( 'Found entry (registry={0}, username={1})' @@ -189,6 +199,9 @@ def load_config(config_path=None): if data.get('HttpHeaders'): log.debug("Found 'HttpHeaders' section") res.update({'HttpHeaders': data['HttpHeaders']}) + if data.get('credsStore'): + log.debug("Found 'credsStore' section") + res.update({'credsStore': data['credsStore']}) if res: return res else:
Empty auth dictionary should be valid docker/compose#3265
docker/docker-py
diff --git a/tests/unit/auth_test.py b/tests/unit/auth_test.py index 921aae00..4ea40477 100644 --- a/tests/unit/auth_test.py +++ b/tests/unit/auth_test.py @@ -459,6 +459,5 @@ class LoadConfigTest(base.Cleanup, base.BaseTestCase): with open(dockercfg_path, 'w') as f: json.dump(config, f) - self.assertRaises( - errors.InvalidConfigFile, auth.load_config, dockercfg_path - ) + cfg = auth.load_config(dockercfg_path) + assert cfg == {}
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 3, "test_score": 2 }, "num_modified_files": 1 }
1.8
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.4", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 -e git+https://github.com/docker/docker-py.git@e743254b42080e6d199fc10f4812a42ecb8d536f#egg=docker_py importlib-metadata==4.8.3 iniconfig==1.1.1 packaging==21.3 pluggy==1.0.0 py==1.11.0 pyparsing==3.1.4 pytest==7.0.1 requests==2.5.3 six==1.17.0 tomli==1.2.3 typing_extensions==4.1.1 websocket-client==0.32.0 zipp==3.6.0
name: docker-py channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - requests==2.5.3 - six==1.17.0 - tomli==1.2.3 - typing-extensions==4.1.1 - websocket-client==0.32.0 - zipp==3.6.0 prefix: /opt/conda/envs/docker-py
[ "tests/unit/auth_test.py::LoadConfigTest::test_load_config_invalid_auth_dict" ]
[]
[ "tests/unit/auth_test.py::RegressionTest::test_803_urlsafe_encode", "tests/unit/auth_test.py::ResolveRepositoryNameTest::test_explicit_hub_index_library_image", "tests/unit/auth_test.py::ResolveRepositoryNameTest::test_explicit_legacy_hub_index_library_image", "tests/unit/auth_test.py::ResolveRepositoryNameTest::test_invalid_index_name", "tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_dotted_hub_library_image", "tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_hub_image", "tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_hub_library_image", "tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_localhost", "tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_localhost_with_username", "tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_no_dots_but_port", "tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_no_dots_but_port_and_username", "tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_private_registry", "tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_private_registry_with_port", "tests/unit/auth_test.py::ResolveRepositoryNameTest::test_resolve_repository_name_private_registry_with_username", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_default_explicit_none", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_default_registry", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_fully_explicit", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_hostname_only", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_legacy_config", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_no_match", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_no_path", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_no_path_trailing_slash", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_no_path_wrong_insecure_proto", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_no_path_wrong_secure_proto", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_no_protocol", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_authconfig_path_wrong_proto", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_registry_and_auth_explicit_hub", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_registry_and_auth_explicit_legacy_hub", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_registry_and_auth_hub_image", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_registry_and_auth_library_image", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_registry_and_auth_private_registry", "tests/unit/auth_test.py::ResolveAuthTest::test_resolve_registry_and_auth_unauthenticated_registry", "tests/unit/auth_test.py::LoadConfigTest::test_load_config", "tests/unit/auth_test.py::LoadConfigTest::test_load_config_custom_config_env", "tests/unit/auth_test.py::LoadConfigTest::test_load_config_custom_config_env_utf8", "tests/unit/auth_test.py::LoadConfigTest::test_load_config_custom_config_env_with_auths", "tests/unit/auth_test.py::LoadConfigTest::test_load_config_custom_config_env_with_headers", "tests/unit/auth_test.py::LoadConfigTest::test_load_config_no_file", "tests/unit/auth_test.py::LoadConfigTest::test_load_config_unknown_keys", 
"tests/unit/auth_test.py::LoadConfigTest::test_load_config_with_random_name" ]
[]
Apache License 2.0
493
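The docker-py record above relaxes config parsing so that an empty auths entry (credentials held in a credsStore) is no longer treated as an invalid config file. A self-contained sketch of that tolerant rule over a plain dict — not docker-py's actual parse_auth, just the same decision logic:

```python
import base64


def parse_auth_entry(registry, entry):
    """Return (username, password), or None when the entry carries no inline auth."""
    if not isinstance(entry, dict):
        return None
    if 'auth' not in entry:
        # Engine >= 1.11 may write {} here and keep the real credentials
        # in an external credential store (the 'credsStore' section).
        return None
    decoded = base64.b64decode(entry['auth']).decode('utf-8')
    username, _, password = decoded.partition(':')
    return username, password


print(parse_auth_entry('https://index.docker.io/v1/', {}))  # None, not an error
print(parse_auth_entry('https://index.docker.io/v1/',
                       {'auth': base64.b64encode(b'user:secret').decode()}))
```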
thesquelched__suggestive-9
b5c3a2f0a1734d8948778fbb0b252d3ea5059def
2016-04-06 17:15:24
b5c3a2f0a1734d8948778fbb0b252d3ea5059def
diff --git a/CHANGELOG.md b/CHANGELOG.md index 414275e..949e00f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,11 @@ +Version 0.4.1 +============= + +Bug Fixes +--------- +- Add mpd connection retries to MPD loader +- Fix error when log directory doesn't exist + Version 0.4.0 ============= diff --git a/README.md b/README.md index c5c8d15..8bbf90d 100644 --- a/README.md +++ b/README.md @@ -57,6 +57,8 @@ Here is a minimal `suggestive` configuration file: [lastfm] user = my_lastfm_user api_key = 0123456789abcdefghijklmnopqrstuv + +# For LastFM write access (optional) api_secret = 141iojhu789uihy78uiho9uih89080 ``` diff --git a/suggestive/_version.py b/suggestive/_version.py index 0ed5398..90134aa 100644 --- a/suggestive/_version.py +++ b/suggestive/_version.py @@ -1,2 +1,2 @@ -__version_info__ = (0, 4, 0) +__version_info__ = (0, 4, 1) __version__ = '.'.join(str(piece) for piece in __version_info__) diff --git a/suggestive/app.py b/suggestive/app.py index d751041..1de628d 100644 --- a/suggestive/app.py +++ b/suggestive/app.py @@ -25,8 +25,7 @@ import urwid import logging from logging.handlers import RotatingFileHandler import threading -import os.path -from os import remove +import os import sys import gzip @@ -509,7 +508,12 @@ def initialize_logging(conf): for line in sf: df.write(line) - remove(source) + os.remove(source) + + try: + os.makedirs(os.path.dirname(conf.log_file())) + except IOError: + pass handler = RotatingFileHandler( conf.log_file(), diff --git a/suggestive/mstat.py b/suggestive/mstat.py index d6d22a7..61ff8d8 100644 --- a/suggestive/mstat.py +++ b/suggestive/mstat.py @@ -125,19 +125,22 @@ def database_tracks_from_mpd(conf, tracks_info): """ Return the database Track object corresponding to track info from MPD """ + track_filenames = [info['file'] for info in tracks_info] + with session_scope(conf, commit=False) as session: - filenames = [info['file'] for info in tracks_info] - db_tracks = session.query(Track).\ - options( - subqueryload(Track.album), - subqueryload(Track.artist), - subqueryload(Track.lastfm_info) - ).\ - filter(Track.filename.in_(filenames)).\ - all() + tracks_by_filename = {} - tracks_by_filename = {t.filename: t for t in db_tracks} - return [tracks_by_filename[info['file']] for info in tracks_info] + for chunk in partition(track_filenames, 128): + db_tracks = (session.query(Track). + options(subqueryload(Track.album), + subqueryload(Track.artist), + subqueryload(Track.lastfm_info)). + filter(Track.filename.in_(chunk)). 
+ all()) + + tracks_by_filename.update({t.filename: t for t in db_tracks}) + + return [tracks_by_filename[filename] for filename in track_filenames] def get_scrobbles(conf, limit, offset=None): @@ -319,11 +322,11 @@ class ScrobbleLoader(object): if not len(scrobbles): return 0 - first = next(scrobbles) + first = scrobbles[0] self.load_scrobble(session, first) track = None # Necessary if there was only one scrobble total - for track in scrobbles: + for track in scrobbles[1:]: self.load_scrobble(session, track) last = track or first @@ -339,8 +342,17 @@ class MpdLoader(object): Synchronizes the MPD and suggestive databases """ - def __init__(self, mpd): - self.mpd = mpd + def __init__(self, conf): + self._conf = conf + self._mpd = initialize_mpd(conf) + + @property + def mpd(self): + return self._mpd + + @property + def conf(self): + return self._conf def load_track(self, session, db_artist, db_album, info): """ @@ -442,6 +454,7 @@ class MpdLoader(object): logger.debug('Deleted {} empty albums'.format(len(empty))) + @mpd_retry def check_duplicates(self, session): """ Check for albums with duplicate tracks @@ -492,11 +505,19 @@ class MpdLoader(object): return by_artist_album + @mpd_retry + def _list_mpd_files(self): + return self.mpd.list('file') + + @mpd_retry + def _mpd_info(self, path): + return self.mpd.listallinfo(path) + def load(self, session): """ Synchronize MPD and suggestive databases """ - files_in_mpd = set(self.mpd.list('file')) + files_in_mpd = set(self._list_mpd_files()) files_in_db = set(item.filename for item in session.query( Track.filename).all()) @@ -509,8 +530,7 @@ class MpdLoader(object): logger.debug('Missing files:\n {}'.format( '\n '.join(missing))) missing_info = list( - chain.from_iterable(self.mpd.listallinfo(path) - for path in missing)) + chain.from_iterable(self._mpd_info(path) for path in missing)) by_artist_album = self.segregate_track_info(missing_info) self.load_by_artist_album(session, by_artist_album) @@ -750,8 +770,7 @@ def update_mpd(config): albums_start = session.query(Album).count() tracks_start = session.query(Track).count() - mpdclient = initialize_mpd(config) - mpd_loader = MpdLoader(mpdclient) + mpd_loader = MpdLoader(config) mpd_loader.load(session) session.commit() diff --git a/tox.ini b/tox.ini index d5c3ff3..410c552 100644 --- a/tox.ini +++ b/tox.ini @@ -5,10 +5,12 @@ envlist = py33,py34,py35 deps = -r{toxinidir}/test-requirements.txt commands = py.test -m "not live" --ignore=build --ignore=suggestive/alembic \ - --pep8 --flakes --cov={envsitepackagesdir}/suggestive -rs -v {posargs} + --ignore=venv --pep8 --flakes \ + --cov={envsitepackagesdir}/suggestive -rs -v {posargs} [testenv:coverage] deps = -r{toxinidir}/test-requirements.txt commands = py.test -m "not live" --ignore=build --ignore=suggestive/alembic \ - --cov={envsitepackagesdir}/suggestive --cov-report=html + --ignore=venv --cov={envsitepackagesdir}/suggestive \ + --cov-report=html
crash on startup When starting up suggestive it crashes with a KeyError. ``` carnager@caprica ~/suggestive/suggestive > suggestive Traceback (most recent call last): File "/usr/bin/suggestive", line 9, in <module> load_entry_point('suggestive==0.4.0', 'console_scripts', 'suggestive')() File "/usr/lib/python3.5/site-packages/suggestive-0.4.0-py3.5.egg/suggestive/app.py", line 586, in main File "/usr/lib/python3.5/site-packages/suggestive-0.4.0-py3.5.egg/suggestive/app.py", line 566, in run File "/usr/lib/python3.5/site-packages/suggestive-0.4.0-py3.5.egg/suggestive/app.py", line 223, in __init__ File "/usr/lib/python3.5/site-packages/suggestive-0.4.0-py3.5.egg/suggestive/app.py", line 53, in __init__ File "/usr/lib/python3.5/site-packages/suggestive-0.4.0-py3.5.egg/suggestive/app.py", line 97, in initialize_buffers File "/usr/lib/python3.5/site-packages/suggestive-0.4.0-py3.5.egg/suggestive/app.py", line 114, in create_playlist_buffer File "/usr/lib/python3.5/site-packages/suggestive-0.4.0-py3.5.egg/suggestive/playlist.py", line 471, in __init__ File "/usr/lib/python3.5/site-packages/suggestive-0.4.0-py3.5.egg/suggestive/playlist.py", line 90, in __init__ File "/usr/lib/python3.5/site-packages/suggestive-0.4.0-py3.5.egg/suggestive/playlist.py", line 220, in update_model File "/usr/lib/python3.5/site-packages/suggestive-0.4.0-py3.5.egg/suggestive/playlist.py", line 191, in track_models File "/usr/lib/python3.5/site-packages/suggestive-0.4.0-py3.5.egg/suggestive/playlist.py", line 165, in playlist_tracks File "/usr/lib/python3.5/site-packages/suggestive-0.4.0-py3.5.egg/suggestive/mstat.py", line 140, in database_tracks_from_mpd File "/usr/lib/python3.5/site-packages/suggestive-0.4.0-py3.5.egg/suggestive/mstat.py", line 140, in <listcomp> KeyError: 'flac/Motorpsycho/1993 Demon Box/CD 1/01-Waiting for the One.flac' ```
thesquelched/suggestive
diff --git a/tests/test_mstat.py b/tests/test_mstat.py new file mode 100644 index 0000000..77a90ea --- /dev/null +++ b/tests/test_mstat.py @@ -0,0 +1,45 @@ +from unittest.mock import patch, MagicMock +from suggestive import mstat + + +class TestMpdLoader: + + @patch('suggestive.mstat.initialize_mpd') + def test_check_duplicates(self, init_mpd): + init_mpd.side_effect = [ + MagicMock(find=MagicMock(side_effect=OSError)), + MagicMock(), + ] + + session = MagicMock() + (session.query.return_value.join.return_value.group_by.return_value + .having.return_value.all.return_value) = [MagicMock()] + + loader = mstat.MpdLoader(None) + loader.check_duplicates(session) + + assert init_mpd.call_count == 2 + + @patch('suggestive.mstat.initialize_mpd') + def test_list_mpd_files(self, init_mpd): + init_mpd.side_effect = [ + MagicMock(list=MagicMock(side_effect=OSError)), + MagicMock(), + ] + + loader = mstat.MpdLoader(None) + loader._list_mpd_files() + + assert init_mpd.call_count == 2 + + @patch('suggestive.mstat.initialize_mpd') + def test_mpd_info(self, init_mpd): + init_mpd.side_effect = [ + MagicMock(listallinfo=MagicMock(side_effect=OSError)), + MagicMock(), + ] + + loader = mstat.MpdLoader(None) + loader._mpd_info(None) + + assert init_mpd.call_count == 2
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 6 }
0.4
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "tox", "pytest>=2.6.4", "pytest-flakes>=0.2", "pytest-pep8>=1.0.6", "pytest-cov>=1.8.1", "pytest" ], "pre_install": null, "python": "3.9", "reqs_path": [ "test-requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alembic==1.15.2 cachetools==5.5.2 certifi==2025.1.31 chardet==5.2.0 charset-normalizer==3.4.1 colorama==0.4.6 coverage==7.8.0 distlib==0.3.9 exceptiongroup==1.2.2 execnet==2.1.1 figgis==1.8.1 filelock==3.18.0 greenlet==3.1.1 idna==3.10 iniconfig==2.1.0 iso3166==2.1.1 Mako==1.3.9 MarkupSafe==3.0.2 packaging==24.2 pep8==1.7.1 platformdirs==4.3.7 pluggy==1.5.0 pyflakes==3.3.1 pylastfm==0.2.0 pyproject-api==1.9.0 pytest==8.3.5 pytest-cache==1.0 pytest-cov==6.0.0 pytest-flakes==4.0.5 pytest-pep8==1.0.6 python-dateutil==2.9.0.post0 python-mpd2==3.1.1 requests==2.32.3 six==1.17.0 SQLAlchemy==2.0.40 -e git+https://github.com/thesquelched/suggestive.git@b5c3a2f0a1734d8948778fbb0b252d3ea5059def#egg=suggestive tomli==2.2.1 tox==4.25.0 typing_extensions==4.13.0 urllib3==2.3.0 urwid==2.6.16 virtualenv==20.29.3 wcwidth==0.2.13
name: suggestive channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alembic==1.15.2 - cachetools==5.5.2 - certifi==2025.1.31 - chardet==5.2.0 - charset-normalizer==3.4.1 - colorama==0.4.6 - coverage==7.8.0 - distlib==0.3.9 - exceptiongroup==1.2.2 - execnet==2.1.1 - figgis==1.8.1 - filelock==3.18.0 - greenlet==3.1.1 - idna==3.10 - iniconfig==2.1.0 - iso3166==2.1.1 - mako==1.3.9 - markupsafe==3.0.2 - packaging==24.2 - pep8==1.7.1 - platformdirs==4.3.7 - pluggy==1.5.0 - pyflakes==3.3.1 - pylastfm==0.2.0 - pyproject-api==1.9.0 - pytest==8.3.5 - pytest-cache==1.0 - pytest-cov==6.0.0 - pytest-flakes==4.0.5 - pytest-pep8==1.0.6 - python-dateutil==2.9.0.post0 - python-mpd2==3.1.1 - requests==2.32.3 - six==1.17.0 - sqlalchemy==2.0.40 - tomli==2.2.1 - tox==4.25.0 - typing-extensions==4.13.0 - urllib3==2.3.0 - urwid==2.6.16 - virtualenv==20.29.3 - wcwidth==0.2.13 prefix: /opt/conda/envs/suggestive
[ "tests/test_mstat.py::TestMpdLoader::test_check_duplicates", "tests/test_mstat.py::TestMpdLoader::test_list_mpd_files", "tests/test_mstat.py::TestMpdLoader::test_mpd_info" ]
[]
[]
[]
BSD 2-Clause "Simplified" License
494
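The suggestive record above reworks the track lookup to query in chunks of 128 filenames and re-assemble results by filename. A rough sketch of that pattern with a generic query callable standing in for the SQLAlchemy query (names here are illustrative, not the project's API):

```python
def partition(items, size):
    """Yield successive slices of at most `size` items."""
    for start in range(0, len(items), size):
        yield items[start:start + size]


def tracks_for_playlist(filenames, query_chunk):
    """Resolve playlist entries to track rows, chunking the IN(...) lookups.

    `query_chunk` is any callable that maps a list of filenames to rows
    exposing a `.filename` attribute; the real code issues one SQLAlchemy
    query per chunk instead of a single large IN clause.
    """
    by_filename = {}
    for chunk in partition(filenames, 128):
        by_filename.update({row.filename: row for row in query_chunk(chunk)})
    # Entries absent from the lookup still raise KeyError here, as in the
    # original traceback; the chunking only mirrors the patch in the record above.
    return [by_filename[name] for name in filenames]
```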
enthought__okonomiyaki-182
38b9e3ecc18d2041f43a7681d05ed860b76b8d01
2016-04-07 13:30:38
ced8e9ed8db05996bc8d296c5203a942b15804ef
diff --git a/okonomiyaki/runtimes/runtime_metadata.py b/okonomiyaki/runtimes/runtime_metadata.py index ed9d415..d103630 100644 --- a/okonomiyaki/runtimes/runtime_metadata.py +++ b/okonomiyaki/runtimes/runtime_metadata.py @@ -209,7 +209,7 @@ def runtime_metadata_factory(path_or_file): key = _factory_key_from_metadata(json_dict) klass = _METADATA_KLASS_FACTORY.get(key) if klass is None: - msg = "No support for language '{1}' ('{0!r}')".format(*key) + msg = "No support for language '{1}' (metadata version '{0}')".format(*key) raise UnsupportedMetadata(key[0], msg) else: return klass._from_path(path_or_file)
MetadataVersion object should not be repr()'d in exception message ```python 'cpython-2.7.9+1-rh5_x86_64-gnu.runtime': No support for language 'whitespace' ('MetadataVersion(1, 0)') ``` https://github.com/enthought/okonomiyaki/blob/master/okonomiyaki/runtimes/runtime_metadata.py#L212
enthought/okonomiyaki
diff --git a/okonomiyaki/runtimes/tests/test_runtime_metadata.py b/okonomiyaki/runtimes/tests/test_runtime_metadata.py index 1fc8f65..1a3c660 100644 --- a/okonomiyaki/runtimes/tests/test_runtime_metadata.py +++ b/okonomiyaki/runtimes/tests/test_runtime_metadata.py @@ -203,7 +203,7 @@ class TestRuntimeMetadataFactory(unittest.TestCase): # When/Then with self.assertRaisesRegexp( UnsupportedMetadata, - r"^No support for language 'r' \('MetadataVersion\(1, 0\)'\)"): + r"^No support for language 'r' \(metadata version '1.0'\)"): runtime_metadata_factory(path) # Given
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 1 }
0.14
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "dev_requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==25.3.0 coverage==7.8.0 docutils==0.21.2 enum34==1.1.10 exceptiongroup==1.2.2 flake8==7.2.0 haas==0.9.0 iniconfig==2.1.0 jsonschema==4.23.0 jsonschema-specifications==2024.10.1 mccabe==0.7.0 mock==1.0.1 -e git+https://github.com/enthought/okonomiyaki.git@38b9e3ecc18d2041f43a7681d05ed860b76b8d01#egg=okonomiyaki packaging==24.2 pbr==6.1.1 pluggy==1.5.0 pycodestyle==2.13.0 pyflakes==3.3.2 pytest==8.3.5 referencing==0.36.2 rpds-py==0.24.0 six==1.17.0 statistics==1.0.3.5 stevedore==1.9.0 testfixtures==8.3.0 tomli==2.2.1 typing_extensions==4.13.0 zipfile2==0.0.12
name: okonomiyaki channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - argparse==1.4.0 - attrs==25.3.0 - coverage==7.8.0 - docutils==0.21.2 - enum34==1.1.10 - exceptiongroup==1.2.2 - flake8==7.2.0 - haas==0.9.0 - iniconfig==2.1.0 - jsonschema==4.23.0 - jsonschema-specifications==2024.10.1 - mccabe==0.7.0 - mock==1.0.1 - packaging==24.2 - pbr==6.1.1 - pluggy==1.5.0 - pycodestyle==2.13.0 - pyflakes==3.3.2 - pytest==8.3.5 - referencing==0.36.2 - rpds-py==0.24.0 - six==1.17.0 - statistics==1.0.3.5 - stevedore==1.9.0 - testfixtures==8.3.0 - tomli==2.2.1 - typing-extensions==4.13.0 - zipfile2==0.0.12 prefix: /opt/conda/envs/okonomiyaki
[ "okonomiyaki/runtimes/tests/test_runtime_metadata.py::TestRuntimeMetadataFactory::test_invalid" ]
[]
[ "okonomiyaki/runtimes/tests/test_runtime_metadata.py::TestPythonMetadataV1::test_invalid", "okonomiyaki/runtimes/tests/test_runtime_metadata.py::TestPythonMetadataV1::test_simple", "okonomiyaki/runtimes/tests/test_runtime_metadata.py::TestPythonMetadataV1::test_simple_pypy", "okonomiyaki/runtimes/tests/test_runtime_metadata.py::TestJuliaRuntimeMetadataV1::test_simple", "okonomiyaki/runtimes/tests/test_runtime_metadata.py::TestRuntimeMetadataFactory::test_simple" ]
[]
BSD License
495
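The okonomiyaki record above swaps a `{0!r}` conversion for plain formatting so the message shows `1.0` instead of `MetadataVersion(1, 0)`. A small before/after demonstration with a toy MetadataVersion class (not okonomiyaki's real one) that only defines __str__ and __repr__:

```python
class MetadataVersion(object):
    def __init__(self, major, minor):
        self.major, self.minor = major, minor

    def __str__(self):
        return '{0}.{1}'.format(self.major, self.minor)

    def __repr__(self):
        return 'MetadataVersion({0}, {1})'.format(self.major, self.minor)


key = (MetadataVersion(1, 0), 'r')

# Before: the !r conversion leaks the repr into the user-facing message.
print("No support for language '{1}' ('{0!r}')".format(*key))
# After: default (str) formatting reads as a plain version number.
print("No support for language '{1}' (metadata version '{0}')".format(*key))
```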
enthought__okonomiyaki-183
38b9e3ecc18d2041f43a7681d05ed860b76b8d01
2016-04-07 13:45:02
ced8e9ed8db05996bc8d296c5203a942b15804ef
diff --git a/okonomiyaki/errors.py b/okonomiyaki/errors.py index be3e7d6..fd003c4 100644 --- a/okonomiyaki/errors.py +++ b/okonomiyaki/errors.py @@ -47,8 +47,11 @@ class InvalidMetadataField(InvalidMetadata): def __init__(self, name, value, *a, **kw): self.name = name self.value = value - message = 'Metadata field is invalid ({0} = {1!r})'.format( - name, value) + if value is InvalidMetadataField.undefined: + message = "Missing metadata field {0!r}".format(self.name) + else: + message = 'Invalid value for metadata field {0!r}: {1!r}'.format( + name, value) super(InvalidMetadataField, self).__init__(message, *a, **kw) diff --git a/okonomiyaki/file_formats/_egg_info.py b/okonomiyaki/file_formats/_egg_info.py index 332168f..49fae61 100644 --- a/okonomiyaki/file_formats/_egg_info.py +++ b/okonomiyaki/file_formats/_egg_info.py @@ -347,8 +347,7 @@ def _guess_python_tag(pyver): else: m = _PYVER_RE.search(pyver) if m is None: - msg = "python_tag cannot be guessed for python = {0}" - raise InvalidMetadata(msg.format(pyver)) + raise InvalidMetadataField('python', pyver) else: major = m.groupdict()["major"] minor = m.groupdict()["minor"]
Wrong exception raised for invalid python attribute in egg metadata The exception raised should be `InvalidMetadataField`, as used elsewhere in the module. ``` Traceback (most recent call last): File "okonomiyaki/file_formats/_egg_info.py", line 733, in _from_egg spec_depend = LegacySpecDepend._from_egg(path_or_file, sha256) File "okonomiyaki/file_formats/_egg_info.py", line 463, in _from_egg return _create_spec_depend(path_or_file) File "okonomiyaki/file_formats/_egg_info.py", line 455, in _create_spec_depend spec_depend_string, epd_platform, sha256 File "okonomiyaki/file_formats/_egg_info.py", line 647, in _normalized_info_from_string raw_data[_TAG_PYTHON] File "okonomiyaki/file_formats/_egg_info.py", line 327, in _guess_python_tag raise InvalidMetadata(msg.format(pyver)) okonomiyaki.errors.InvalidMetadata: python_tag cannot be guessed for python = red ```
enthought/okonomiyaki
diff --git a/okonomiyaki/file_formats/tests/test__egg_info.py b/okonomiyaki/file_formats/tests/test__egg_info.py index 6765b8f..7760c64 100644 --- a/okonomiyaki/file_formats/tests/test__egg_info.py +++ b/okonomiyaki/file_formats/tests/test__egg_info.py @@ -301,10 +301,14 @@ packages = [ # When/Then with self.assertRaisesRegexp( - InvalidMetadata, - r'^python_tag cannot be guessed'): + InvalidMetadataField, + r"^Invalid value for metadata field 'python': 'a.7'" + ) as exc: LegacySpecDepend.from_string(s) + self.assertEqual(exc.exception.name, "python") + self.assertEqual(exc.exception.value, "a.7") + def test_blacklisted_platform(self): # Given egg = XZ_5_2_0_EGG @@ -776,8 +780,8 @@ class TestParseRawspec(unittest.TestCase): # When/Then with self.assertRaisesRegexp( - InvalidMetadataField, - r'^Metadata field is invalid \(name = <undefined>\)$'): + InvalidMetadataField, r"^Missing metadata field 'name'" + ): parse_rawspec(spec_string) def test_simple_1_2(self): @@ -911,8 +915,9 @@ packages = [ # When/Then with self.assertRaisesRegexp( - InvalidMetadataField, - r'^Metadata field is invalid \(metadata_version = None\)$'): + InvalidMetadataField, + r"^Invalid value for metadata field 'metadata_version': None" + ): parse_rawspec(spec_s) # Given a spec_string without some other metadata in >= 1.1 @@ -933,8 +938,8 @@ packages = [ # When/Then with self.assertRaisesRegexp( - InvalidMetadataField, - r'^Metadata field is invalid \(platform = <undefined>\)$'): + InvalidMetadataField, r"^Missing metadata field 'platform'" + ): parse_rawspec(spec_s) # Given a spec_string without some other metadata in >= 1.2 @@ -956,8 +961,8 @@ packages = [ # When/Then with self.assertRaisesRegexp( - InvalidMetadataField, - r'^Metadata field is invalid \(python_tag = <undefined>\)$'): + InvalidMetadataField, r"^Missing metadata field 'python_tag'" + ): parse_rawspec(spec_s) diff --git a/okonomiyaki/platforms/tests/test_python_implementation.py b/okonomiyaki/platforms/tests/test_python_implementation.py index 8ccddd9..92d6cf9 100644 --- a/okonomiyaki/platforms/tests/test_python_implementation.py +++ b/okonomiyaki/platforms/tests/test_python_implementation.py @@ -50,8 +50,9 @@ class TestPythonImplementation(unittest.TestCase): # When/Then with self.assertRaisesRegexp( - InvalidMetadataField, - r"^Metadata field is invalid \(python_tag = 'cp'\)$"): + InvalidMetadataField, + r"^Invalid value for metadata field 'python_tag': 'cp'" + ): PythonImplementation.from_string(s) # Given @@ -59,8 +60,9 @@ class TestPythonImplementation(unittest.TestCase): # When/Then with self.assertRaisesRegexp( - InvalidMetadataField, - r"^Metadata field is invalid \(python_tag = 'py2'\)$"): + InvalidMetadataField, + r"^Invalid value for metadata field 'python_tag': 'py2'$" + ): PythonImplementation.from_string(s) # Given @@ -68,8 +70,9 @@ class TestPythonImplementation(unittest.TestCase): # When/Then with self.assertRaisesRegexp( - InvalidMetadataField, - r"^Metadata field is invalid \(python_tag = 'py234'\)$"): + InvalidMetadataField, + r"^Invalid value for metadata field 'python_tag': 'py234'$" + ): PythonImplementation.from_string(s) def test_simple(self): @@ -125,8 +128,9 @@ class TestPythonImplementation(unittest.TestCase): # When/Then with self.assertRaisesRegexp( - InvalidMetadataField, - r"^Metadata field is invalid \(python_tag = 'py3'\)$"): + InvalidMetadataField, + r"^Invalid value for metadata field 'python_tag': 'py3'$" + ): PythonImplementation.from_string(tag_string) # Given @@ -134,6 +138,7 @@ class 
TestPythonImplementation(unittest.TestCase): # When/Then with self.assertRaisesRegexp( - InvalidMetadataField, - r"^Metadata field is invalid \(python_tag = 'py345'\)$"): + InvalidMetadataField, + r"^Invalid value for metadata field 'python_tag': 'py345'$" + ): PythonImplementation.from_string(tag_string)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 2, "test_score": 0 }, "num_modified_files": 2 }
0.14
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "dev_requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==25.3.0 coverage==7.8.0 docutils==0.21.2 enum34==1.1.10 exceptiongroup==1.2.2 flake8==7.2.0 haas==0.9.0 iniconfig==2.1.0 jsonschema==4.23.0 jsonschema-specifications==2024.10.1 mccabe==0.7.0 mock==1.0.1 -e git+https://github.com/enthought/okonomiyaki.git@38b9e3ecc18d2041f43a7681d05ed860b76b8d01#egg=okonomiyaki packaging==24.2 pbr==6.1.1 pluggy==1.5.0 pycodestyle==2.13.0 pyflakes==3.3.2 pytest==8.3.5 referencing==0.36.2 rpds-py==0.24.0 six==1.17.0 statistics==1.0.3.5 stevedore==1.9.0 testfixtures==8.3.0 tomli==2.2.1 typing_extensions==4.13.0 zipfile2==0.0.12
name: okonomiyaki channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - argparse==1.4.0 - attrs==25.3.0 - coverage==7.8.0 - docutils==0.21.2 - enum34==1.1.10 - exceptiongroup==1.2.2 - flake8==7.2.0 - haas==0.9.0 - iniconfig==2.1.0 - jsonschema==4.23.0 - jsonschema-specifications==2024.10.1 - mccabe==0.7.0 - mock==1.0.1 - packaging==24.2 - pbr==6.1.1 - pluggy==1.5.0 - pycodestyle==2.13.0 - pyflakes==3.3.2 - pytest==8.3.5 - referencing==0.36.2 - rpds-py==0.24.0 - six==1.17.0 - statistics==1.0.3.5 - stevedore==1.9.0 - testfixtures==8.3.0 - tomli==2.2.1 - typing-extensions==4.13.0 - zipfile2==0.0.12 prefix: /opt/conda/envs/okonomiyaki
[ "okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_error_python_to_python_tag", "okonomiyaki/file_formats/tests/test__egg_info.py::TestParseRawspec::test_invalid_spec_strings", "okonomiyaki/file_formats/tests/test__egg_info.py::TestParseRawspec::test_simple_unsupported", "okonomiyaki/platforms/tests/test_python_implementation.py::TestPythonImplementation::test_errors", "okonomiyaki/platforms/tests/test_python_implementation.py::TestPythonImplementation::test_from_string" ]
[]
[ "okonomiyaki/file_formats/tests/test__egg_info.py::TestRequirement::test_from_spec_string", "okonomiyaki/file_formats/tests/test__egg_info.py::TestRequirement::test_from_string", "okonomiyaki/file_formats/tests/test__egg_info.py::TestRequirement::test_str", "okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_blacklisted_platform", "okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_create_from_egg1", "okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_create_from_egg2", "okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_format_1_3", "okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_format_1_4", "okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_from_string", "okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_missing_spec_depend", "okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_to_string", "okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_unsupported_metadata_version", "okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDepend::test_windows_platform", "okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependAbi::test_default_extension_python_egg", "okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependAbi::test_default_no_python_egg", "okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependAbi::test_default_pure_python_egg", "okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependAbi::test_default_pure_python_egg_pypi", "okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependAbi::test_to_string", "okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependPlatform::test_default_all_none", "okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependPlatform::test_default_rh5_32", "okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependPlatform::test_default_rh5_64", "okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependPlatform::test_default_win_32", "okonomiyaki/file_formats/tests/test__egg_info.py::TestLegacySpecDependPlatform::test_default_win_64", "okonomiyaki/file_formats/tests/test__egg_info.py::TestGuessPlatformAbi::test_no_platform", "okonomiyaki/file_formats/tests/test__egg_info.py::TestGuessPlatformAbi::test_no_python_implementation", "okonomiyaki/file_formats/tests/test__egg_info.py::TestGuessPlatformAbi::test_python_27", "okonomiyaki/file_formats/tests/test__egg_info.py::TestGuessPlatformAbi::test_python_34", "okonomiyaki/file_formats/tests/test__egg_info.py::TestGuessPlatformAbi::test_python_35", "okonomiyaki/file_formats/tests/test__egg_info.py::TestEggName::test_split_egg_name", "okonomiyaki/file_formats/tests/test__egg_info.py::TestEggName::test_split_egg_name_invalid", "okonomiyaki/file_formats/tests/test__egg_info.py::TestParseRawspec::test_simple_1_1", "okonomiyaki/file_formats/tests/test__egg_info.py::TestParseRawspec::test_simple_1_2", "okonomiyaki/file_formats/tests/test__egg_info.py::TestParseRawspec::test_with_dependencies", "okonomiyaki/file_formats/tests/test__egg_info.py::TestParseRawspec::test_with_none", "okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_blacklisted_pkg_info", "okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_blacklisted_platform", "okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_blacklisted_python_tag", 
"okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_dump_blacklisted", "okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_dump_blacklisted_platform", "okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_dump_simple", "okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_fixed_requirement", "okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_from_cross_platform_egg", "okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_from_platform_egg", "okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_no_pkg_info", "okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_platform_abi", "okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_platform_abi_no_python", "okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_simple", "okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_simple_non_python_egg", "okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_strictness", "okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_support_higher_compatible_version", "okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_support_lower_compatible_version", "okonomiyaki/file_formats/tests/test__egg_info.py::TestEggInfo::test_to_spec_string", "okonomiyaki/platforms/tests/test_python_implementation.py::TestPythonImplementation::test_abbreviations", "okonomiyaki/platforms/tests/test_python_implementation.py::TestPythonImplementation::test_from_running_python", "okonomiyaki/platforms/tests/test_python_implementation.py::TestPythonImplementation::test_simple" ]
[]
BSD License
496
dpkp__kafka-python-640
810f08b7996a15e65cdd8af6c1a7167c28f94646
2016-04-09 00:02:35
810f08b7996a15e65cdd8af6c1a7167c28f94646
diff --git a/kafka/client_async.py b/kafka/client_async.py index 0c22f90..36e808c 100644 --- a/kafka/client_async.py +++ b/kafka/client_async.py @@ -6,7 +6,14 @@ import heapq import itertools import logging import random -import select + +# selectors in stdlib as of py3.4 +try: + import selectors # pylint: disable=import-error +except ImportError: + # vendored backport module + from . import selectors34 as selectors + import socket import time @@ -92,6 +99,7 @@ class KafkaClient(object): self.cluster = ClusterMetadata(**self.config) self._topics = set() # empty set will fetch all topic metadata self._metadata_refresh_in_progress = False + self._selector = selectors.DefaultSelector() self._conns = {} self._connecting = set() self._refresh_on_disconnects = True @@ -101,6 +109,7 @@ class KafkaClient(object): self._bootstrap(collect_hosts(self.config['bootstrap_servers'])) self._wake_r, self._wake_w = socket.socketpair() self._wake_r.setblocking(False) + self._selector.register(self._wake_r, selectors.EVENT_READ) def __del__(self): self._wake_r.close() @@ -160,11 +169,19 @@ class KafkaClient(object): def _conn_state_change(self, node_id, conn): if conn.connecting(): self._connecting.add(node_id) + self._selector.register(conn._sock, selectors.EVENT_WRITE) elif conn.connected(): log.debug("Node %s connected", node_id) if node_id in self._connecting: self._connecting.remove(node_id) + + try: + self._selector.unregister(conn._sock) + except KeyError: + pass + self._selector.register(conn._sock, selectors.EVENT_READ, conn) + if 'bootstrap' in self._conns and node_id != 'bootstrap': bootstrap = self._conns.pop('bootstrap') # XXX: make conn.close() require error to cause refresh @@ -176,6 +193,10 @@ class KafkaClient(object): elif conn.state is ConnectionStates.DISCONNECTING: if node_id in self._connecting: self._connecting.remove(node_id) + try: + self._selector.unregister(conn._sock) + except KeyError: + pass if self._refresh_on_disconnects: log.warning("Node %s connect failed -- refreshing metadata", node_id) self.cluster.request_update() @@ -388,45 +409,25 @@ class KafkaClient(object): return responses - def _poll(self, timeout, sleep=False): + def _poll(self, timeout, sleep=True): # select on reads across all connected sockets, blocking up to timeout - sockets = dict([(conn._sock, conn) - for conn in six.itervalues(self._conns) - if conn.state is ConnectionStates.CONNECTED - and conn.in_flight_requests]) - if not sockets: - # if sockets are connecting, we can wake when they are writeable - if self._connecting: - sockets = [self._conns[node]._sock for node in self._connecting] - select.select([self._wake_r], sockets, [], timeout) - elif timeout: - if sleep: - log.debug('Sleeping at %s for %s', time.time(), timeout) - select.select([self._wake_r], [], [], timeout) - log.debug('Woke up at %s', time.time()) - else: - log.warning('_poll called with a non-zero timeout and' - ' sleep=False -- but there was nothing to do.' 
- ' This can cause high CPU usage during idle.') - self._clear_wake_fd() - return [] - - # Add a private pipe fd to allow external wakeups - fds = list(sockets.keys()) - fds.append(self._wake_r) - ready, _, _ = select.select(fds, [], [], timeout) - + assert self.in_flight_request_count() > 0 or self._connecting or sleep responses = [] - for sock in ready: - if sock == self._wake_r: + for key, events in self._selector.select(timeout): + if key.fileobj is self._wake_r: + self._clear_wake_fd() + continue + elif not (events & selectors.EVENT_READ): continue - conn = sockets[sock] + conn = key.data while conn.in_flight_requests: response = conn.recv() # Note: conn.recv runs callbacks / errbacks + + # Incomplete responses are buffered internally + # while conn.in_flight_requests retains the request if not response: break responses.append(response) - self._clear_wake_fd() return responses def in_flight_request_count(self, node_id=None): diff --git a/kafka/selectors34.py b/kafka/selectors34.py new file mode 100644 index 0000000..541c29c --- /dev/null +++ b/kafka/selectors34.py @@ -0,0 +1,635 @@ +# pylint: skip-file +# vendored from https://github.com/berkerpeksag/selectors34 +# at commit 5195dd2cbe598047ad0a2e446a829546f6ffc9eb (v1.1) +# +# Original author: Charles-Francois Natali (c.f.natali[at]gmail.com) +# Maintainer: Berker Peksag (berker.peksag[at]gmail.com) +# Also see https://pypi.python.org/pypi/selectors34 +"""Selectors module. + +This module allows high-level and efficient I/O multiplexing, built upon the +`select` module primitives. + +The following code adapted from trollius.selectors. +""" + + +from abc import ABCMeta, abstractmethod +from collections import namedtuple, Mapping +from errno import EINTR +import math +import select +import sys + +import six + + +def _wrap_error(exc, mapping, key): + if key not in mapping: + return + new_err_cls = mapping[key] + new_err = new_err_cls(*exc.args) + + # raise a new exception with the original traceback + if hasattr(exc, '__traceback__'): + traceback = exc.__traceback__ + else: + traceback = sys.exc_info()[2] + six.reraise(new_err_cls, new_err, traceback) + + +# generic events, that must be mapped to implementation-specific ones +EVENT_READ = (1 << 0) +EVENT_WRITE = (1 << 1) + + +def _fileobj_to_fd(fileobj): + """Return a file descriptor from a file object. 
+ + Parameters: + fileobj -- file object or file descriptor + + Returns: + corresponding file descriptor + + Raises: + ValueError if the object is invalid + """ + if isinstance(fileobj, six.integer_types): + fd = fileobj + else: + try: + fd = int(fileobj.fileno()) + except (AttributeError, TypeError, ValueError): + raise ValueError("Invalid file object: " + "{0!r}".format(fileobj)) + if fd < 0: + raise ValueError("Invalid file descriptor: {0}".format(fd)) + return fd + + +SelectorKey = namedtuple('SelectorKey', ['fileobj', 'fd', 'events', 'data']) +"""Object used to associate a file object to its backing file descriptor, +selected event mask and attached data.""" + + +class _SelectorMapping(Mapping): + """Mapping of file objects to selector keys.""" + + def __init__(self, selector): + self._selector = selector + + def __len__(self): + return len(self._selector._fd_to_key) + + def __getitem__(self, fileobj): + try: + fd = self._selector._fileobj_lookup(fileobj) + return self._selector._fd_to_key[fd] + except KeyError: + raise KeyError("{0!r} is not registered".format(fileobj)) + + def __iter__(self): + return iter(self._selector._fd_to_key) + + +class BaseSelector(six.with_metaclass(ABCMeta)): + """Selector abstract base class. + + A selector supports registering file objects to be monitored for specific + I/O events. + + A file object is a file descriptor or any object with a `fileno()` method. + An arbitrary object can be attached to the file object, which can be used + for example to store context information, a callback, etc. + + A selector can use various implementations (select(), poll(), epoll()...) + depending on the platform. The default `Selector` class uses the most + efficient implementation on the current platform. + """ + + @abstractmethod + def register(self, fileobj, events, data=None): + """Register a file object. + + Parameters: + fileobj -- file object or file descriptor + events -- events to monitor (bitwise mask of EVENT_READ|EVENT_WRITE) + data -- attached data + + Returns: + SelectorKey instance + + Raises: + ValueError if events is invalid + KeyError if fileobj is already registered + OSError if fileobj is closed or otherwise is unacceptable to + the underlying system call (if a system call is made) + + Note: + OSError may or may not be raised + """ + raise NotImplementedError + + @abstractmethod + def unregister(self, fileobj): + """Unregister a file object. + + Parameters: + fileobj -- file object or file descriptor + + Returns: + SelectorKey instance + + Raises: + KeyError if fileobj is not registered + + Note: + If fileobj is registered but has since been closed this does + *not* raise OSError (even if the wrapped syscall does) + """ + raise NotImplementedError + + def modify(self, fileobj, events, data=None): + """Change a registered file object monitored events or attached data. + + Parameters: + fileobj -- file object or file descriptor + events -- events to monitor (bitwise mask of EVENT_READ|EVENT_WRITE) + data -- attached data + + Returns: + SelectorKey instance + + Raises: + Anything that unregister() or register() raises + """ + self.unregister(fileobj) + return self.register(fileobj, events, data) + + @abstractmethod + def select(self, timeout=None): + """Perform the actual selection, until some monitored file objects are + ready or a timeout expires. 
+ + Parameters: + timeout -- if timeout > 0, this specifies the maximum wait time, in + seconds + if timeout <= 0, the select() call won't block, and will + report the currently ready file objects + if timeout is None, select() will block until a monitored + file object becomes ready + + Returns: + list of (key, events) for ready file objects + `events` is a bitwise mask of EVENT_READ|EVENT_WRITE + """ + raise NotImplementedError + + def close(self): + """Close the selector. + + This must be called to make sure that any underlying resource is freed. + """ + pass + + def get_key(self, fileobj): + """Return the key associated to a registered file object. + + Returns: + SelectorKey for this file object + """ + mapping = self.get_map() + if mapping is None: + raise RuntimeError('Selector is closed') + try: + return mapping[fileobj] + except KeyError: + raise KeyError("{0!r} is not registered".format(fileobj)) + + @abstractmethod + def get_map(self): + """Return a mapping of file objects to selector keys.""" + raise NotImplementedError + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + +class _BaseSelectorImpl(BaseSelector): + """Base selector implementation.""" + + def __init__(self): + # this maps file descriptors to keys + self._fd_to_key = {} + # read-only mapping returned by get_map() + self._map = _SelectorMapping(self) + + def _fileobj_lookup(self, fileobj): + """Return a file descriptor from a file object. + + This wraps _fileobj_to_fd() to do an exhaustive search in case + the object is invalid but we still have it in our map. This + is used by unregister() so we can unregister an object that + was previously registered even if it is closed. It is also + used by _SelectorMapping. + """ + try: + return _fileobj_to_fd(fileobj) + except ValueError: + # Do an exhaustive search. + for key in self._fd_to_key.values(): + if key.fileobj is fileobj: + return key.fd + # Raise ValueError after all. + raise + + def register(self, fileobj, events, data=None): + if (not events) or (events & ~(EVENT_READ | EVENT_WRITE)): + raise ValueError("Invalid events: {0!r}".format(events)) + + key = SelectorKey(fileobj, self._fileobj_lookup(fileobj), events, data) + + if key.fd in self._fd_to_key: + raise KeyError("{0!r} (FD {1}) is already registered" + .format(fileobj, key.fd)) + + self._fd_to_key[key.fd] = key + return key + + def unregister(self, fileobj): + try: + key = self._fd_to_key.pop(self._fileobj_lookup(fileobj)) + except KeyError: + raise KeyError("{0!r} is not registered".format(fileobj)) + return key + + def modify(self, fileobj, events, data=None): + # TODO: Subclasses can probably optimize this even further. + try: + key = self._fd_to_key[self._fileobj_lookup(fileobj)] + except KeyError: + raise KeyError("{0!r} is not registered".format(fileobj)) + if events != key.events: + self.unregister(fileobj) + key = self.register(fileobj, events, data) + elif data != key.data: + # Use a shortcut to update the data. + key = key._replace(data=data) + self._fd_to_key[key.fd] = key + return key + + def close(self): + self._fd_to_key.clear() + self._map = None + + def get_map(self): + return self._map + + def _key_from_fd(self, fd): + """Return the key associated to a given file descriptor. 
+ + Parameters: + fd -- file descriptor + + Returns: + corresponding key, or None if not found + """ + try: + return self._fd_to_key[fd] + except KeyError: + return None + + +class SelectSelector(_BaseSelectorImpl): + """Select-based selector.""" + + def __init__(self): + super(SelectSelector, self).__init__() + self._readers = set() + self._writers = set() + + def register(self, fileobj, events, data=None): + key = super(SelectSelector, self).register(fileobj, events, data) + if events & EVENT_READ: + self._readers.add(key.fd) + if events & EVENT_WRITE: + self._writers.add(key.fd) + return key + + def unregister(self, fileobj): + key = super(SelectSelector, self).unregister(fileobj) + self._readers.discard(key.fd) + self._writers.discard(key.fd) + return key + + if sys.platform == 'win32': + def _select(self, r, w, _, timeout=None): + r, w, x = select.select(r, w, w, timeout) + return r, w + x, [] + else: + _select = select.select + + def select(self, timeout=None): + timeout = None if timeout is None else max(timeout, 0) + ready = [] + try: + r, w, _ = self._select(self._readers, self._writers, [], timeout) + except select.error as exc: + if exc.args[0] == EINTR: + return ready + else: + raise + r = set(r) + w = set(w) + for fd in r | w: + events = 0 + if fd in r: + events |= EVENT_READ + if fd in w: + events |= EVENT_WRITE + + key = self._key_from_fd(fd) + if key: + ready.append((key, events & key.events)) + return ready + + +if hasattr(select, 'poll'): + + class PollSelector(_BaseSelectorImpl): + """Poll-based selector.""" + + def __init__(self): + super(PollSelector, self).__init__() + self._poll = select.poll() + + def register(self, fileobj, events, data=None): + key = super(PollSelector, self).register(fileobj, events, data) + poll_events = 0 + if events & EVENT_READ: + poll_events |= select.POLLIN + if events & EVENT_WRITE: + poll_events |= select.POLLOUT + self._poll.register(key.fd, poll_events) + return key + + def unregister(self, fileobj): + key = super(PollSelector, self).unregister(fileobj) + self._poll.unregister(key.fd) + return key + + def select(self, timeout=None): + if timeout is None: + timeout = None + elif timeout <= 0: + timeout = 0 + else: + # poll() has a resolution of 1 millisecond, round away from + # zero to wait *at least* timeout seconds. + timeout = int(math.ceil(timeout * 1e3)) + ready = [] + try: + fd_event_list = self._poll.poll(timeout) + except select.error as exc: + if exc.args[0] == EINTR: + return ready + else: + raise + for fd, event in fd_event_list: + events = 0 + if event & ~select.POLLIN: + events |= EVENT_WRITE + if event & ~select.POLLOUT: + events |= EVENT_READ + + key = self._key_from_fd(fd) + if key: + ready.append((key, events & key.events)) + return ready + + +if hasattr(select, 'epoll'): + + class EpollSelector(_BaseSelectorImpl): + """Epoll-based selector.""" + + def __init__(self): + super(EpollSelector, self).__init__() + self._epoll = select.epoll() + + def fileno(self): + return self._epoll.fileno() + + def register(self, fileobj, events, data=None): + key = super(EpollSelector, self).register(fileobj, events, data) + epoll_events = 0 + if events & EVENT_READ: + epoll_events |= select.EPOLLIN + if events & EVENT_WRITE: + epoll_events |= select.EPOLLOUT + self._epoll.register(key.fd, epoll_events) + return key + + def unregister(self, fileobj): + key = super(EpollSelector, self).unregister(fileobj) + try: + self._epoll.unregister(key.fd) + except IOError: + # This can happen if the FD was closed since it + # was registered. 
+ pass + return key + + def select(self, timeout=None): + if timeout is None: + timeout = -1 + elif timeout <= 0: + timeout = 0 + else: + # epoll_wait() has a resolution of 1 millisecond, round away + # from zero to wait *at least* timeout seconds. + timeout = math.ceil(timeout * 1e3) * 1e-3 + + # epoll_wait() expects `maxevents` to be greater than zero; + # we want to make sure that `select()` can be called when no + # FD is registered. + max_ev = max(len(self._fd_to_key), 1) + + ready = [] + try: + fd_event_list = self._epoll.poll(timeout, max_ev) + except IOError as exc: + if exc.errno == EINTR: + return ready + else: + raise + for fd, event in fd_event_list: + events = 0 + if event & ~select.EPOLLIN: + events |= EVENT_WRITE + if event & ~select.EPOLLOUT: + events |= EVENT_READ + + key = self._key_from_fd(fd) + if key: + ready.append((key, events & key.events)) + return ready + + def close(self): + self._epoll.close() + super(EpollSelector, self).close() + + +if hasattr(select, 'devpoll'): + + class DevpollSelector(_BaseSelectorImpl): + """Solaris /dev/poll selector.""" + + def __init__(self): + super(DevpollSelector, self).__init__() + self._devpoll = select.devpoll() + + def fileno(self): + return self._devpoll.fileno() + + def register(self, fileobj, events, data=None): + key = super(DevpollSelector, self).register(fileobj, events, data) + poll_events = 0 + if events & EVENT_READ: + poll_events |= select.POLLIN + if events & EVENT_WRITE: + poll_events |= select.POLLOUT + self._devpoll.register(key.fd, poll_events) + return key + + def unregister(self, fileobj): + key = super(DevpollSelector, self).unregister(fileobj) + self._devpoll.unregister(key.fd) + return key + + def select(self, timeout=None): + if timeout is None: + timeout = None + elif timeout <= 0: + timeout = 0 + else: + # devpoll() has a resolution of 1 millisecond, round away from + # zero to wait *at least* timeout seconds. + timeout = math.ceil(timeout * 1e3) + ready = [] + try: + fd_event_list = self._devpoll.poll(timeout) + except OSError as exc: + if exc.errno == EINTR: + return ready + else: + raise + for fd, event in fd_event_list: + events = 0 + if event & ~select.POLLIN: + events |= EVENT_WRITE + if event & ~select.POLLOUT: + events |= EVENT_READ + + key = self._key_from_fd(fd) + if key: + ready.append((key, events & key.events)) + return ready + + def close(self): + self._devpoll.close() + super(DevpollSelector, self).close() + + +if hasattr(select, 'kqueue'): + + class KqueueSelector(_BaseSelectorImpl): + """Kqueue-based selector.""" + + def __init__(self): + super(KqueueSelector, self).__init__() + self._kqueue = select.kqueue() + + def fileno(self): + return self._kqueue.fileno() + + def register(self, fileobj, events, data=None): + key = super(KqueueSelector, self).register(fileobj, events, data) + if events & EVENT_READ: + kev = select.kevent(key.fd, select.KQ_FILTER_READ, + select.KQ_EV_ADD) + self._kqueue.control([kev], 0, 0) + if events & EVENT_WRITE: + kev = select.kevent(key.fd, select.KQ_FILTER_WRITE, + select.KQ_EV_ADD) + self._kqueue.control([kev], 0, 0) + return key + + def unregister(self, fileobj): + key = super(KqueueSelector, self).unregister(fileobj) + if key.events & EVENT_READ: + kev = select.kevent(key.fd, select.KQ_FILTER_READ, + select.KQ_EV_DELETE) + try: + self._kqueue.control([kev], 0, 0) + except OSError: + # This can happen if the FD was closed since it + # was registered. 
+ pass + if key.events & EVENT_WRITE: + kev = select.kevent(key.fd, select.KQ_FILTER_WRITE, + select.KQ_EV_DELETE) + try: + self._kqueue.control([kev], 0, 0) + except OSError: + # See comment above. + pass + return key + + def select(self, timeout=None): + timeout = None if timeout is None else max(timeout, 0) + max_ev = len(self._fd_to_key) + ready = [] + try: + kev_list = self._kqueue.control(None, max_ev, timeout) + except OSError as exc: + if exc.errno == EINTR: + return ready + else: + raise + for kev in kev_list: + fd = kev.ident + flag = kev.filter + events = 0 + if flag == select.KQ_FILTER_READ: + events |= EVENT_READ + if flag == select.KQ_FILTER_WRITE: + events |= EVENT_WRITE + + key = self._key_from_fd(fd) + if key: + ready.append((key, events & key.events)) + return ready + + def close(self): + self._kqueue.close() + super(KqueueSelector, self).close() + + +# Choose the best implementation, roughly: +# epoll|kqueue|devpoll > poll > select. +# select() also can't accept a FD > FD_SETSIZE (usually around 1024) +if 'KqueueSelector' in globals(): + DefaultSelector = KqueueSelector +elif 'EpollSelector' in globals(): + DefaultSelector = EpollSelector +elif 'DevpollSelector' in globals(): + DefaultSelector = DevpollSelector +elif 'PollSelector' in globals(): + DefaultSelector = PollSelector +else: + DefaultSelector = SelectSelector
KafkaProducer fails when many sockets are opened

When using KafkaProducer in an environment with over 1024 open sockets we get the following issue:

```python
  File "/srv/.../lib/python2.7/site-packages/kafka/producer/kafka.py", line 248, in __init__
    self.config['api_version'] = client.check_version()
  File "/srv/.../lib/python2.7/site-packages/kafka/client_async.py", line 639, in check_version
    self.poll(future=f)
  File "/srv/.../lib/python2.7/site-packages/kafka/client_async.py", line 367, in poll
    responses.extend(self._poll(timeout, sleep=sleep))
  File "/srv/.../lib/python2.7/site-packages/kafka/client_async.py", line 402, in _poll
    ready, _, _ = select.select(fds, [], [], timeout)
ValueError: filedescriptor out of range in select()
```

AFAIK this is because the select() call is limited to FD_SETSIZE being 1024 on Linux.
dpkp/kafka-python
diff --git a/test/test_client_async.py b/test/test_client_async.py index ad76aad..922e43c 100644 --- a/test/test_client_async.py +++ b/test/test_client_async.py @@ -1,3 +1,10 @@ +# selectors in stdlib as of py3.4 +try: + import selectors # pylint: disable=import-error +except ImportError: + # vendored backport module + import kafka.selectors34 as selectors + import socket import time @@ -99,15 +106,19 @@ def test_maybe_connect(conn): def test_conn_state_change(mocker, conn): cli = KafkaClient() + sel = mocker.patch.object(cli, '_selector') node_id = 0 conn.state = ConnectionStates.CONNECTING cli._conn_state_change(node_id, conn) assert node_id in cli._connecting + sel.register.assert_called_with(conn._sock, selectors.EVENT_WRITE) conn.state = ConnectionStates.CONNECTED cli._conn_state_change(node_id, conn) assert node_id not in cli._connecting + sel.unregister.assert_called_with(conn._sock) + sel.register.assert_called_with(conn._sock, selectors.EVENT_READ, conn) # Failure to connect should trigger metadata update assert cli.cluster._need_update is False @@ -115,6 +126,7 @@ def test_conn_state_change(mocker, conn): cli._conn_state_change(node_id, conn) assert node_id not in cli._connecting assert cli.cluster._need_update is True + sel.unregister.assert_called_with(conn._sock) conn.state = ConnectionStates.CONNECTING cli._conn_state_change(node_id, conn) @@ -167,8 +179,9 @@ def test_is_ready(mocker, conn): assert not cli.is_ready(0) -def test_close(conn): +def test_close(mocker, conn): cli = KafkaClient() + mocker.patch.object(cli, '_selector') # Unknown node - silent cli.close(2)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_added_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 1 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "six", "pip_packages": [ "pytest", "pytest-cov", "pytest-catchlog", "pytest-pylint", "pytest-sugar", "pytest-mock", "mock" ], "pre_install": [ "apt-get update", "apt-get install -y gcc libsnappy-dev" ], "python": "3.5", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
astroid==2.11.7 attrs==22.2.0 certifi==2021.5.30 coverage==6.2 dill==0.3.4 importlib-metadata==4.8.3 iniconfig==1.1.1 isort==5.10.1 -e git+https://github.com/dpkp/kafka-python.git@810f08b7996a15e65cdd8af6c1a7167c28f94646#egg=kafka_python lazy-object-proxy==1.7.1 mccabe==0.7.0 mock==5.2.0 packaging==21.3 platformdirs==2.4.0 pluggy==1.0.0 py==1.11.0 pylint==2.13.9 pyparsing==3.1.4 pytest==7.0.1 pytest-catchlog==1.2.2 pytest-cov==4.0.0 pytest-mock==3.6.1 pytest-pylint==0.18.0 pytest-sugar==0.9.6 six @ file:///tmp/build/80754af9/six_1644875935023/work termcolor==1.1.0 toml==0.10.2 tomli==1.2.3 typed-ast==1.5.5 typing_extensions==4.1.1 wrapt==1.16.0 zipp==3.6.0
name: kafka-python channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - six=1.16.0=pyhd3eb1b0_1 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - astroid==2.11.7 - attrs==22.2.0 - coverage==6.2 - dill==0.3.4 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - isort==5.10.1 - lazy-object-proxy==1.7.1 - mccabe==0.7.0 - mock==5.2.0 - packaging==21.3 - platformdirs==2.4.0 - pluggy==1.0.0 - py==1.11.0 - pylint==2.13.9 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-catchlog==1.2.2 - pytest-cov==4.0.0 - pytest-mock==3.6.1 - pytest-pylint==0.18.0 - pytest-sugar==0.9.6 - termcolor==1.1.0 - toml==0.10.2 - tomli==1.2.3 - typed-ast==1.5.5 - typing-extensions==4.1.1 - wrapt==1.16.0 - zipp==3.6.0 prefix: /opt/conda/envs/kafka-python
[ "test/test_client_async.py::test_conn_state_change", "test/test_client_async.py::test_close" ]
[]
[ "test/test_client_async.py::test_bootstrap_servers[None-expected_hosts0]", "test/test_client_async.py::test_bootstrap_servers[foobar:1234-expected_hosts1]", "test/test_client_async.py::test_bootstrap_servers[fizzbuzz-expected_hosts2]", "test/test_client_async.py::test_bootstrap_servers[foo:12,bar:34-expected_hosts3]", "test/test_client_async.py::test_bootstrap_servers[bootstrap4-expected_hosts4]", "test/test_client_async.py::test_bootstrap_success", "test/test_client_async.py::test_bootstrap_failure", "test/test_client_async.py::test_can_connect", "test/test_client_async.py::test_maybe_connect", "test/test_client_async.py::test_ready", "test/test_client_async.py::test_is_ready", "test/test_client_async.py::test_is_disconnected", "test/test_client_async.py::test_send", "test/test_client_async.py::test_poll", "test/test_client_async.py::test__poll", "test/test_client_async.py::test_in_flight_request_count", "test/test_client_async.py::test_least_loaded_node", "test/test_client_async.py::test_set_topics", "test/test_client_async.py::test_maybe_refresh_metadata", "test/test_client_async.py::test_schedule", "test/test_client_async.py::test_unschedule" ]
[]
Apache License 2.0
497
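Aside on the kafka-python record above (illustrative only, not part of the dataset fields): the patch replaces `select.select()` with the stdlib `selectors` module, which is the usual way around the `FD_SETSIZE` (~1024) ceiling described in the problem statement, since `DefaultSelector` resolves to epoll/kqueue/poll where the platform provides them. A minimal, self-contained sketch of the register/poll pattern — the socket pair and the attached label are made up for demonstration and are not taken from kafka-python — might look like this:

```python
import selectors
import socket

# DefaultSelector picks epoll/kqueue/poll when available, so it is not
# limited to file descriptors below FD_SETSIZE the way select.select() is.
sel = selectors.DefaultSelector()

# A connected socket pair stands in for a broker connection (illustrative).
reader, writer = socket.socketpair()
reader.setblocking(False)

# Attach arbitrary data (here a label) so the poll loop can find the owner.
sel.register(reader, selectors.EVENT_READ, data="example-conn")

writer.send(b"ping")

for key, events in sel.select(timeout=1.0):
    if events & selectors.EVENT_READ:
        print(key.data, key.fileobj.recv(4))   # example-conn b'ping'

sel.unregister(reader)
sel.close()
reader.close()
writer.close()
```

Attaching the connection object through the `data` argument is how the patched `KafkaClient._poll` recovers which connection became readable (`conn = key.data` in the diff above).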
QuickPay__quickpay-python-client-4
79e52fcd5075d58e5da7692aa5850d567af1d824
2016-04-11 12:21:59
79e52fcd5075d58e5da7692aa5850d567af1d824
diff --git a/README.md b/README.md index e6b320f..c4a0949 100644 --- a/README.md +++ b/README.md @@ -9,11 +9,11 @@ Installation Add to your `requirements.txt` - quickpay + quickpay-api-client or install via [pip](https://github.com/pypa/pip): - $ pip install quickpay-python-client + $ pip install quickpay-api-client It is currently tested with Python >= 2.5 and Python 3. @@ -31,14 +31,14 @@ First you should create a client instance that is anonymous or authorized with a To initialise an anonymous client: ``` -from quickpay import QPClient +from quickpay_api_client import QPClient client = QPClient() ``` To initialise a client with QuickPay Api Key: ``` -from quickpay import QPClient +from quickpay_api_client import QPClient secret = ":{0}".format(os.environ['QUICKPAY_API_KEY']) client = QPClient(secret) ``` @@ -46,7 +46,7 @@ client = QPClient(secret) Or you can provide login credentials like: ``` -from quickpay import QPClient +from quickpay_api_client import QPClient secret= "{0}:{1}".format(os.environ['QUICKPAY_LOGIN'], os.environ['QUICKPAY_PASSWORD']) client = QPClient(secret) ``` @@ -81,7 +81,7 @@ By default (get|post|patch|put|delete) will return JSON parsed body on success ( You can listen for any api error like: ``` -from quickpay.exceptions import ApiError +from quickpay_api_client.exceptions import ApiError try: client.post('/payments', currency='DKK', order_id='1212') ... diff --git a/quickpay/__init__.py b/quickpay_api_client/__init__.py similarity index 100% rename from quickpay/__init__.py rename to quickpay_api_client/__init__.py diff --git a/quickpay/api.py b/quickpay_api_client/api.py similarity index 96% rename from quickpay/api.py rename to quickpay_api_client/api.py index 1032900..a0a4777 100644 --- a/quickpay/api.py +++ b/quickpay_api_client/api.py @@ -7,8 +7,8 @@ import requests from requests.adapters import HTTPAdapter from requests.packages.urllib3.poolmanager import PoolManager -from quickpay import exceptions -import quickpay +from quickpay_api_client import exceptions +import quickpay_api_client class QPAdapter(HTTPAdapter): @@ -49,7 +49,7 @@ class QPApi(object): headers = { "Accept-Version": 'v%s' % self.api_version, - "User-Agent": "quickpay-python-client, v%s" % quickpay.__version__ + "User-Agent": "quickpay-python-client, v%s" % quickpay_api_client.__version__ } if self.secret: diff --git a/setup.py b/setup.py index 3c9281c..e1faeb7 100644 --- a/setup.py +++ b/setup.py @@ -10,7 +10,7 @@ reqs = ['requests>=2.5'] tests_requires = ['nose', 'httpretty', 'mock'] version = '' -with open('quickpay/__init__.py', 'r') as fd: +with open('quickpay_api_client/__init__.py', 'r') as fd: version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(), re.MULTILINE).group(1) @@ -18,13 +18,13 @@ if not version: raise RuntimeError('Cannot find version information') setup( - name='quickpay', + name='quickpay-api-client', version=version, description='Python client for QuickPay API', author_email="[email protected]", author="QuickPay Developers", url="https://github.com/QuickPay/quickpay-python-client", - packages=['quickpay'], + packages=['quickpay_api_client'], install_requires=reqs, tests_requires=tests_requires, test_suite='nose.collector')
Not available on PIP

Looks like this isn't really available on pip even though the readme file states so.

```
pip install quickpay-python-client
Collecting quickpay-python-client
Could not find any downloads that satisfy the requirement quickpay-python-client
No distributions at all found for quickpay-python-client
```

Also the readme file says to add "quickpay" to the requirements file, but says that the package name is quickpay-python-client

And neither of them works.
QuickPay/quickpay-python-client
diff --git a/quickpay/client.py b/quickpay_api_client/client.py similarity index 100% rename from quickpay/client.py rename to quickpay_api_client/client.py diff --git a/quickpay/exceptions.py b/quickpay_api_client/exceptions.py similarity index 100% rename from quickpay/exceptions.py rename to quickpay_api_client/exceptions.py diff --git a/quickpay/tests/__init__.py b/quickpay_api_client/tests/__init__.py similarity index 100% rename from quickpay/tests/__init__.py rename to quickpay_api_client/tests/__init__.py diff --git a/quickpay/tests/api_tests.py b/quickpay_api_client/tests/api_tests.py similarity index 94% rename from quickpay/tests/api_tests.py rename to quickpay_api_client/tests/api_tests.py index a82edff..4fdd36b 100644 --- a/quickpay/tests/api_tests.py +++ b/quickpay_api_client/tests/api_tests.py @@ -1,8 +1,8 @@ import base64, json from nose.tools import assert_equal, assert_raises import requests -from quickpay.api import QPApi -from quickpay.exceptions import ApiError +from quickpay_api_client.api import QPApi +from quickpay_api_client.exceptions import ApiError import httpretty diff --git a/quickpay/tests/client_tests.py b/quickpay_api_client/tests/client_tests.py similarity index 93% rename from quickpay/tests/client_tests.py rename to quickpay_api_client/tests/client_tests.py index 06eb4de..46d3e25 100644 --- a/quickpay/tests/client_tests.py +++ b/quickpay_api_client/tests/client_tests.py @@ -1,6 +1,6 @@ from nose.tools import assert_equal, assert_raises -from quickpay.api import QPApi -from quickpay import QPClient +from quickpay_api_client.api import QPApi +from quickpay_api_client import QPClient from mock import MagicMock
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 3 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.7", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi @ file:///croot/certifi_1671487769961/work/certifi charset-normalizer==3.4.1 exceptiongroup==1.2.2 httpretty==0.8.8 idna==3.10 importlib-metadata==6.7.0 iniconfig==2.0.0 mock==1.0.1 nose==1.3.6 packaging==24.0 pluggy==1.2.0 pytest==7.4.4 -e git+https://github.com/QuickPay/quickpay-python-client.git@79e52fcd5075d58e5da7692aa5850d567af1d824#egg=quickpay requests==2.31.0 tomli==2.0.1 typing_extensions==4.7.1 urllib3==2.0.7 zipp==3.15.0
name: quickpay-python-client channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2022.12.7=py37h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=22.3.1=py37h06a4308_0 - python=3.7.16=h7a1cb2a_0 - readline=8.2=h5eee18b_0 - setuptools=65.6.3=py37h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.38.4=py37h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - charset-normalizer==3.4.1 - exceptiongroup==1.2.2 - httpretty==0.8.8 - idna==3.10 - importlib-metadata==6.7.0 - iniconfig==2.0.0 - mock==1.0.1 - nose==1.3.6 - packaging==24.0 - pluggy==1.2.0 - pytest==7.4.4 - requests==2.31.0 - tomli==2.0.1 - typing-extensions==4.7.1 - urllib3==2.0.7 - zipp==3.15.0 prefix: /opt/conda/envs/quickpay-python-client
[ "quickpay_api_client/tests/client_tests.py::TestQPClient::test_api_instance", "quickpay_api_client/tests/client_tests.py::TestQPClient::test_get_delegation", "quickpay_api_client/tests/client_tests.py::TestQPClient::test_post_delegation", "quickpay_api_client/tests/client_tests.py::TestQPClient::test_delete_delegation", "quickpay_api_client/tests/client_tests.py::TestQPClient::test_put_delegation", "quickpay_api_client/tests/client_tests.py::TestQPClient::test_patch_delegation", "quickpay_api_client/tests/client_tests.py::TestQPClient::test_non_http_method" ]
[ "quickpay_api_client/tests/api_tests.py::TestApi::test_perform_success", "quickpay_api_client/tests/api_tests.py::TestApi::test_perform_failure", "quickpay_api_client/tests/api_tests.py::TestApi::test_headers", "quickpay_api_client/tests/api_tests.py::TestApi::test_perform_when_raw" ]
[]
[]
MIT License
498
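Side note on the quickpay record above (illustrative, not part of the dataset fields): the fix renames both the PyPI distribution (`quickpay-api-client`) and the importable package (`quickpay_api_client`), two distinct names that the README has to keep in sync. Assuming the naming introduced by that patch, a minimal usage sketch would be:

```python
# Install the distribution name (command line or requirements.txt):
#   pip install quickpay-api-client
#
# ...then import under the package's Python name:
from quickpay_api_client import QPClient

# Anonymous client, per the patched README; pass a secret string such as
# ":<api-key>" (or "<login>:<password>") to authenticate instead.
client = QPClient()
```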
sympy__sympy-10997
70f5f1ff0a1d6a204544845680bb3ce780fa17c7
2016-04-11 20:37:44
8bb5814067cfa0348fb8b708848f35dba2b55ff4
asmeurer: Don't know what your aims are here, but I have found a [couple of issues](https://github.com/sympy/sympy/issues?utf8=%E2%9C%93&q=is%3Aopen+is%3Aissue+author%3Aasmeurer+diophantine) I've found with diophantine in the past that don't appear to be addressed here.

smichr:
> I have found a couple of issues

Those (and others) have now been addressed.
diff --git a/doc/src/modules/solvers/diophantine.rst b/doc/src/modules/solvers/diophantine.rst index 743a28f1d4..cb07f93588 100644 --- a/doc/src/modules/solvers/diophantine.rst +++ b/doc/src/modules/solvers/diophantine.rst @@ -45,8 +45,10 @@ structured in the following manner. - :py:meth:`~sympy.solvers.diophantine.diop_linear` - :py:meth:`~sympy.solvers.diophantine.diop_quadratic` - :py:meth:`~sympy.solvers.diophantine.diop_ternary_quadratic` + - :py:meth:`~sympy.solvers.diophantine.diop_ternary_quadratic_normal` - :py:meth:`~sympy.solvers.diophantine.diop_general_pythagorean` - :py:meth:`~sympy.solvers.diophantine.diop_general_sum_of_squares` + - :py:meth:`~sympy.solvers.diophantine.diop_general_sum_of_even_powers` - :py:meth:`~sympy.solvers.diophantine.merge_solution` @@ -135,7 +137,7 @@ For linear Diophantine equations, the customized parameter is the prefix used for each free variable in the solution. Consider the following example: >>> diop_solve(2*x + 3*y - 5*z + 7, m) -(m_0, -9*m_0 - 5*m_1 - 14, -5*m_0 - 3*m_1 - 7) +(m_0, m_0 + 5*m_1 - 14, m_0 + 3*m_1 - 7) In the solution above, m_0 and m_1 are independent free variables. @@ -228,7 +230,7 @@ We can solve an equation of the form `X^2 - DY^2 = N` by passing `D` and `N` to >>> diop_DN(5, 920) [] -Unfortunately, our equation does not have solutions. +Unfortunately, our equation has no solution. Now let's turn to homogeneous ternary quadratic equations. These equations are of the form `ax^2 + by^2 + cz^2 + dxy + eyz + fzx = 0`. These type of equations @@ -236,11 +238,11 @@ either have infinitely many solutions or no solutions (except the obvious solution (0, 0, 0)) >>> diophantine(3*x**2 + 4*y**2 - 5*z**2 + 4*x*y + 6*y*z + 7*z*x) -set() +set([(0, 0, 0)]) >>> diophantine(3*x**2 + 4*y**2 - 5*z**2 + 4*x*y - 7*y*z + 7*z*x) set([(-16*p**2 + 28*p*q + 20*q**2, 3*p**2 + 38*p*q - 25*q**2, 4*p**2 - 24*p*q + 68*q**2)]) -If you are only interested about a base solution rather than the parameterized +If you are only interested in a base solution rather than the parameterized general solution (to be more precise, one of the general solutions), you can use :py:meth:`~sympy.solvers.diophantine.diop_ternary_quadratic`. @@ -272,15 +274,41 @@ also be solved using the Diophantine module. set([(70*t1**2 + 70*t2**2 + 70*t3**2 + 70*t4**2 - 70*t5**2, 105*t1*t5, 420*t2*t5, 60*t3*t5, 210*t4*t5, 42*t1**2 + 42*t2**2 + 42*t3**2 + 42*t4**2 + 42*t5**2)]) function :py:meth:`~sympy.solvers.diophantine.diop_general_pythagorean` can -also be called directly to solve the same equation. This is true about the -general sum of squares too. Either you can call +also be called directly to solve the same equation. Either you can call :py:meth:`~sympy.solvers.diophantine.diop_general_pythagorean` or use the high -level API. - ->>> diophantine(a**2 + b**2 + c**2 + d**2 + e**2 + f**2 - 112) -set([(8, 4, 4, 4, 0, 0)]) - -If you want to get a more thorough idea about the the Diophantine module please +level API. For the general sum of squares, this is also true, but one advantage +of calling :py:meth:`~sympy.solvers.diophantine.diop_general_sum_of_squares` is that +you can control how many solutions are returned. 
+ +>>> from sympy.solvers.diophantine import diop_general_sum_of_squares +>>> eq = a**2 + b**2 + c**2 + d**2 - 18 +>>> diophantine(eq) +set([(0, 0, 3, 3), (0, 1, 1, 4), (1, 2, 2, 3)]) +>>> diop_general_sum_of_squares(eq, 2) +set([(0, 0, 3, 3), (1, 2, 2, 3)]) + +The :py:meth:`~sympy.solvers.diophantine.sum_of_squares` routine will +providean iterator that returns solutions and one may control whether +the solutions contain zeros or not (and the solutions not containing +zeros are returned first): + +>>> from sympy.solvers.diophantine import sum_of_squares +>>> sos = sum_of_squares(18, 4, zeros=True) +>>> next(sos) +(1, 2, 2, 3) +>>> next(sos) +(0, 0, 3, 3) + + +Simple Eqyptian fractions can be found with the Diophantine module, too. +For example, here are the ways that one might represent 1/2 as a sum of two +unit fractions: + +>>> from sympy import Eq, S +>>> diophantine(Eq(1/x + 1/y, S(1)/2)) +set([(-2, 1), (1, -2), (3, 6), (4, 4), (6, 3)]) + +To get a more thorough understanding of the Diophantine module, please refer to the following blog. http://thilinaatsympy.wordpress.com/ @@ -301,21 +329,28 @@ References User Functions -------------- -These are functions that are imported into the global namespace with ``from -sympy import *``. These functions are intended for use by ordinary users of SymPy. +This functions is imported into the global namespace +with ``from sympy import *``: :func:`diophantine` ^^^^^^^^^^^^^^^^^^^ .. autofunction:: sympy.solvers.diophantine.diophantine -:func:`diop_solve` -^^^^^^^^^^^^^^^^^^ -.. autofunction:: sympy.solvers.diophantine.diop_solve +And this function is imported with ``from sympy.solvers.diophantine import *``: :func:`classify_diop` ^^^^^^^^^^^^^^^^^^^^^ .. autofunction:: sympy.solvers.diophantine.classify_diop +Internal Functions +------------------ + +These functions are intended for internal use in the Diophantine module. + +:func:`diop_solve` +^^^^^^^^^^^^^^^^^^ +.. autofunction:: sympy.solvers.diophantine.diop_solve + :func:`diop_linear` ^^^^^^^^^^^^^^^^^^^ .. autofunction:: sympy.solvers.diophantine.diop_linear @@ -368,6 +403,10 @@ sympy import *``. These functions are intended for use by ordinary users of SymP ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autofunction:: sympy.solvers.diophantine.diop_general_sum_of_squares +:func:`diop_general_sum_of_even_powers` +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +.. autofunction:: sympy.solvers.diophantine.diop_general_sum_of_even_powers + :func:`partition` ^^^^^^^^^^^^^^^^^ .. autofunction:: sympy.solvers.diophantine.partition @@ -380,9 +419,13 @@ sympy import *``. These functions are intended for use by ordinary users of SymP ^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autofunction:: sympy.solvers.diophantine.sum_of_four_squares -Internal Functions ------------------- -These functions are intended for the internal use in Diophantine module. +:func:`sum_of_powers` +^^^^^^^^^^^^^^^^^^^^^ +.. autofunction:: sympy.solvers.diophantine.sum_of_powers + +:func:`sum_of_squares` +^^^^^^^^^^^^^^^^^^^^^^ +.. autofunction:: sympy.solvers.diophantine.sum_of_squares :obj:`merge_solution` ^^^^^^^^^^^^^^^^^^^^^ @@ -392,10 +435,6 @@ These functions are intended for the internal use in Diophantine module. ^^^^^^^^^^^^^^^^ .. autofunction:: sympy.solvers.diophantine.divisible -:obj:`extended_euclid` -^^^^^^^^^^^^^^^^^^^^^^ -.. autofunction:: sympy.solvers.diophantine.extended_euclid - :obj:`PQa` ^^^^^^^^^^ .. 
autofunction:: sympy.solvers.diophantine.PQa @@ -404,10 +443,6 @@ These functions are intended for the internal use in Diophantine module. ^^^^^^^^^^^^^^^^^ .. autofunction:: sympy.solvers.diophantine.equivalent -:obj:`simplified` -^^^^^^^^^^^^^^^^^ -.. autofunction:: sympy.solvers.diophantine.simplified - :obj:`parametrize_ternary_quadratic` ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autofunction:: sympy.solvers.diophantine.parametrize_ternary_quadratic @@ -429,16 +464,12 @@ These functions are intended for the internal use in Diophantine module. .. autofunction:: sympy.solvers.diophantine.holzer :obj:`prime_as_sum_of_two_squares` -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autofunction:: sympy.solvers.diophantine.prime_as_sum_of_two_squares -:obj:`pairwise_prime` -^^^^^^^^^^^^^^^^^^^^^ -.. autofunction:: sympy.solvers.diophantine.pairwise_prime - -:obj:`make_prime` +:obj:`sqf_normal` ^^^^^^^^^^^^^^^^^ -.. autofunction:: sympy.solvers.diophantine.make_prime +.. autofunction:: sympy.solvers.diophantine.sqf_normal :obj:`reconstruct` ^^^^^^^^^^^^^^^^^^ diff --git a/sympy/core/add.py b/sympy/core/add.py index 21caa0bc22..b70c51c12f 100644 --- a/sympy/core/add.py +++ b/sympy/core/add.py @@ -333,17 +333,13 @@ def as_coeff_add(self, *deps): return coeff, notrat + self.args[1:] return S.Zero, self.args - def as_coeff_Add(self): + def as_coeff_Add(self, rational=False): """Efficiently extract the coefficient of a summation. """ coeff, args = self.args[0], self.args[1:] - if coeff.is_Number: - if len(args) == 1: - return coeff, args[0] - else: - return coeff, self._new_rawargs(*args) - else: - return S.Zero, self + if coeff.is_Number and not rational or coeff.is_Rational: + return coeff, self._new_rawargs(*args) + return S.Zero, self # Note, we intentionally do not implement Add.as_coeff_mul(). Rather, we # let Expr.as_coeff_mul() just always return (S.One, self) for an Add. See diff --git a/sympy/core/expr.py b/sympy/core/expr.py index 3c81fb4ed1..1da1255cf6 100644 --- a/sympy/core/expr.py +++ b/sympy/core/expr.py @@ -2832,7 +2832,7 @@ def as_coeff_Mul(self, rational=False): """Efficiently extract the coefficient of a product. """ return S.One, self - def as_coeff_Add(self): + def as_coeff_Add(self, rational=False): """Efficiently extract the coefficient of a summation. """ return S.Zero, self diff --git a/sympy/core/numbers.py b/sympy/core/numbers.py index ca0b2dd8f3..aeff883ef7 100644 --- a/sympy/core/numbers.py +++ b/sympy/core/numbers.py @@ -549,9 +549,11 @@ def as_coeff_Mul(self, rational=False): return S.One, self return (self, S.One) if self else (S.One, self) - def as_coeff_Add(self): + def as_coeff_Add(self, rational=False): """Efficiently extract the coefficient of a summation. """ - return self, S.Zero + if not rational: + return self, S.Zero + return S.Zero, self def gcd(self, other): """Compute GCD of `self` and `other`. """ @@ -1671,6 +1673,14 @@ def as_content_primitive(self, radical=False, clear=True): return -self, S.NegativeOne return S.One, self + def as_coeff_Mul(self, rational=False): + """Efficiently extract the coefficient of a product. """ + return self, S.One + + def as_coeff_Add(self, rational=False): + """Efficiently extract the coefficient of a summation. """ + return self, S.Zero + # int -> Integer _intcache = {} @@ -2276,6 +2286,10 @@ def __nonzero__(self): __bool__ = __nonzero__ + def as_coeff_Mul(self, rational=False): # XXX this routine should be deleted + """Efficiently extract the coefficient of a summation. 
""" + return S.One, self + class One(with_metaclass(Singleton, IntegerConstant)): """The number one. diff --git a/sympy/ntheory/continued_fraction.py b/sympy/ntheory/continued_fraction.py index dd94a59f10..07f86f2720 100644 --- a/sympy/ntheory/continued_fraction.py +++ b/sympy/ntheory/continued_fraction.py @@ -1,6 +1,7 @@ from sympy.core.numbers import Integer, Rational + def continued_fraction_periodic(p, q, d=0): r""" Find the periodic continued fraction expansion of a quadratic irrational. diff --git a/sympy/ntheory/factor_.py b/sympy/ntheory/factor_.py index 9242120dfd..2138c24262 100644 --- a/sympy/ntheory/factor_.py +++ b/sympy/ntheory/factor_.py @@ -1669,7 +1669,7 @@ def core(n, t=2): See Also ======== - factorint + factorint, sympy.solvers.diophantine.square_factor """ n = as_int(n) diff --git a/sympy/printing/llvmjitcode.py b/sympy/printing/llvmjitcode.py index 51e0e65c41..f1e092f8ad 100644 --- a/sympy/printing/llvmjitcode.py +++ b/sympy/printing/llvmjitcode.py @@ -34,10 +34,6 @@ def __init__(self, module, builder, fn, *args, **kwargs): self.builder = builder self.fn = fn self.ext_fn = {} # keep track of wrappers to external functions - self.tmp_var = {} - - def _add_tmp_var(self, name, value): - self.tmp_var[name] = value def _print_Number(self, n, **kwargs): return ll.Constant(self.fp_type, float(n)) @@ -46,13 +42,8 @@ def _print_Integer(self, expr): return ll.Constant(self.fp_type, float(expr.p)) def _print_Symbol(self, s): - val = self.tmp_var.get(s) - if not val: - # look up parameter with name s - val = self.func_arg_map.get(s) - if not val: - raise LookupError("Symbol not found: %s" % s) - return val + # look up parameter with name s + return self.func_arg_map.get(s) def _print_Pow(self, expr): base0 = self._print(expr.base) @@ -122,13 +113,7 @@ def _print_Indexed(self, expr): return value def _print_Symbol(self, s): - val = self.tmp_var.get(s) - if val: - return val - array, idx = self.func_arg_map.get(s, [None, 0]) - if not array: - raise LookupError("Symbol not found: %s" % s) array_ptr = self.builder.gep(array, [ll.Constant(ll.IntType(32), idx)]) fp_array_ptr = self.builder.bitcast(array_ptr, ll.PointerType(self.fp_type)) @@ -165,8 +150,6 @@ def _from_ctype(self, ctype): return ll.PointerType(self.fp_type) if ctype == ctypes.c_void_p: return ll.PointerType(ll.IntType(32)) - if ctype == ctypes.py_object: - return ll.PointerType(ll.IntType(32)) print("Unhandled ctype = %s" % str(ctype)) @@ -201,65 +184,12 @@ def _create_function(self, expr): lj = LLVMJitPrinter(self.module, builder, self.fn, func_arg_map=self.param_dict) - ret = self._convert_expr(lj, expr) - lj.builder.ret(self._wrap_return(lj, ret)) + ret = lj._print(expr) + lj.builder.ret(ret) strmod = str(self.module) return strmod - def _wrap_return(self, lj, vals): - # Return a single double if there is one return value, - # else return a tuple of doubles. - - # Don't wrap return value in this case - if self.signature.ret_type == ctypes.c_double: - return vals[0] - - # Use this instead of a real PyObject* - void_ptr = ll.PointerType(ll.IntType(32)) - - # Create a wrapped double: PyObject* PyFloat_FromDouble(double v) - wrap_type = ll.FunctionType(void_ptr, [self.fp_type]) - wrap_fn = ll.Function(lj.module, wrap_type, "PyFloat_FromDouble") - - wrapped_vals = [lj.builder.call(wrap_fn, [v]) for v in vals] - if len(vals) == 1: - final_val = wrapped_vals[0] - else: - # Create a tuple: PyObject* PyTuple_Pack(Py_ssize_t n, ...) 
- - # This should be Py_ssize_t - tuple_arg_types = [ll.IntType(32)] - - tuple_arg_types.extend([void_ptr]*len(vals)) - tuple_type = ll.FunctionType(void_ptr, tuple_arg_types) - tuple_fn = ll.Function(lj.module, tuple_type, "PyTuple_Pack") - - tuple_args = [ll.Constant(ll.IntType(32), len(wrapped_vals))] - tuple_args.extend(wrapped_vals) - - final_val = lj.builder.call(tuple_fn, tuple_args) - - return final_val - - def _convert_expr(self, lj, expr): - try: - # Match CSE return data structure. - if len(expr) == 2: - tmp_exprs = expr[0] - final_exprs = expr[1] - if len(final_exprs) != 1 and self.signature.ret_type == ctypes.c_double: - raise NotImplementedError("Return of multiple expressions not supported for this callback") - for name, e in tmp_exprs: - val = lj._print(e) - lj._add_tmp_var(name, val) - except TypeError: - final_exprs = [expr] - - vals = [lj._print(e) for e in final_exprs] - - return vals - def _compile_function(self, strmod): global exe_engines llmod = llvm.parse_assembly(strmod) @@ -308,18 +238,13 @@ def _create_function(self, expr): lj = LLVMJitCallbackPrinter(self.module, builder, self.fn, func_arg_map=self.param_dict) - ret = self._convert_expr(lj, expr) + ret = lj._print(expr) if self.signature.ret_arg: - output_fp_ptr = builder.bitcast(self.fn.args[self.signature.ret_arg], - ll.PointerType(self.fp_type)) - for i, val in enumerate(ret): - index = ll.Constant(ll.IntType(32), i) - output_array_ptr = builder.gep(output_fp_ptr, [index]) - builder.store(val, output_array_ptr) + builder.store(ret, self.fn.args[self.signature.ret_arg]) builder.ret(ll.Constant(ll.IntType(32), 0)) # return success else: - lj.builder.ret(self._wrap_return(lj, ret)) + lj.builder.ret(ret) strmod = str(self.module) return strmod @@ -370,7 +295,7 @@ def llvm_callable(args, expr, callback_type=None): Arguments to the generated function. Usually the free symbols in the expression. Currently each one is assumed to convert to a double precision scalar. - expr : Expr, or (Replacements, Expr) as returned from 'cse' + expr : Expr Expression to compile. callback_type : string Create function with signature appropriate to use as a callback. @@ -417,29 +342,12 @@ def llvm_callable(args, expr, callback_type=None): The second ('scipy.integrate.test') is only useful for directly calling the function using ctypes variables. It will not pass the signature checks for scipy.integrate. - - The return value from the cse module can also be compiled. This - can improve the performance of the compiled function. If multiple - expressions are given to cse, the compiled function returns a tuple. - The 'cubature' callback handles multiple expressions (set `fdim` - to match in the integration call.) 
- >>> import sympy.printing.llvmjitcode as jit - >>> from sympy import cse, exp - >>> from sympy.abc import x,y - >>> e1 = x*x + y*y - >>> e2 = 4*(x*x + y*y) + 8.0 - >>> after_cse = cse([e1,e2]) - >>> after_cse - ([(x0, x**2), (x1, y**2)], [x0 + x1, 4*x0 + 4*x1 + 8.0]) - >>> j1 = jit.llvm_callable([x,y], after_cse) - >>> j1(1.0, 2.0) - (5.0, 28.0) ''' if not llvmlite: raise ImportError("llvmlite is required for llvmjitcode") - signature = CodeSignature(ctypes.py_object) + signature = CodeSignature(ctypes.c_double) arg_ctypes = [] if callback_type is None: @@ -447,7 +355,6 @@ def llvm_callable(args, expr, callback_type=None): arg_ctype = ctypes.c_double arg_ctypes.append(arg_ctype) elif callback_type == 'scipy.integrate' or callback_type == 'scipy.integrate.test': - signature.ret_type = ctypes.c_double arg_ctypes = [ctypes.c_int, ctypes.POINTER(ctypes.c_double)] arg_ctypes_formal = [ctypes.c_int, ctypes.c_double] signature.input_arg = 1 diff --git a/sympy/simplify/fu.py b/sympy/simplify/fu.py index 3eadde52d1..e74b1a0c54 100644 --- a/sympy/simplify/fu.py +++ b/sympy/simplify/fu.py @@ -176,11 +176,8 @@ References ========== - -Fu, Hongguang, Xiuqin Zhong, and Zhenbing Zeng. "Automated and readable -simplification of trigonometric expressions." Mathematical and computer -modelling 44.11 (2006): 1169-1177. -http://rfdz.ph-noe.ac.at/fileadmin/Mathematik_Uploads/ACDCA/DESTIME2006/DES_contribs/Fu/simplification.pdf +http://rfdz.ph-noe.ac.at/fileadmin/Mathematik_Uploads/ACDCA/ +DESTIME2006/DES_contribs/Fu/simplification.pdf http://www.sosmath.com/trig/Trig5/trig5/pdf/pdf.html gives a formula sheet. diff --git a/sympy/simplify/simplify.py b/sympy/simplify/simplify.py index e93fdf8246..a8413332a6 100644 --- a/sympy/simplify/simplify.py +++ b/sympy/simplify/simplify.py @@ -1288,14 +1288,16 @@ def clear_coefficients(expr, rhs=S.Zero): """ was = None free = expr.free_symbols - while was != expr: + if expr.is_Rational: + return (S.Zero, rhs - expr) + while expr and was != expr: was = expr m, expr = ( expr.as_content_primitive() if free else - factor_terms(expr).as_coeff_Mul()) + factor_terms(expr).as_coeff_Mul(rational=True)) rhs /= m - c, expr = expr.as_coeff_Add() + c, expr = expr.as_coeff_Add(rational=True) rhs -= c expr = signsimp(expr, evaluate = False) if _coeff_isneg(expr): diff --git a/sympy/solvers/__init__.py b/sympy/solvers/__init__.py index ba51543f56..b5e7f6355f 100644 --- a/sympy/solvers/__init__.py +++ b/sympy/solvers/__init__.py @@ -12,6 +12,8 @@ solve_undetermined_coeffs, nsolve, solve_linear, checksol, \ det_quick, inv_quick +from .diophantine import diophantine + from .recurr import rsolve, rsolve_poly, rsolve_ratio, rsolve_hyper from .ode import checkodesol, classify_ode, dsolve, \ diff --git a/sympy/solvers/diophantine.py b/sympy/solvers/diophantine.py index f85bf48485..856e09deae 100644 --- a/sympy/solvers/diophantine.py +++ b/sympy/solvers/diophantine.py @@ -1,47 +1,121 @@ from __future__ import print_function, division -from sympy import (Add, ceiling, divisors, factor_list, factorint, floor, igcd, - ilcm, Integer, integer_nthroot, isprime, Matrix, Mul, nextprime, - perfect_power, Poly, S, sign, solve, sqrt, Subs, Symbol, symbols, sympify, - Wild) - +from sympy.core.add import Add +from sympy.core.compatibility import as_int, is_sequence, range +from sympy.core.exprtools import factor_terms from sympy.core.function import _mexpand -from sympy.simplify.radsimp import rad_rationalize -from sympy.utilities import default_sort_key, numbered_symbols -from sympy.core.numbers import 
igcdex -from sympy.ntheory.residue_ntheory import sqrt_mod -from sympy.core.compatibility import range +from sympy.core.mul import Mul +from sympy.core.numbers import Rational +from sympy.core.numbers import igcdex, ilcm, igcd +from sympy.core.power import integer_nthroot from sympy.core.relational import Eq +from sympy.core.singleton import S +from sympy.core.symbol import Symbol, symbols +from sympy.functions.elementary.complexes import sign +from sympy.functions.elementary.integers import floor +from sympy.functions.elementary.miscellaneous import sqrt +from sympy.matrices.dense import MutableDenseMatrix as Matrix +from sympy.ntheory.factor_ import ( + divisors, factorint, multiplicity, perfect_power) +from sympy.ntheory.generate import nextprime +from sympy.ntheory.primetest import is_square, isprime +from sympy.ntheory.residue_ntheory import sqrt_mod +from sympy.polys.polyerrors import GeneratorsNeeded +from sympy.polys.polytools import Poly, factor_list +from sympy.simplify.simplify import signsimp from sympy.solvers.solvers import check_assumptions +from sympy.solvers.solveset import solveset_real +from sympy.utilities import default_sort_key, numbered_symbols +from sympy.utilities.misc import filldedent + + +# these are imported with 'from sympy.solvers.diophantine import * +__all__ = ['diophantine', 'classify_diop'] + + +# these types are known (but not necessarily handled) +diop_known = { + "binary_quadratic", + "cubic_thue", + "general_pythagorean", + "general_sum_of_even_powers", + "general_sum_of_squares", + "homogeneous_general_quadratic", + "homogeneous_ternary_quadratic", + "homogeneous_ternary_quadratic_normal", + "inhomogeneous_general_quadratic", + "inhomogeneous_ternary_quadratic", + "linear", + "univariate"} + + +def _is_int(i): + try: + as_int(i) + return True + except ValueError: + pass + + +def _sorted_tuple(*i): + return tuple(sorted(i)) + -__all__ = ['base_solution_linear', 'classify_diop', 'cornacchia', 'descent', - 'diop_bf_DN', 'diop_DN', 'diop_general_pythagorean', - 'diop_general_sum_of_squares', 'diop_linear', 'diop_quadratic', - 'diop_solve', 'diop_ternary_quadratic', 'diophantine', 'find_DN', - 'partition', 'square_factor', 'sum_of_four_squares', - 'sum_of_three_squares', 'transformation_to_DN'] +def _remove_gcd(*x): + try: + g = igcd(*x) + return tuple([i//g for i in x]) + except ValueError: + return x + except TypeError: + raise TypeError('_remove_gcd(a,b,c) or _remove_gcd(*container)') -def diophantine(eq, param=symbols("t", integer=True)): + +def _rational_pq(a, b): + # return `(numer, denom)` for a/b; sign in numer and gcd removed + return _remove_gcd(sign(b)*a, abs(b)) + + +def _nint_or_floor(p, q): + # return nearest int to p/q; in case of tie return floor(p/q) + w, r = divmod(p, q) + if abs(r) <= abs(q)//2: + return w + return w + 1 + + +def _odd(i): + return i % 2 != 0 + + +def _even(i): + return i % 2 == 0 + + +def diophantine(eq, param=symbols("t", integer=True), syms=None): """ Simplify the solution procedure of diophantine equation ``eq`` by converting it into a product of terms which should equal zero. For example, when solving, `x^2 - y^2 = 0` this is treated as - `(x + y)(x - y) = 0` and `x+y = 0` and `x-y = 0` are solved independently - and combined. Each term is solved by calling ``diop_solve()``. + `(x + y)(x - y) = 0` and `x + y = 0` and `x - y = 0` are solved + independently and combined. Each term is solved by calling + ``diop_solve()``. - Output of ``diophantine()`` is a set of tuples. 
Each tuple represents a - solution of the input equation. In a tuple, solution for each variable is - listed according to the alphabetic order of input variables. i.e. if we have - an equation with two variables `a` and `b`, first element of the tuple will - give the solution for `a` and the second element will give the solution for - `b`. + Output of ``diophantine()`` is a set of tuples. The elements of the + tuple are the solutions for each variable in the the equation and + are arranged according to the alphabetic ordering of the variables. + e.g. For an equation with two variables, `a` and `b`, the first + element of the tuple is the solution for `a` and the second for `b`. Usage ===== - ``diophantine(eq, t)``: Solve the diophantine equation ``eq``. - ``t`` is the parameter to be used by ``diop_solve()``. + ``diophantine(eq, t, syms, factor)``: Solve the diophantine + equation ``eq``. + ``t`` is the optional parameter to be used by ``diop_solve()``. + ``syms`` is an optional list of symbols which determines the + order of the elements in the returned tuple. Details ======= @@ -55,12 +129,12 @@ def diophantine(eq, param=symbols("t", integer=True)): >>> from sympy.solvers.diophantine import diophantine >>> from sympy.abc import x, y, z >>> diophantine(x**2 - y**2) - set([(-t_0, -t_0), (t_0, -t_0)]) + set([(t_0, -t_0), (t_0, t_0)]) - #>>> diophantine(x*(2*x + 3*y - z)) - #set([(0, n1, n2), (3*t - z, -2*t + z, z)]) - #>>> diophantine(x**2 + 3*x*y + 4*x) - #set([(0, n1), (3*t - 4, -t)]) + >>> diophantine(x*(2*x + 3*y - z)) + set([(0, n1, n2), (t_0, t_1, 2*t_0 + 3*t_1)]) + >>> diophantine(x**2 + 3*x*y + 4*x) + set([(0, n1), (3*t_0 - 4, -t_0)]) See Also ======== @@ -70,36 +144,85 @@ def diophantine(eq, param=symbols("t", integer=True)): if isinstance(eq, Eq): eq = eq.lhs - eq.rhs - eq = Poly(eq).as_expr() - if not eq.is_polynomial() or eq.is_number: - raise TypeError("Equation input format not supported") - - var = list(eq.expand(force=True).free_symbols) - var.sort(key=default_sort_key) - - terms = factor_list(eq)[1] + try: + var = list(eq.expand(force=True).free_symbols) + var.sort(key=default_sort_key) + if syms: + if not is_sequence(syms): + raise TypeError( + 'syms should be given as a sequence, e.g. 
a list') + syms = [i for i in syms if i in var] + if syms != var: + map = dict(zip(syms, range(len(syms)))) + return set([tuple([t[map[i]] for i in var]) + for t in diophantine(eq, param)]) + n, d = eq.as_numer_denom() + if not n.free_symbols: + return set() + if d.free_symbols: + dsol = diophantine(d) + good = diophantine(n) - dsol + return set([s for s in good if _mexpand(d.subs(zip(var, s)))]) + else: + eq = n + eq = factor_terms(eq) + assert not eq.is_number + eq = eq.as_independent(*var, as_Add=False)[1] + p = Poly(eq) + assert not any(g.is_number for g in p.gens) + eq = p.as_expr() + assert eq.is_polynomial() + except (GeneratorsNeeded, AssertionError, AttributeError): + raise TypeError(filldedent(''' + Equation should be a polynomial with Rational coefficients.''')) + + try: + # if we know that factoring should not be attempted, skip + # the factoring step + v, c, t = classify_diop(eq) + if t == 'general_sum_of_squares': + # trying to factor such expressions will sometimes hang + terms = [(eq, 1)] + else: + raise TypeError + except (TypeError, NotImplementedError): + terms = factor_list(eq)[1] sols = set([]) for term in terms: - base = term[0] - - var_t, jnk, eq_type = classify_diop(base) - if not var_t: - continue + base, _ = term + var_t, _, eq_type = classify_diop(base, _dict=False) + _, base = signsimp(base, evaluate=False).as_coeff_Mul() solution = diop_solve(base, param) - if eq_type in ["linear", "homogeneous_ternary_quadratic", "general_pythagorean"]: - if merge_solution(var, var_t, solution) != (): - sols.add(merge_solution(var, var_t, solution)) - - elif eq_type in ["binary_quadratic", "general_sum_of_squares", "univariate"]: + if eq_type in [ + "linear", + "homogeneous_ternary_quadratic", + "homogeneous_ternary_quadratic_normal", + "general_pythagorean"]: + sols.add(merge_solution(var, var_t, solution)) + + elif eq_type in [ + "binary_quadratic", + "general_sum_of_squares", + "general_sum_of_even_powers", + "univariate"]: for sol in solution: - if merge_solution(var, var_t, sol) != (): - sols.add(merge_solution(var, var_t, sol)) + sols.add(merge_solution(var, var_t, sol)) - return sols + else: + raise NotImplementedError('unhandled type: %s' % eq_type) + + # remove null merge results + if () in sols: + sols.remove(()) + null = tuple([0]*len(var)) + # if there is no solution, return trivial solution + if not sols and eq.subs(zip(var, null)) is S.Zero: + sols.add(null) + return set([S(i) for i in sols]) def merge_solution(var, var_t, solution): @@ -108,39 +231,39 @@ def merge_solution(var, var_t, solution): equations. For example when solving the equation `(x - y)(x^2 + y^2 - z^2) = 0`, - solutions for each of the equations `x-y = 0` and `x^2 + y^2 - z^2` are + solutions for each of the equations `x - y = 0` and `x^2 + y^2 - z^2` are found independently. Solutions for `x - y = 0` are `(x, y) = (t, t)`. But we should introduce a value for z when we output the solution for the original equation. This function converts `(t, t)` into `(t, t, n_{1})` where `n_{1}` is an integer parameter. 
""" - l = [] + sol = [] if None in solution: return () solution = iter(solution) - params = numbered_symbols("n", Integer=True, start=1) + params = numbered_symbols("n", integer=True, start=1) for v in var: if v in var_t: - l.append(next(solution)) + sol.append(next(solution)) else: - l.append(next(params)) + sol.append(next(params)) - for val, symb in zip(l, var): + for val, symb in zip(sol, var): if check_assumptions(val, **symb.assumptions0) is False: return tuple() - return tuple(l) + return tuple(sol) def diop_solve(eq, param=symbols("t", integer=True)): """ Solves the diophantine equation ``eq``. - Similar to ``diophantine()`` but doesn't try to factor ``eq`` as latter - does. Uses ``classify_diop()`` to determine the type of the eqaution and - calls the appropriate solver function. + Unlike ``diophantine()``, factoring of ``eq`` is not attempted. Uses + ``classify_diop()`` to determine the type of the equation and calls + the appropriate solver function. Usage ===== @@ -161,19 +284,19 @@ def diop_solve(eq, param=symbols("t", integer=True)): >>> from sympy.abc import x, y, z, w >>> diop_solve(2*x + 3*y - 5) (3*t_0 - 5, -2*t_0 + 5) - >>> diop_solve(4*x + 3*y -4*z + 5) - (t_0, -4*t_1 + 5, t_0 - 3*t_1 + 5) - >>> diop_solve(x + 3*y - 4*z + w -6) - (t_0, t_0 + t_1, -2*t_0 - 3*t_1 - 4*t_2 - 6, -t_0 - 2*t_1 - 3*t_2 - 6) + >>> diop_solve(4*x + 3*y - 4*z + 5) + (t_0, 8*t_0 + 4*t_1 + 5, 7*t_0 + 3*t_1 + 5) + >>> diop_solve(x + 3*y - 4*z + w - 6) + (t_0, t_0 + t_1, 6*t_0 + 5*t_1 + 4*t_2 - 6, 5*t_0 + 4*t_1 + 3*t_2 - 6) >>> diop_solve(x**2 + y**2 - 5) - set([(-2, -1), (-2, 1), (2, -1), (2, 1)]) + set([(-1, -2), (-1, 2), (1, -2), (1, 2)]) See Also ======== diophantine() """ - var, coeff, eq_type = classify_diop(eq) + var, coeff, eq_type = classify_diop(eq, _dict=False) if eq_type == "linear": return _diop_linear(var, coeff, param) @@ -183,167 +306,158 @@ def diop_solve(eq, param=symbols("t", integer=True)): elif eq_type == "homogeneous_ternary_quadratic": x_0, y_0, z_0 = _diop_ternary_quadratic(var, coeff) - return _parametrize_ternary_quadratic((x_0, y_0, z_0), var, coeff) + return _parametrize_ternary_quadratic( + (x_0, y_0, z_0), var, coeff) + + elif eq_type == "homogeneous_ternary_quadratic_normal": + x_0, y_0, z_0 = _diop_ternary_quadratic_normal(var, coeff) + return _parametrize_ternary_quadratic( + (x_0, y_0, z_0), var, coeff) elif eq_type == "general_pythagorean": return _diop_general_pythagorean(var, coeff, param) elif eq_type == "univariate": - l = solve(eq) - s = set([]) - - for soln in l: - if isinstance(soln, Integer): - s.add((soln,)) - return s + return set([(int(i),) for i in solveset_real( + eq, var[0]).intersect(S.Integers)]) elif eq_type == "general_sum_of_squares": - return _diop_general_sum_of_squares(var, coeff) - - -def classify_diop(eq): - """ - Helper routine used by diop_solve() to find the type of the ``eq`` etc. - - Returns a tuple containing the type of the diophantine equation along with - the variables(free symbols) and their coefficients. Variables are returned - as a list and coefficients are returned as a dict with the key being the - respective term and the constant term is keyed to Integer(1). Type is an - element in the set {"linear", "binary_quadratic", "general_pythagorean", - "homogeneous_ternary_quadratic", "univariate", "general_sum_of_squares"} - - Usage - ===== - - ``classify_diop(eq)``: Return variables, coefficients and type of the - ``eq``. - - Details - ======= - - ``eq`` should be an expression which is assumed to be zero. 
+ return _diop_general_sum_of_squares(var, -int(coeff[1]), limit=S.Infinity) + + elif eq_type == "general_sum_of_even_powers": + for k in coeff.keys(): + if k.is_Pow and coeff[k]: + p = k.exp + return _diop_general_sum_of_even_powers(var, p, -int(coeff[1]), limit=S.Infinity) + + if eq_type is not None and eq_type not in diop_known: + raise ValueError(filldedent(''' + Alhough this type of equation was identified, it is not yet + handled. It should, however, be listed in `diop_known` at the + top of this file. Developers should see comments at the end of + `classify_diop`. + ''')) # pragma: no cover + else: + raise NotImplementedError( + 'No solver has been written for %s.' % eq_type) - Examples - ======== - >>> from sympy.solvers.diophantine import classify_diop - >>> from sympy.abc import x, y, z, w, t - >>> classify_diop(4*x + 6*y - 4) - ([x, y], {1: -4, x: 4, y: 6}, 'linear') - >>> classify_diop(x + 3*y -4*z + 5) - ([x, y, z], {1: 5, x: 1, y: 3, z: -4}, 'linear') - >>> classify_diop(x**2 + y**2 - x*y + x + 5) - ([x, y], {1: 5, x: 1, x**2: 1, y: 0, y**2: 1, x*y: -1}, 'binary_quadratic') - """ +def classify_diop(eq, _dict=True): + # docstring supplied externally + try: + var = list(eq.free_symbols) + assert var + except (AttributeError, AssertionError): + raise ValueError('equation should have 1 or more free symbols') + var.sort(key=default_sort_key) eq = eq.expand(force=True) - coeff = eq.as_coefficients_dict() - diop_type = None - - var = [] - if isinstance(eq, Symbol): - var.append(eq) - coeff[eq] = Integer(1) - elif isinstance(eq, Mul) and Poly(eq).total_degree() == 1: - var.append(eq.as_two_terms()[1]) - coeff[eq.as_two_terms()[1]] = Integer(eq.as_two_terms()[0]) - else: - var = list(eq.free_symbols) - var.sort(key=default_sort_key) - coeff = dict([reversed(t.as_independent(*var)) for t in eq.args]) - - for c in coeff: - if not isinstance(coeff[c], Integer): - raise TypeError("Coefficients should be Integers") + if not all(_is_int(c) for c in coeff.values()): + raise TypeError("Coefficients should be Integers") - if Poly(eq).total_degree() == 1: + diop_type = None + total_degree = Poly(eq).total_degree() + homogeneous = 1 not in coeff + if total_degree == 1: diop_type = "linear" + elif len(var) == 1: diop_type = "univariate" - elif Poly(eq).total_degree() == 2 and len(var) == 2: + + elif total_degree == 2 and len(var) == 2: diop_type = "binary_quadratic" - x, y = var[:2] - if isinstance(eq, Mul): - coeff = {x**2: 0, x*y: eq.args[0], y**2: 0, x: 0, y: 0, Integer(1): 0} + elif total_degree == 2 and len(var) == 3 and homogeneous: + if set(coeff) & set(var): + diop_type = "inhomogeneous_ternary_quadratic" else: - for term in [x**2, y**2, x*y, x, y, Integer(1)]: - if term not in coeff.keys(): - coeff[term] = Integer(0) - - elif Poly(eq).total_degree() == 2 and len(var) == 3 and Integer(1) not in coeff.keys(): - for v in var: - if v in coeff.keys(): - diop_type = "inhomogeneous_ternary_quadratic" - break - else: - diop_type = "homogeneous_ternary_quadratic" + nonzero = [k for k in coeff if coeff[k]] + if len(nonzero) == 3 and all(i**2 in nonzero for i in var): + diop_type = "homogeneous_ternary_quadratic_normal" + else: + diop_type = "homogeneous_ternary_quadratic" - x, y, z = var[:3] + elif total_degree == 2 and len(var) >= 3: + if set(coeff) & set(var): + diop_type = "inhomogeneous_general_quadratic" + else: + # there may be Pow keys like x**2 or Mul keys like x*y + if any(k.is_Mul for k in coeff): # cross terms + if not homogeneous: + diop_type = "inhomogeneous_general_quadratic" + else: 
+ diop_type = "homogeneous_general_quadratic" + else: # all squares: x**2 + y**2 + ... + constant + if all(coeff[k] == 1 for k in coeff if k != 1): + diop_type = "general_sum_of_squares" + elif all(is_square(abs(coeff[k])) for k in coeff): + if abs(sum(sign(coeff[k]) for k in coeff)) == \ + len(var) - 2: + # all but one has the same sign + # e.g. 4*x**2 + y**2 - 4*z**2 + diop_type = "general_pythagorean" - for term in [x**2, y**2, z**2, x*y, y*z, x*z]: - if term not in coeff.keys(): - coeff[term] = Integer(0) + elif total_degree == 3 and len(var) == 2: + diop_type = "cubic_thue" - elif Poly(eq).degree() == 2 and len(var) >= 3: + elif (total_degree > 3 and total_degree % 2 == 0 and + all(k.is_Pow for k in coeff if k != 1)): + if all(coeff[k] == 1 for k in coeff if k != 1): + diop_type = 'general_sum_of_even_powers' - for v in var: - if v in coeff.keys(): - diop_type = "inhomogeneous_general_quadratic" - break + if diop_type is not None: + return var, dict(coeff) if _dict else coeff, diop_type - else: - if Integer(1) in coeff.keys(): - constant_term = True - else: - constant_term = False + # new diop type instructions + # -------------------------- + # if this error raises and the equation *can* be classified, + # * it should be identified in the if-block above + # * the type should be added to the diop_known + # if a solver can be written for it, + # * a dedicated handler should be written (e.g. diop_linear) + # * it should be passed to that handler in diop_solve + raise NotImplementedError(filldedent(''' + This equation is not yet recognized or else has not been + simplified sufficiently to put it in a form recognized by + diop_classify().''')) - non_square_degree_2_terms = False - for v in var: - for u in var: - if u != v and u*v in coeff.keys(): - non_square_degree_2_terms = True - break - if non_square_degree_2_terms: - break - if constant_term and non_square_degree_2_terms: - diop_type = "inhomogeneous_general_quadratic" +classify_diop.func_doc = ''' + Helper routine used by diop_solve() to find information about ``eq``. - elif constant_term and not non_square_degree_2_terms: - for v in var: - if coeff[v**2] != 1: - break - else: - diop_type = "general_sum_of_squares" + Returns a tuple containing the type of the diophantine equation + along with the variables (free symbols) and their coefficients. + Variables are returned as a list and coefficients are returned + as a dict with the key being the respective term and the constant + term is keyed to 1. The type is one of the following: - elif not constant_term and non_square_degree_2_terms: - diop_type = "homogeneous_general_quadratic" + * %s - else: - coeff_sign_sum = 0 + Usage + ===== - for v in var: - if not isinstance(sqrt(abs(Integer(coeff[v**2]))), Integer): - break - coeff_sign_sum = coeff_sign_sum + sign(coeff[v**2]) - else: - if abs(coeff_sign_sum) == len(var) - 2 and not constant_term: - diop_type = "general_pythagorean" + ``classify_diop(eq)``: Return variables, coefficients and type of the + ``eq``. - elif Poly(eq).total_degree() == 3 and len(var) == 2: + Details + ======= - x, y = var[:2] - diop_type = "cubic_thue" + ``eq`` should be an expression which is assumed to be zero. + ``_dict`` is for internal use: when True (default) a dict is returned, + otherwise a defaultdict which supplies 0 for missing keys is returned. 
- for term in [x**3, x**2*y, x*y**2, y**3, Integer(1)]: - if term not in coeff.keys(): - coeff[term] == Integer(0) + Examples + ======== - if diop_type is not None: - return var, coeff, diop_type - else: - raise NotImplementedError("Still not implemented") + >>> from sympy.solvers.diophantine import classify_diop + >>> from sympy.abc import x, y, z, w, t + >>> classify_diop(4*x + 6*y - 4) + ([x, y], {1: -4, x: 4, y: 6}, 'linear') + >>> classify_diop(x + 3*y -4*z + 5) + ([x, y, z], {1: 5, x: 1, y: 3, z: -4}, 'linear') + >>> classify_diop(x**2 + y**2 - x*y + x + 5) + ([x, y], {1: 5, x: 1, x**2: 1, y**2: 1, x*y: -1}, 'binary_quadratic') + ''' % ('\n * '.join(sorted(diop_known))) def diop_linear(eq, param=symbols("t", integer=True)): @@ -372,14 +486,13 @@ def diop_linear(eq, param=symbols("t", integer=True)): >>> from sympy.solvers.diophantine import diop_linear >>> from sympy.abc import x, y, z, t - >>> from sympy import Integer - >>> diop_linear(2*x - 3*y - 5) #solves equation 2*x - 3*y -5 = 0 - (-3*t_0 - 5, -2*t_0 - 5) + >>> diop_linear(2*x - 3*y - 5) # solves equation 2*x - 3*y - 5 == 0 + (3*t_0 - 5, 2*t_0 - 5) Here x = -3*t_0 - 5 and y = -2*t_0 - 5 >>> diop_linear(2*x - 3*y - 4*z -3) - (t_0, -6*t_0 - 4*t_1 + 3, 5*t_0 + 3*t_1 - 3) + (t_0, 2*t_0 + 4*t_1 + 3, -t_0 - 3*t_1 - 3) See Also ======== @@ -387,7 +500,8 @@ def diop_linear(eq, param=symbols("t", integer=True)): diop_quadratic(), diop_ternary_quadratic(), diop_general_pythagorean(), diop_general_sum_of_squares() """ - var, coeff, diop_type = classify_diop(eq) + from sympy.core.function import count_ops + var, coeff, diop_type = classify_diop(eq, _dict=False) if diop_type == "linear": return _diop_linear(var, coeff, param) @@ -401,32 +515,29 @@ def _diop_linear(var, coeff, param): Note that no solution exists if gcd(a_0, ..., a_n) doesn't divide c. """ - if len(var) == 0: - return None - if Integer(1) in coeff: - #coeff[] is negated because input is of the form: ax + by + c == 0 - # but is used as: ax + by == -c - c = -coeff[Integer(1)] + if 1 in coeff: + # negate coeff[] because input is of the form: ax + by + c == 0 + # but is used as: ax + by == -c + c = -coeff[1] else: c = 0 # Some solutions will have multiple free variables in their solutions. - params = [str(param) + "_" + str(i) for i in range(len(var))] - params = [symbols(p, integer=True) for p in params] + if param is None: + params = [symbols('t')]*len(var) + else: + temp = str(param) + "_%i" + params = [symbols(temp % i, integer=True) for i in range(len(var))] if len(var) == 1: - if coeff[var[0]] == 0: - if c == 0: - return tuple([params[0]]) - else: - return tuple([None]) - elif divisible(c, coeff[var[0]]): - return tuple([c/coeff[var[0]]]) + q, r = divmod(c, coeff[var[0]]) + if not r: + return (q,) else: - return tuple([None]) + return (None,) - """ + ''' base_solution_linear() can solve diophantine equations of the form: a*x + b*y == c @@ -466,7 +577,7 @@ def _diop_linear(var, coeff, param): The arrays A and B are the arrays of integers used for 'a' and 'b' in each of the n-1 bivariate equations we solve. - """ + ''' A = [coeff[v] for v in var] B = [] @@ -481,7 +592,7 @@ def _diop_linear(var, coeff, param): B.insert(0, gcd) B.append(A[-1]) - """ + ''' Consider the trivariate linear equation: 4*x_0 + 6*x_1 + 3*x_2 == 2 @@ -532,64 +643,51 @@ def _diop_linear(var, coeff, param): This method is generalised for many variables, below. 
- """ + ''' solutions = [] - no_solution = tuple([None] * len(var)) for i in range(len(B)): - tot_x, tot_y = 0, 0 + tot_x, tot_y = [], [] - if isinstance(c, Add): - # example: 5 + t_0 + 3*t_1 - args = c.args - else: # c is a Mul, a Symbol, or an Integer - args = [c] - - for j in range(len(args)): - if isinstance(args[j], Mul): + for j, arg in enumerate(Add.make_args(c)): + if arg.is_Integer: + # example: 5 -> k = 5 + k, p = arg, S.One + pnew = params[0] + else: # arg is a Mul or Symbol # example: 3*t_1 -> k = 3 - k = args[j].as_two_terms()[0] - param_index = params.index(args[j].as_two_terms()[1]) + 1 - elif isinstance(args[j], Symbol): # example: t_0 -> k = 1 - k = 1 - param_index = params.index(args[j]) + 1 - else: #args[j] is an Integer - # example: 5 -> k = 5 - k = args[j] - param_index = 0 + k, p = arg.as_coeff_Mul() + pnew = params[params.index(p) + 1] - sol_x, sol_y = base_solution_linear(k, A[i], B[i], params[param_index]) - if isinstance(args[j], Mul) or isinstance(args[j], Symbol): - if isinstance(sol_x, Add): - sol_x = sol_x.args[0]*params[param_index - 1] + sol_x.args[1] - elif isinstance(sol_x, Integer): - sol_x = sol_x*params[param_index - 1] - - if isinstance(sol_y, Add): - sol_y = sol_y.args[0]*params[param_index - 1] + sol_y.args[1] - elif isinstance(sol_y, Integer): - sol_y = sol_y*params[param_index - 1] + sol = sol_x, sol_y = base_solution_linear(k, A[i], B[i], pnew) + if p is S.One: + if None in sol: + return tuple([None]*len(var)) else: - if sol_x is None or sol_y is None: - return no_solution - - tot_x += sol_x - tot_y += sol_y + # convert a + b*pnew -> a*p + b*pnew + if isinstance(sol_x, Add): + sol_x = sol_x.args[0]*p + sol_x.args[1] + if isinstance(sol_y, Add): + sol_y = sol_y.args[0]*p + sol_y.args[1] - solutions.append(tot_x) - c = tot_y + tot_x.append(sol_x) + tot_y.append(sol_y) - solutions.append(tot_y) + solutions.append(Add(*tot_x)) + c = Add(*tot_y) + solutions.append(c) + if param is None: + # just keep the additive constant (i.e. replace t with 0) + solutions = [i.as_coeff_Add()[0] for i in solutions] return tuple(solutions) def base_solution_linear(c, a, b, t=None): """ - Return the base solution for a linear diophantine equation with two - variables. + Return the base solution for the linear equation, `ax + by = c`. Used by ``diop_linear()`` to find the base solution of a linear Diophantine equation. If ``t`` is given then the parametrized solution is @@ -615,70 +713,37 @@ def base_solution_linear(c, a, b, t=None): >>> base_solution_linear(0, 5, 7, t) # equation 5*x + 7*y = 0 (7*t, -5*t) """ - d = igcd(a, igcd(b, c)) - a = a // d - b = b // d - c = c // d + a, b, c = _remove_gcd(a, b, c) if c == 0: - if t != None: + if t is not None: + if b < 0: + t = -t return (b*t , -a*t) else: - return (S.Zero, S.Zero) + return (0, 0) else: - x0, y0, d = extended_euclid(int(abs(a)), int(abs(b))) + x0, y0, d = igcdex(abs(a), abs(b)) - x0 = x0 * sign(a) - y0 = y0 * sign(b) + x0 *= sign(a) + y0 *= sign(b) if divisible(c, d): - if t != None: + if t is not None: + if b < 0: + t = -t return (c*x0 + b*t, c*y0 - a*t) else: - return (Integer(c*x0), Integer(c*y0)) + return (c*x0, c*y0) else: return (None, None) -def extended_euclid(a, b): - """ - For given ``a``, ``b`` returns a tuple containing integers `x`, `y` and `d` - such that `ax + by = d`. Here `d = gcd(a, b)`. - - Usage - ===== - - ``extended_euclid(a, b)``: returns `x`, `y` and `\gcd(a, b)`. - - Details - ======= - - ``a`` Any instance of Integer. - ``b`` Any instance of Integer. 
- - Examples - ======== - - >>> from sympy.solvers.diophantine import extended_euclid - >>> extended_euclid(4, 6) - (-1, 1, 2) - >>> extended_euclid(3, 5) - (2, -1, 1) - """ - if b == 0: - return (1, 0, a) - - x0, y0, d = extended_euclid(b, a%b) - x, y = y0, x0 - (a//b) * y0 - - return x, y, d - - def divisible(a, b): """ Returns `True` if ``a`` is divisible by ``b`` and `False` otherwise. """ - return igcd(int(a), int(b)) == abs(int(b)) + return not a % b def diop_quadratic(eq, param=symbols("t", integer=True)): @@ -713,7 +778,7 @@ def diop_quadratic(eq, param=symbols("t", integer=True)): References ========== - .. [1] Methods to solve Ax^2 + Bxy + Cy^2 + Dx + Ey + F = 0,[online], + .. [1] Methods to solve Ax^2 + Bxy + Cy^2 + Dx + Ey + F = 0, [online], Available: http://www.alpertron.com.ar/METHODS.HTM .. [2] Solving the equation ax^2+ bxy + cy^2 + dx + ey + f= 0, [online], Available: http://www.jpr2718.org/ax2p.pdf @@ -724,7 +789,7 @@ def diop_quadratic(eq, param=symbols("t", integer=True)): diop_linear(), diop_ternary_quadratic(), diop_general_sum_of_squares(), diop_general_pythagorean() """ - var, coeff, diop_type = classify_diop(eq) + var, coeff, diop_type = classify_diop(eq, _dict=False) if diop_type == "binary_quadratic": return _diop_quadratic(var, coeff, param) @@ -732,134 +797,121 @@ def diop_quadratic(eq, param=symbols("t", integer=True)): def _diop_quadratic(var, coeff, t): - x, y = var[:2] - - for term in [x**2, y**2, x*y, x, y, Integer(1)]: - if term not in coeff.keys(): - coeff[term] = Integer(0) + x, y = var A = coeff[x**2] B = coeff[x*y] C = coeff[y**2] D = coeff[x] E = coeff[y] - F = coeff[Integer(1)] + F = coeff[1] - d = igcd(A, igcd(B, igcd(C, igcd(D, igcd(E, F))))) - A = A // d - B = B // d - C = C // d - D = D // d - E = E // d - F = F // d + A, B, C, D, E, F = [as_int(i) for i in _remove_gcd(A, B, C, D, E, F)] - # (1) Linear case: A = B = C = 0 ==> considered under linear diophantine equations - - # (2) Simple-Hyperbolic case:A = C = 0, B != 0 + # (1) Simple-Hyperbolic case: A = C = 0, B != 0 # In this case equation can be converted to (Bx + E)(By + D) = DE - BF # We consider two cases; DE - BF = 0 and DE - BF != 0 # More details, http://www.alpertron.com.ar/METHODS.HTM#SHyperb - l = set([]) - + sol = set([]) + discr = B**2 - 4*A*C if A == 0 and C == 0 and B != 0: if D*E - B*F == 0: - if divisible(int(E), int(B)): - l.add((-E/B, t)) - if divisible(int(D), int(B)): - l.add((t, -D/B)) - + q, r = divmod(E, B) + if not r: + sol.add((-q, t)) + q, r = divmod(D, B) + if not r: + sol.add((t, -q)) else: div = divisors(D*E - B*F) div = div + [-term for term in div] - for d in div: - if divisible(int(d - E), int(B)): - x0 = (d - E) // B - if divisible(int(D*E - B*F), int(d)): - if divisible(int((D*E - B*F)// d - D), int(B)): - y0 = ((D*E - B*F) // d - D) // B - l.add((x0, y0)) - - # (3) Parabolic case: B**2 - 4*A*C = 0 + x0, r = divmod(d - E, B) + if not r: + q, r = divmod(D*E - B*F, d) + if not r: + y0, r = divmod(q - D, B) + if not r: + sol.add((x0, y0)) + + # (2) Parabolic case: B**2 - 4*A*C = 0 # There are two subcases to be considered in this case. 
# sqrt(c)D - sqrt(a)E = 0 and sqrt(c)D - sqrt(a)E != 0 # More Details, http://www.alpertron.com.ar/METHODS.HTM#Parabol - elif B**2 - 4*A*C == 0: + elif discr == 0: if A == 0: s = _diop_quadratic([y, x], coeff, t) for soln in s: - l.add((soln[1], soln[0])) + sol.add((soln[1], soln[0])) else: - g = igcd(A, C) - g = abs(g) * sign(A) + g = sign(A)*igcd(A, C) a = A // g b = B // g c = C // g e = sign(B/A) - if e*sqrt(c)*D - sqrt(a)*E == 0: + _c = e*sqrt(c)*D - sqrt(a)*E + if _c == 0: z = symbols("z", real=True) - roots = solve(sqrt(a)*g*z**2 + D*z + sqrt(a)*F) + eq = sqrt(a)*g*z**2 + D*z + sqrt(a)*F + roots = solveset_real(eq, z).intersect(S.Integers) for root in roots: - if isinstance(root, Integer): - l.add((diop_solve(sqrt(a)*x + e*sqrt(c)*y - root)[0], diop_solve(sqrt(a)*x + e*sqrt(c)*y - root)[1])) + ans = diop_solve(sqrt(a)*x + e*sqrt(c)*y - root) + sol.add((ans[0], ans[1])) - elif isinstance(e*sqrt(c)*D - sqrt(a)*E, Integer): - solve_x = lambda u: e*sqrt(c)*g*(sqrt(a)*E - e*sqrt(c)*D)*t**2 - (E + 2*e*sqrt(c)*g*u)*t\ - - (e*sqrt(c)*g*u**2 + E*u + e*sqrt(c)*F) // (e*sqrt(c)*D - sqrt(a)*E) + elif _is_int(_c): + solve_x = lambda u: -e*sqrt(c)*g*_c*t**2 - (E + 2*e*sqrt(c)*g*u)*t\ + - (e*sqrt(c)*g*u**2 + E*u + e*sqrt(c)*F) // _c - solve_y = lambda u: sqrt(a)*g*(e*sqrt(c)*D - sqrt(a)*E)*t**2 + (D + 2*sqrt(a)*g*u)*t \ - + (sqrt(a)*g*u**2 + D*u + sqrt(a)*F) // (e*sqrt(c)*D - sqrt(a)*E) + solve_y = lambda u: sqrt(a)*g*_c*t**2 + (D + 2*sqrt(a)*g*u)*t \ + + (sqrt(a)*g*u**2 + D*u + sqrt(a)*F) // _c - for z0 in range(0, abs(e*sqrt(c)*D - sqrt(a)*E)): - if divisible(sqrt(a)*g*z0**2 + D*z0 + sqrt(a)*F, e*sqrt(c)*D - sqrt(a)*E): - l.add((solve_x(z0), solve_y(z0))) + for z0 in range(0, abs(_c)): + if divisible( + sqrt(a)*g*z0**2 + D*z0 + sqrt(a)*F, + _c): + sol.add((solve_x(z0), solve_y(z0))) - # (4) Method used when B**2 - 4*A*C is a square, is descibed in p. 6 of the below paper + # (3) Method used when B**2 - 4*A*C is a square, is described in p. 6 of the below paper # by John P. Robertson. 
# http://www.jpr2718.org/ax2p.pdf - elif isinstance(sqrt(B**2 - 4*A*C), Integer): + elif is_square(discr): if A != 0: - r = sqrt(B**2 - 4*A*C) + r = sqrt(discr) u, v = symbols("u, v", integer=True) - eq = _mexpand(4*A*r*u*v + 4*A*D*(B*v + r*u + r*v - B*u) + 2*A*4*A*E*(u - v) + 4*A*r*4*A*F) + eq = _mexpand( + 4*A*r*u*v + 4*A*D*(B*v + r*u + r*v - B*u) + + 2*A*4*A*E*(u - v) + 4*A*r*4*A*F) - sol = diop_solve(eq, t) - sol = list(sol) + solution = diop_solve(eq, t) - for solution in sol: - s0 = solution[0] - t0 = solution[1] + for s0, t0 in solution: - x_0 = S(B*t0 + r*s0 + r*t0 - B*s0)/(4*A*r) + num = B*t0 + r*s0 + r*t0 - B*s0 + x_0 = S(num)/(4*A*r) y_0 = S(s0 - t0)/(2*r) - if isinstance(s0, Symbol) or isinstance(t0, Symbol): if check_param(x_0, y_0, 4*A*r, t) != (None, None): - l.add((check_param(x_0, y_0, 4*A*r, t)[0], check_param(x_0, y_0, 4*A*r, t)[1])) + ans = check_param(x_0, y_0, 4*A*r, t) + sol.add((ans[0], ans[1])) + elif x_0.is_Integer and y_0.is_Integer: + if is_solution_quad(var, coeff, x_0, y_0): + sol.add((x_0, y_0)) - elif divisible(B*t0 + r*s0 + r*t0 - B*s0, 4*A*r): - if divisible(s0 - t0, 2*r): - if is_solution_quad(var, coeff, x_0, y_0): - l.add((x_0, y_0)) else: - _var = var - _var[0], _var[1] = _var[1], _var[0] # Interchange x and y - s = _diop_quadratic(_var, coeff, t) - - while len(s) > 0: - sol = s.pop() - l.add((sol[1], sol[0])) + s = _diop_quadratic(var[::-1], coeff, t) # Interchange x and y + while s: # | + sol.add(s.pop()[::-1]) # and solution <--------+ - # (5) B**2 - 4*A*C > 0 and B**2 - 4*A*C not a square or B**2 - 4*A*C < 0 + # (4) B**2 - 4*A*C > 0 and B**2 - 4*A*C not a square or B**2 - 4*A*C < 0 else: @@ -871,42 +923,34 @@ def _diop_quadratic(var, coeff, t): for solution in solns_pell: for X_i in [-solution[0], solution[0]]: for Y_i in [-solution[1], solution[1]]: - x_i, y_i = (P*Matrix([X_i, Y_i]) + Q)[0], (P*Matrix([X_i, Y_i]) + Q)[1] - if isinstance(x_i, Integer) and isinstance(y_i, Integer): - l.add((x_i, y_i)) + s = P*Matrix([X_i, Y_i]) + Q + try: + sol.add(tuple([as_int(_) for _ in s])) + except ValueError: + pass else: # In this case equation can be transformed into a Pell equation - #n = symbols("n", integer=True) - fund_solns = solns_pell - solns_pell = set(fund_solns) - for X, Y in fund_solns: + solns_pell = set(solns_pell) + for X, Y in list(solns_pell): solns_pell.add((-X, -Y)) a = diop_DN(D, 1) T = a[0][0] U = a[0][1] - if (isinstance(P[0], Integer) and isinstance(P[1], Integer) and isinstance(P[2], Integer) - and isinstance(P[3], Integer) and isinstance(Q[0], Integer) and isinstance(Q[1], Integer)): - - for sol in solns_pell: - - r = sol[0] - s = sol[1] - x_n = S((r + s*sqrt(D))*(T + U*sqrt(D))**t + (r - s*sqrt(D))*(T - U*sqrt(D))**t)/2 - y_n = S((r + s*sqrt(D))*(T + U*sqrt(D))**t - (r - s*sqrt(D))*(T - U*sqrt(D))**t)/(2*sqrt(D)) - - x_n = _mexpand(x_n) - y_n = _mexpand(y_n) - x_n, y_n = (P*Matrix([x_n, y_n]) + Q)[0], (P*Matrix([x_n, y_n]) + Q)[1] - - l.add((x_n, y_n)) + if all(_is_int(_) for _ in P[:4] + Q[:2]): + for r, s in solns_pell: + _a = (r + s*sqrt(D))*(T + U*sqrt(D))**t + _b = (r - s*sqrt(D))*(T - U*sqrt(D))**t + x_n = _mexpand(S(_a + _b)/2) + y_n = _mexpand(S(_a - _b)/(2*sqrt(D))) + s = P*Matrix([x_n, y_n]) + Q + sol.add(tuple(s)) else: - L = ilcm(S(P[0]).q, ilcm(S(P[1]).q, ilcm(S(P[2]).q, - ilcm(S(P[3]).q, ilcm(S(Q[0]).q, S(Q[1]).q))))) + L = ilcm(*[_.q for _ in P[:4] + Q[:2]]) k = 1 @@ -920,21 +964,18 @@ def _diop_quadratic(var, coeff, t): for X, Y in solns_pell: for i in range(k): - Z = P*Matrix([X, Y]) + Q - x, y = Z[0], Z[1] - - if 
isinstance(x, Integer) and isinstance(y, Integer): - Xt = S((X + sqrt(D)*Y)*(T_k + sqrt(D)*U_k)**t + - (X - sqrt(D)*Y)*(T_k - sqrt(D)*U_k)**t)/ 2 - Yt = S((X + sqrt(D)*Y)*(T_k + sqrt(D)*U_k)**t - - (X - sqrt(D)*Y)*(T_k - sqrt(D)*U_k)**t)/ (2*sqrt(D)) - Zt = P*Matrix([Xt, Yt]) + Q - l.add((Zt[0], Zt[1])) + if all(_is_int(_) for _ in P*Matrix([X, Y]) + Q): + _a = (X + sqrt(D)*Y)*(T_k + sqrt(D)*U_k)**t + _b = (X - sqrt(D)*Y)*(T_k - sqrt(D)*U_k)**t + Xt = S(_a + _b)/2 + Yt = S(_a - _b)/(2*sqrt(D)) + s = P*Matrix([Xt, Yt]) + Q + sol.add(tuple(s)) X, Y = X*T + D*U*Y, X*U + Y*T - return l + return sol def is_solution_quad(var, coeff, u, v): @@ -945,24 +986,22 @@ def is_solution_quad(var, coeff, u, v): Not intended for use by normal users. """ - x, y = var[:2] - - eq = x**2*coeff[x**2] + x*y*coeff[x*y] + y**2*coeff[y**2] + x*coeff[x] + y*coeff[y] + coeff[Integer(1)] - - return _mexpand(Subs(eq, (x, y), (u, v)).doit()) == 0 + reps = dict(zip(var, (u, v))) + eq = Add(*[j*i.xreplace(reps) for i, j in coeff.items()]) + return _mexpand(eq) == 0 def diop_DN(D, N, t=symbols("t", integer=True)): """ Solves the equation `x^2 - Dy^2 = N`. - Mainly concerned in the case `D > 0, D` is not a perfect square, which is - the same as generalized Pell equation. To solve the generalized Pell - equation this function Uses LMM algorithm. Refer [1]_ for more details on - the algorithm. - Returns one solution for each class of the solutions. Other solutions of - the class can be constructed according to the values of ``D`` and ``N``. - Returns a list containing the solution tuples `(x, y)`. + Mainly concerned with the case `D > 0, D` is not a perfect square, + which is the same as the generalized Pell equation. The LMM + algorithm [1]_ is used to solve this equation. + + Returns one solution tuple, (`x, y)` for each class of the solutions. + Other solutions of the class can be constructed according to the + values of ``D`` and ``N``. 
Usage ===== @@ -1005,52 +1044,55 @@ def diop_DN(D, N, t=symbols("t", integer=True)): """ if D < 0: if N == 0: - return [(S.Zero, S.Zero)] + return [(0, 0)] elif N < 0: return [] elif N > 0: - d = divisors(square_factor(N)) sol = [] - - for divisor in d: - sols = cornacchia(1, -D, N // divisor**2) + for d in divisors(square_factor(N)): + sols = cornacchia(1, -D, N // d**2) if sols: for x, y in sols: - sol.append((divisor*x, divisor*y)) + sol.append((d*x, d*y)) return sol elif D == 0: - if N < 0 or not isinstance(sqrt(N), Integer): + if N < 0: return [] if N == 0: - return [(S.Zero, t)] - if isinstance(sqrt(N), Integer): - return [(sqrt(N), t)] - - else: # D > 0 - if isinstance(sqrt(D), Integer): - r = sqrt(D) + return [(0, t)] + sN, _exact = integer_nthroot(N, 2) + if _exact: + return [(sN, t)] + else: + return [] + else: # D > 0 + sD, _exact = integer_nthroot(D, 2) + if _exact: if N == 0: - return [(r*t, t)] + return [(sD*t, t)] else: sol = [] - for y in range(floor(sign(N)*(N - 1)/(2*r)) + 1): - if isinstance(sqrt(D*y**2 + N), Integer): - sol.append((sqrt(D*y**2 + N), y)) + for y in range(floor(sign(N)*(N - 1)/(2*sD)) + 1): + try: + sq, _exact = integer_nthroot(D*y**2 + N, 2) + except ValueError: + _exact = False + if _exact: + sol.append((sq, y)) return sol else: if N == 0: - return [(S.Zero, S.Zero)] + return [(0, 0)] elif abs(N) == 1: pqa = PQa(0, 1, D) - a_0 = floor(sqrt(D)) - l = 0 + j = 0 G = [] B = [] @@ -1060,29 +1102,29 @@ def diop_DN(D, N, t=symbols("t", integer=True)): G.append(i[5]) B.append(i[4]) - if l != 0 and a == 2*a_0: + if j != 0 and a == 2*sD: break - l = l + 1 + j = j + 1 - if l % 2 == 1: + if _odd(j): if N == -1: - x = G[l-1] - y = B[l-1] + x = G[j - 1] + y = B[j - 1] else: - count = l - while count < 2*l - 1: + count = j + while count < 2*j - 1: i = next(pqa) G.append(i[5]) B.append(i[4]) - count = count + 1 + count += 1 x = G[count] y = B[count] else: if N == 1: - x = G[l-1] - y = B[l-1] + x = G[j - 1] + y = B[j - 1] else: return [] @@ -1100,18 +1142,17 @@ def diop_DN(D, N, t=symbols("t", integer=True)): for f in fs: m = N // f**2 - zs = sqrt_mod(D, abs(m), True) + zs = sqrt_mod(D, abs(m), all_roots=True) zs = [i for i in zs if i <= abs(m) // 2 ] + if abs(m) != 2: - zs = zs + [-i for i in zs] - if S.Zero in zs: - zs.remove(S.Zero) # Remove duplicate zero + zs = zs + [-i for i in zs if i] # omit dupl 0 for z in zs: pqa = PQa(z, abs(m), D) - l = 0 + j = 0 G = [] B = [] @@ -1121,9 +1162,9 @@ def diop_DN(D, N, t=symbols("t", integer=True)): G.append(i[5]) B.append(i[4]) - if l != 0 and abs(i[1]) == 1: - r = G[l-1] - s = B[l-1] + if j != 0 and abs(i[1]) == 1: + r = G[j-1] + s = B[j-1] if r**2 - D*s**2 == m: sol.append((f*r, f*s)) @@ -1134,8 +1175,8 @@ def diop_DN(D, N, t=symbols("t", integer=True)): break - l = l + 1 - if l == length(z, abs(m), D): + j = j + 1 + if j == length(z, abs(m), D): break return sol @@ -1148,8 +1189,8 @@ def cornacchia(a, b, m): Uses the algorithm due to Cornacchia. The method only finds primitive solutions, i.e. ones with `\gcd(x, y) = 1`. So this method can't be used to find the solutions of `x^2 + y^2 = 20` since the only solution to former is - `(x,y) = (4, 2)` and it is not primitive. When ` a = b = 1`, only the - solutions with `x \geq y` are found. For more details, see the References. + `(x, y) = (4, 2)` and it is not primitive. When `a = b`, only the + solutions with `x \leq y` are found. For more details, see the References. 
Examples ======== @@ -1158,7 +1199,7 @@ def cornacchia(a, b, m): >>> cornacchia(2, 3, 35) # equation 2x**2 + 3y**2 = 35 set([(2, 3), (4, 1)]) >>> cornacchia(1, 1, 25) # equation x**2 + y**2 = 25 - set([(4, 3)]) + set([(3, 4)]) References =========== @@ -1167,18 +1208,18 @@ def cornacchia(a, b, m): .. [2] Solving the diophantine equation ax**2 + by**2 = m by Cornacchia's method, [online], Available: http://www.numbertheory.org/php/cornacchia.html + + See Also + ======== + sympy.utilities.iterables.signed_permutations """ - sols = set([]) + sols = set() a1 = igcdex(a, m)[0] - v = sqrt_mod(-b*a1, m, True) - - if v is None: + v = sqrt_mod(-b*a1, m, all_roots=True) + if not v: return None - if not isinstance(v, list): - v = [v] - for t in v: if t < m // 2: continue @@ -1194,8 +1235,10 @@ def cornacchia(a, b, m): if m1 % b == 0: m1 = m1 // b - if isinstance(sqrt(m1), Integer): - s = sqrt(m1) + s, _exact = integer_nthroot(m1, 2) + if _exact: + if a == b and r > s: + r, s = s, r sols.add((int(r), int(s))) return sols @@ -1303,6 +1346,9 @@ def diop_bf_DN(D, N, t=symbols("t", integer=True)): .. [1] Solving the generalized Pell equation x**2 - D*y**2 = N, John P. Robertson, July 31, 2004, Page 15. http://www.jpr2718.org/pell.pdf """ + D = as_int(D) + N = as_int(N) + sol = [] a = diop_DN(D, 1) u = a[0][0] @@ -1314,27 +1360,33 @@ def diop_bf_DN(D, N, t=symbols("t", integer=True)): elif N > 1: L1 = 0 - L2 = floor(sqrt(S(N*(u - 1))/(2*D))) + 1 + L2 = integer_nthroot(int(N*(u - 1)/(2*D)), 2)[0] + 1 elif N < -1: - L1 = ceiling(sqrt(S(-N)/D)) - L2 = floor(sqrt(S(-N*(u + 1))/(2*D))) + 1 + L1, _exact = integer_nthroot(-int(N/D), 2) + if not _exact: + L1 += 1 + L2 = integer_nthroot(-int(N*(u + 1)/(2*D)), 2)[0] + 1 - else: + else: # N = 0 if D < 0: - return [(S.Zero, S.Zero)] + return [(0, 0)] elif D == 0: - return [(S.Zero, t)] + return [(0, t)] else: - if isinstance(sqrt(D), Integer): - return [(sqrt(D)*t, t), (-sqrt(D)*t, t)] + sD, _exact = integer_nthroot(D, 2) + if _exact: + return [(sD*t, t), (-sD*t, t)] else: - return [(S.Zero, S.Zero)] + return [(0, 0)] for y in range(L1, L2): - if isinstance(sqrt(N + D*y**2), Integer): - x = sqrt(N + D*y**2) + try: + x, _exact = integer_nthroot(N + D*y**2, 2) + except ValueError: + _exact = False + if _exact: sol.append((x, y)) if not equivalent(x, y, -x, y, D, N): sol.append((-x, y)) @@ -1384,7 +1436,7 @@ def length(P, Q, D): continued fraction representation of `\\frac{P + \sqrt{D}}{Q}`. It is important to remember that this does NOT return the length of the - periodic part but the addition of the legths of the two parts as mentioned + periodic part but the sum of the legths of the two parts as mentioned above. 
Usage @@ -1406,35 +1458,21 @@ def length(P, Q, D): >>> length(-2 , 4, 5) # (-2 + sqrt(5))/4 3 >>> length(-5, 4, 17) # (-5 + sqrt(17))/4 - 4 - """ - x = P + sqrt(D) - y = Q - - x = sympify(x) - v, res = [], [] - q = x/y - - if q < 0: - v.append(q) - res.append(floor(q)) - q = q - floor(q) - num, den = rad_rationalize(1, q) - q = num / den - - while 1: - v.append(q) - a = int(q) - res.append(a) - - if q == a: - return len(res) - - num, den = rad_rationalize(1,(q - a)) - q = num / den + 5 - if q in v: - return len(res) + See Also + ======== + sympy.ntheory.continued_fraction.continued_fraction_periodic + """ + from sympy.ntheory.continued_fraction import continued_fraction_periodic + v = continued_fraction_periodic(P, Q, D) + if type(v[-1]) is list: + rpt = len(v[-1]) + nonrpt = len(v) - 1 + else: + rpt = 0 + nonrpt = len(v) + return rpt + nonrpt def transformation_to_DN(eq): @@ -1477,7 +1515,7 @@ def transformation_to_DN(eq): will give an equation of the form `x^2 - Dy^2 = N`. >>> from sympy.abc import X, Y - >>> from sympy import Matrix, simplify, Subs + >>> from sympy import Matrix, simplify >>> u = (A*Matrix([X, Y]) + B)[0] # Transformation for x >>> u X/26 + 3*Y/26 - 6/13 @@ -1488,7 +1526,7 @@ def transformation_to_DN(eq): Next we will substitute these formulas for `x` and `y` and do ``simplify()``. - >>> eq = simplify(Subs(x**2 - 3*x*y - y**2 - 2*y + 1, (x, y), (u, v)).doit()) + >>> eq = simplify((x**2 - 3*x*y - y**2 - 2*y + 1).subs(zip((x, y), (u, v)))) >>> eq X**2/676 - Y**2/52 + 17/13 @@ -1513,65 +1551,52 @@ def transformation_to_DN(eq): http://www.jpr2718.org/ax2p.pdf """ - - var, coeff, diop_type = classify_diop(eq) + var, coeff, diop_type = classify_diop(eq, _dict=False) if diop_type == "binary_quadratic": return _transformation_to_DN(var, coeff) def _transformation_to_DN(var, coeff): - x, y = var[:2] + x, y = var a = coeff[x**2] b = coeff[x*y] c = coeff[y**2] d = coeff[x] e = coeff[y] - f = coeff[Integer(1)] + f = coeff[1] - g = igcd(a, igcd(b, igcd(c, igcd(d, igcd(e, f))))) - a = a // g - b = b // g - c = c // g - d = d // g - e = e // g - f = f // g + a, b, c, d, e, f = [as_int(i) for i in _remove_gcd(a, b, c, d, e, f)] X, Y = symbols("X, Y", integer=True) - if b != Integer(0): - B = (S(2*a)/b).p - C = (S(2*a)/b).q - A = (S(a)/B**2).p - T = (S(a)/B**2).q + if b: + B, C = _rational_pq(2*a, b) + A, T = _rational_pq(a, B**2) # eq_1 = A*B*X**2 + B*(c*T - A*C**2)*Y**2 + d*T*X + (B*e*T - d*T*C)*Y + f*T*B - coeff = {X**2: A*B, X*Y: 0, Y**2: B*(c*T - A*C**2), X: d*T, Y: B*e*T - d*T*C, Integer(1): f*T*B} + coeff = {X**2: A*B, X*Y: 0, Y**2: B*(c*T - A*C**2), X: d*T, Y: B*e*T - d*T*C, 1: f*T*B} A_0, B_0 = _transformation_to_DN([X, Y], coeff) return Matrix(2, 2, [S(1)/B, -S(C)/B, 0, 1])*A_0, Matrix(2, 2, [S(1)/B, -S(C)/B, 0, 1])*B_0 else: - if d != Integer(0): - B = (S(2*a)/d).p - C = (S(2*a)/d).q - A = (S(a)/B**2).p - T = (S(a)/B**2).q + if d: + B, C = _rational_pq(2*a, d) + A, T = _rational_pq(a, B**2) # eq_2 = A*X**2 + c*T*Y**2 + e*T*Y + f*T - A*C**2 - coeff = {X**2: A, X*Y: 0, Y**2: c*T, X: 0, Y: e*T, Integer(1): f*T - A*C**2} + coeff = {X**2: A, X*Y: 0, Y**2: c*T, X: 0, Y: e*T, 1: f*T - A*C**2} A_0, B_0 = _transformation_to_DN([X, Y], coeff) return Matrix(2, 2, [S(1)/B, 0, 0, 1])*A_0, Matrix(2, 2, [S(1)/B, 0, 0, 1])*B_0 + Matrix([-S(C)/B, 0]) else: - if e != Integer(0): - B = (S(2*c)/e).p - C = (S(2*c)/e).q - A = (S(c)/B**2).p - T = (S(c)/B**2).q + if e: + B, C = _rational_pq(2*c, e) + A, T = _rational_pq(c, B**2) # eq_3 = a*T*X**2 + A*Y**2 + f*T - A*C**2 - coeff = {X**2: a*T, X*Y: 0, 
Y**2: A, X: 0, Y: 0, Integer(1): f*T - A*C**2} + coeff = {X**2: a*T, X*Y: 0, Y**2: A, X: 0, Y: 0, 1: f*T - A*C**2} A_0, B_0 = _transformation_to_DN([X, Y], coeff) return Matrix(2, 2, [1, 0, 0, S(1)/B])*A_0, Matrix(2, 2, [1, 0, 0, S(1)/B])*B_0 + Matrix([0, -S(C)/B]) @@ -1621,81 +1646,54 @@ def find_DN(eq): John P.Robertson, May 8, 2003, Page 7 - 11. http://www.jpr2718.org/ax2p.pdf """ - var, coeff, diop_type = classify_diop(eq) + var, coeff, diop_type = classify_diop(eq, _dict=False) if diop_type == "binary_quadratic": return _find_DN(var, coeff) def _find_DN(var, coeff): - x, y = var[:2] + x, y = var X, Y = symbols("X, Y", integer=True) - A , B = _transformation_to_DN(var, coeff) + A, B = _transformation_to_DN(var, coeff) u = (A*Matrix([X, Y]) + B)[0] v = (A*Matrix([X, Y]) + B)[1] - eq = x**2*coeff[x**2] + x*y*coeff[x*y] + y**2*coeff[y**2] + x*coeff[x] + y*coeff[y] + coeff[Integer(1)] - - simplified = _mexpand(Subs(eq, (x, y), (u, v)).doit()) + eq = x**2*coeff[x**2] + x*y*coeff[x*y] + y**2*coeff[y**2] + x*coeff[x] + y*coeff[y] + coeff[1] - coeff = dict([reversed(t.as_independent(*[X, Y])) for t in simplified.args]) + simplified = _mexpand(eq.subs(zip((x, y), (u, v)))) - for term in [X**2, Y**2, Integer(1)]: - if term not in coeff.keys(): - coeff[term] = Integer(0) + coeff = simplified.as_coefficients_dict() - return -coeff[Y**2]/coeff[X**2], -coeff[Integer(1)]/coeff[X**2] + return -coeff[Y**2]/coeff[X**2], -coeff[1]/coeff[X**2] def check_param(x, y, a, t): """ - Check if there is a number modulo ``a`` such that ``x`` and ``y`` are both - integers. If exist, then find a parametric representation for ``x`` and - ``y``. + If there is a number modulo ``a`` such that ``x`` and ``y`` are both + integers, then return a parametric representation for ``x`` and ``y`` + else return (None, None). Here ``x`` and ``y`` are functions of ``t``. 
""" - k, m, n = symbols("k, m, n", integer=True) - p = Wild("p", exclude=[k]) - q = Wild("q", exclude=[k]) - ok = False - - for i in range(a): - - z_x = _mexpand(Subs(x, t, a*k + i).doit()).match(p*k + q) - z_y = _mexpand(Subs(y, t, a*k + i).doit()).match(p*k + q) - - if (isinstance(z_x[p], Integer) and isinstance(z_x[q], Integer) and - isinstance(z_y[p], Integer) and isinstance(z_y[q], Integer)): - ok = True - break - - if ok == True: - - x_param = x.match(p*t + q) - y_param = y.match(p*t + q) - - if x_param[p] == 0 or y_param[p] == 0: - if x_param[p] == 0: - l1, junk = Poly(y).clear_denoms() - else: - l1 = 1 + from sympy.simplify.simplify import clear_coefficients - if y_param[p] == 0: - l2, junk = Poly(x).clear_denoms() - else: - l2 = 1 + if x.is_number and not x.is_Integer: + return (None, None) - return x*ilcm(l1, l2), y*ilcm(l1, l2) + if y.is_number and not y.is_Integer: + return (None, None) - eq = S(m - x_param[q])/x_param[p] - S(n - y_param[q])/y_param[p] + m, n = symbols("m, n", integer=True) + c, p = (m*x + n*y).as_content_primitive() + if a % c.q: + return (None, None) - lcm_denom, junk = Poly(eq).clear_denoms() - eq = eq * lcm_denom + # clear_coefficients(mx + b, R)[1] -> (R - b)/m + eq = clear_coefficients(x, m)[1] - clear_coefficients(y, n)[1] + junk, eq = eq.as_content_primitive() - return diop_solve(eq, t)[0], diop_solve(eq, t)[1] - else: - return (None, None) + return diop_solve(eq, t) def diop_ternary_quadratic(eq): @@ -1732,18 +1730,18 @@ def diop_ternary_quadratic(eq): >>> diop_ternary_quadratic(x**2 - 49*y**2 - z**2 + 13*z*y -8*x*y) (9, 1, 5) """ - var, coeff, diop_type = classify_diop(eq) + var, coeff, diop_type = classify_diop(eq, _dict=False) - if diop_type == "homogeneous_ternary_quadratic": + if diop_type in ( + "homogeneous_ternary_quadratic", + "homogeneous_ternary_quadratic_normal"): return _diop_ternary_quadratic(var, coeff) def _diop_ternary_quadratic(_var, coeff): - x, y, z = _var[:3] - - var = [x]*3 - var[0], var[1], var[2] = _var[0], _var[1], _var[2] + x, y, z = _var + var = [x, y, z] # Equations of the form B*x*y + C*z*x + E*y*z = 0 and At least two of the # coefficients A, B, C are non-zero. @@ -1754,8 +1752,8 @@ def _diop_ternary_quadratic(_var, coeff): # using methods for binary quadratic diophantine equations. 
Let's select the # solution which minimizes |x| + |z| - if coeff[x**2] == 0 and coeff[y**2] == 0 and coeff[z**2] == 0: - if coeff[x*z] != 0: + if not any(coeff[i**2] for i in var): + if coeff[x*z]: sols = diophantine(coeff[x*y]*x + coeff[y*z]*z - x*z) s = sols.pop() min_sum = abs(s[0]) + abs(s[1]) @@ -1771,7 +1769,7 @@ def _diop_ternary_quadratic(_var, coeff): var[0], var[1] = _var[1], _var[0] y_0, x_0, z_0 = _diop_ternary_quadratic(var, coeff) - return simplified(x_0, y_0, z_0) + return _remove_gcd(x_0, y_0, z_0) if coeff[x**2] == 0: # If the coefficient of x is zero change the variables @@ -1784,7 +1782,7 @@ def _diop_ternary_quadratic(_var, coeff): y_0, x_0, z_0 = _diop_ternary_quadratic(var, coeff) else: - if coeff[x*y] != 0 or coeff[x*z] != 0: + if coeff[x*y] or coeff[x*z]: # Apply the transformation x --> X - (B*y + C*z)/(2*A) A = coeff[x**2] B = coeff[x*y] @@ -1804,11 +1802,11 @@ def _diop_ternary_quadratic(_var, coeff): X_0, y_0, z_0 = _diop_ternary_quadratic(var, _coeff) - if X_0 == None: + if X_0 is None: return (None, None, None) - l = (S(B*y_0 + C*z_0)/(2*A)).q - x_0, y_0, z_0 = X_0*l - (S(B*y_0 + C*z_0)/(2*A)).p, y_0*l, z_0*l + p, q = _rational_pq(B*y_0 + C*z_0, 2*A) + x_0, y_0, z_0 = X_0*q - p, y_0*q, z_0*q elif coeff[z*y] != 0: if coeff[y**2] == 0: @@ -1817,8 +1815,7 @@ def _diop_ternary_quadratic(_var, coeff): A = coeff[x**2] E = coeff[y*z] - b = (S(-E)/A).p - a = (S(-E)/A).q + b, a = _rational_pq(-E, A) x_0, y_0, z_0 = b, a, b @@ -1836,32 +1833,44 @@ def _diop_ternary_quadratic(_var, coeff): # Ax**2 + D*y**2 + F*z**2 = 0, C may be zero x_0, y_0, z_0 = _diop_ternary_quadratic_normal(var, coeff) - return simplified(x_0, y_0, z_0) + return _remove_gcd(x_0, y_0, z_0) def transformation_to_normal(eq): """ - Returns the transformation Matrix from general ternary quadratic equation - `eq` to normal form. - - General form of the ternary quadratic equation is `ax^2 + by^2 cz^2 + dxy + - eyz + fxz`. This function returns a 3X3 transformation Matrix which - transforms the former equation to the form `ax^2 + by^2 + cz^2 = 0`. This - is not used in solving ternary quadratics. Only implemented for the sake - of completeness. + Returns the transformation Matrix that converts a general ternary + quadratic equation `eq` (`ax^2 + by^2 + cz^2 + dxy + eyz + fxz`) + to a form without cross terms: `ax^2 + by^2 + cz^2 = 0`. This is + not used in solving ternary quadratics; it is only implemented for + the sake of completeness. 
""" - var, coeff, diop_type = classify_diop(eq) + var, coeff, diop_type = classify_diop(eq, _dict=False) - if diop_type == "homogeneous_ternary_quadratic": + if diop_type in ( + "homogeneous_ternary_quadratic", + "homogeneous_ternary_quadratic_normal"): return _transformation_to_normal(var, coeff) def _transformation_to_normal(var, coeff): - _var = [var[0]]*3 - _var[1], _var[2] = var[1], var[2] - - x, y, z = var[:3] + _var = list(var) # copy + x, y, z = var + + if not any(coeff[i**2] for i in var): + # https://math.stackexchange.com/questions/448051/transform-quadratic-ternary-form-to-normal-form/448065#448065 + a = coeff[x*y] + b = coeff[y*z] + c = coeff[x*z] + swap = False + if not a: # b can't be 0 or else there aren't 3 vars + swap = True + a, b = b, a + T = Matrix(((1, 1, -b/a), (1, -1, -c/a), (0, 0, 1))) + if swap: + T.row_swap(0, 1) + T.col_swap(0, 1) + return T if coeff[x**2] == 0: # If the coefficient of x is zero change the variables @@ -1879,65 +1888,52 @@ def _transformation_to_normal(var, coeff): T.col_swap(0, 1) return T - else: - # Apply the transformation x --> X - (B*Y + C*Z)/(2*A) - if coeff[x*y] != 0 or coeff[x*z] != 0: - A = coeff[x**2] - B = coeff[x*y] - C = coeff[x*z] - D = coeff[y**2] - E = coeff[y*z] - F = coeff[z**2] - - _coeff = dict() + # Apply the transformation x --> X - (B*Y + C*Z)/(2*A) + if coeff[x*y] != 0 or coeff[x*z] != 0: + A = coeff[x**2] + B = coeff[x*y] + C = coeff[x*z] + D = coeff[y**2] + E = coeff[y*z] + F = coeff[z**2] - _coeff[x**2] = 4*A**2 - _coeff[y**2] = 4*A*D - B**2 - _coeff[z**2] = 4*A*F - C**2 - _coeff[y*z] = 4*A*E - 2*B*C - _coeff[x*y] = 0 - _coeff[x*z] = 0 + _coeff = dict() - T_0 = _transformation_to_normal(_var, _coeff) - return Matrix(3, 3, [1, S(-B)/(2*A), S(-C)/(2*A), 0, 1, 0, 0, 0, 1]) * T_0 + _coeff[x**2] = 4*A**2 + _coeff[y**2] = 4*A*D - B**2 + _coeff[z**2] = 4*A*F - C**2 + _coeff[y*z] = 4*A*E - 2*B*C + _coeff[x*y] = 0 + _coeff[x*z] = 0 - elif coeff[y*z] != 0: - if coeff[y**2] == 0: - if coeff[z**2] == 0: - # Equations of the form A*x**2 + E*yz = 0. - # Apply transformation y -> Y + Z ans z -> Y - Z - return Matrix(3, 3, [1, 0, 0, 0, 1, 1, 0, 1, -1]) + T_0 = _transformation_to_normal(_var, _coeff) + return Matrix(3, 3, [1, S(-B)/(2*A), S(-C)/(2*A), 0, 1, 0, 0, 0, 1])*T_0 - else: - # Ax**2 + E*y*z + F*z**2 = 0 - _var[0], _var[2] = var[2], var[0] - T = _transformtion_to_normal(_var, coeff) - T.row_swap(0, 2) - T.col_swap(0, 2) - return T + elif coeff[y*z] != 0: + if coeff[y**2] == 0: + if coeff[z**2] == 0: + # Equations of the form A*x**2 + E*yz = 0. + # Apply transformation y -> Y + Z ans z -> Y - Z + return Matrix(3, 3, [1, 0, 0, 0, 1, 1, 0, 1, -1]) else: - # A*x**2 + D*y**2 + E*y*z + F*z**2 = 0, F may be zero - _var[0], _var[1] = var[1], var[0] + # Ax**2 + E*y*z + F*z**2 = 0 + _var[0], _var[2] = var[2], var[0] T = _transformation_to_normal(_var, coeff) - T.row_swap(0, 1) - T.col_swap(0, 1) + T.row_swap(0, 2) + T.col_swap(0, 2) return T else: - return Matrix(3, 3, [1, 0, 0, 0, 1, 0, 0, 0, 1]) - - -def simplified(x, y, z): - """ - Simplify the solution `(x, y, z)`. - """ - if x == None or y == None or z == None: - return (x, y, z) - - g = igcd(x, igcd(y, z)) + # A*x**2 + D*y**2 + E*y*z + F*z**2 = 0, F may be zero + _var[0], _var[1] = var[1], var[0] + T = _transformation_to_normal(_var, coeff) + T.row_swap(0, 1) + T.col_swap(0, 1) + return T - return x // g, y // g, z // g + else: + return Matrix.eye(3) def parametrize_ternary_quadratic(eq): @@ -1969,41 +1965,48 @@ def parametrize_ternary_quadratic(eq): Press, Cambridge, 1998. 
""" - var, coeff, diop_type = classify_diop(eq) + var, coeff, diop_type = classify_diop(eq, _dict=False) - if diop_type == "homogeneous_ternary_quadratic": + if diop_type in ( + "homogeneous_ternary_quadratic", + "homogeneous_ternary_quadratic_normal"): x_0, y_0, z_0 = _diop_ternary_quadratic(var, coeff) - return _parametrize_ternary_quadratic((x_0, y_0, z_0), var, coeff) + return _parametrize_ternary_quadratic( + (x_0, y_0, z_0), var, coeff) def _parametrize_ternary_quadratic(solution, _var, coeff): + # called for a*x**2 + b*y**2 + c*z**2 + d*x*y + e*y*z + f*x*z = 0 + assert 1 not in coeff - x, y, z = _var[:3] + x, y, z = _var - x_0, y_0, z_0 = solution[:3] + x_0, y_0, z_0 = solution - v = [x]*3 - v[0], v[1], v[2] = _var[0], _var[1], _var[2] + v = list(_var) # copy - if x_0 == None: + if x_0 is None: + return (None, None, None) + + if solution.count(0) >= 2: + # if there are 2 zeros the the equation reduces + # to k*X**2 == 0 where X is x, y, or z so X must + # be zero, too. So there is only the trivial + # solution. return (None, None, None) if x_0 == 0: - if y_0 == 0: - v[0], v[2] = v[2], v[0] - z_p, y_p, x_p = _parametrize_ternary_quadratic((z_0, y_0, x_0), v, coeff) - return x_p, y_p, z_p - else: - v[0], v[1] = v[1], v[0] - y_p, x_p, z_p = _parametrize_ternary_quadratic((y_0, x_0, z_0), v, coeff) - return x_p, y_p, z_p + v[0], v[1] = v[1], v[0] + y_p, x_p, z_p = _parametrize_ternary_quadratic( + (y_0, x_0, z_0), v, coeff) + return x_p, y_p, z_p - x, y, z = v[:3] + x, y, z = v r, p, q = symbols("r, p, q", integer=True) - eq = x**2*coeff[x**2] + y**2*coeff[y**2] + z**2*coeff[z**2] + x*y*coeff[x*y] + y*z*coeff[y*z] + z*x*coeff[z*x] - eq_1 = Subs(eq, (x, y, z), (r*x_0, r*y_0 + p, r*z_0 + q)).doit() - eq_1 = _mexpand(eq_1) + eq = sum(k*v for k, v in coeff.items()) + eq_1 = _mexpand(eq.subs(zip( + (x, y, z), (r*x_0, r*y_0 + p, r*z_0 + q)))) A, B = eq_1.as_independent(r, as_Add=True) @@ -2042,38 +2045,29 @@ def diop_ternary_quadratic_normal(eq): >>> diop_ternary_quadratic_normal(34*x**2 - 3*y**2 - 301*z**2) (4, 9, 1) """ - var, coeff, diop_type = classify_diop(eq) - - if diop_type == "homogeneous_ternary_quadratic": + var, coeff, diop_type = classify_diop(eq, _dict=False) + if diop_type == "homogeneous_ternary_quadratic_normal": return _diop_ternary_quadratic_normal(var, coeff) def _diop_ternary_quadratic_normal(var, coeff): - x, y, z = var[:3] + x, y, z = var a = coeff[x**2] b = coeff[y**2] c = coeff[z**2] - - if a*b*c == 0: - raise ValueError("Try factoring out you equation or using diophantine()") - - g = igcd(a, igcd(b, c)) - - a = a // g - b = b // g - c = c // g - - a_0 = square_factor(a) - b_0 = square_factor(b) - c_0 = square_factor(c) - - a_1 = a // a_0**2 - b_1 = b // b_0**2 - c_1 = c // c_0**2 - - a_2, b_2, c_2 = pairwise_prime(a_1, b_1, c_1) + try: + assert len([k for k in coeff if coeff[k]]) == 3 + assert all(coeff[i**2] for i in var) + except AssertionError: + raise ValueError(filldedent(''' + coeff dict is not consistent with assumption of this routine: + coefficients should be those of an expression in the form + a*x**2 + b*y**2 + c*z**2 where a*b*c != 0.''')) + + (sqf_of_a, sqf_of_b, sqf_of_c), (a_1, b_1, c_1), (a_2, b_2, c_2) = \ + sqf_normal(a, b, c, steps=True) A = -a_2*c_2 B = -b_2*c_2 @@ -2082,20 +2076,19 @@ def _diop_ternary_quadratic_normal(var, coeff): if A < 0 and B < 0: return (None, None, None) - if (sqrt_mod(-b_2*c_2, a_2) == None or sqrt_mod(-c_2*a_2, b_2) == None or - sqrt_mod(-a_2*b_2, c_2) == None): + if ( + sqrt_mod(-b_2*c_2, a_2) is None or + 
sqrt_mod(-c_2*a_2, b_2) is None or + sqrt_mod(-a_2*b_2, c_2) is None): return (None, None, None) z_0, x_0, y_0 = descent(A, B) - if divisible(z_0, c_2) == True: - z_0 = z_0 // abs(c_2) - else: - x_0 = x_0*(S(z_0)/c_2).q - y_0 = y_0*(S(z_0)/c_2).q - z_0 = (S(z_0)/c_2).p + z_0, q = _rational_pq(z_0, abs(c_2)) + x_0 *= q + y_0 *= q - x_0, y_0, z_0 = simplified(x_0, y_0, z_0) + x_0, y_0, z_0 = _remove_gcd(x_0, y_0, z_0) # Holzer reduction if sign(a) == sign(b): @@ -2109,126 +2102,118 @@ def _diop_ternary_quadratic_normal(var, coeff): y_0 = reconstruct(a_1, c_1, y_0) z_0 = reconstruct(a_1, b_1, z_0) - l = ilcm(a_0, ilcm(b_0, c_0)) + sq_lcm = ilcm(sqf_of_a, sqf_of_b, sqf_of_c) - x_0 = abs(x_0*l//a_0) - y_0 = abs(y_0*l//b_0) - z_0 = abs(z_0*l//c_0) + x_0 = abs(x_0*sq_lcm//sqf_of_a) + y_0 = abs(y_0*sq_lcm//sqf_of_b) + z_0 = abs(z_0*sq_lcm//sqf_of_c) - return simplified(x_0, y_0, z_0) + return _remove_gcd(x_0, y_0, z_0) -def square_factor(a): +def sqf_normal(a, b, c, steps=False): """ - Returns an integer `c` s.t. `a = c^2k, \ c,k \in Z`. Here `k` is square - free. + Return `a', b', c'`, the coefficients of the square-free normal + form of `ax^2 + by^2 + cz^2 = 0`, where `a', b', c'` are pairwise + prime. If `steps` is True then also return three tuples: + `sq`, `sqf`, and `(a', b', c')` where `sq` contains the square + factors of `a`, `b` and `c` after removing the `gcd(a, b, c)`; + `sqf` contains the values of `a`, `b` and `c` after removing + both the `gcd(a, b, c)` and the square factors. + + The solutions for `ax^2 + by^2 + cz^2 = 0` can be + recovered from the solutions of `a'x^2 + b'y^2 + c'z^2 = 0`. Examples ======== - >>> from sympy.solvers.diophantine import square_factor - >>> square_factor(24) - 2 - >>> square_factor(36) - 6 - >>> square_factor(1) - 1 - """ - f = factorint(abs(a)) - c = 1 - - for p, e in f.items(): - c = c * p**(e//2) - - return c - - -def pairwise_prime(a, b, c): - """ - Transform `ax^2 + by^2 + cz^2 = 0` into an equivalent equation - `a'x^2 + b'y^2 + c'z^2 = 0` where `a', b', c'` are pairwise relatively - prime. + >>> from sympy.solvers.diophantine import sqf_normal + >>> sqf_normal(2 * 3**2 * 5, 2 * 5 * 11, 2 * 7**2 * 11) + (11, 1, 5) + >>> sqf_normal(2 * 3**2 * 5, 2 * 5 * 11, 2 * 7**2 * 11, True) + ((3, 1, 7), (5, 55, 11), (11, 1, 5)) - Returns a tuple containing `a', b', c'`. `\gcd(a, b, c)` should equal `1` - for this to work. The solutions for `ax^2 + by^2 + cz^2 = 0` can be - recovered from the solutions of `a'x^2 + b'y^2 + c'z^2 = 0`. + References + ========== - Examples - ======== + .. [1] Legendre's Theorem, Legrange's Descent, + http://public.csusm.edu/aitken_html/notes/legendre.pdf - >>> from sympy.solvers.diophantine import pairwise_prime - >>> pairwise_prime(6, 15, 10) - (5, 2, 3) See Also ======== - make_prime(), reocnstruct() - """ - a, b, c = make_prime(a, b, c) - b, c, a = make_prime(b, c, a) - c, a, b = make_prime(c, a, b) - - return a, b, c + reconstruct() + """ + ABC = A, B, C = _remove_gcd(a, b, c) + sq = tuple(square_factor(i) for i in ABC) + sqf = A, B, C = tuple([i//j**2 for i,j in zip(ABC, sq)]) + pc = igcd(A, B) + A /= pc + B /= pc + pa = igcd(B, C) + B /= pa + C /= pa + pb = igcd(A, C) + A /= pb + B /= pb + + A *= pa + B *= pb + C *= pc + + if steps: + return (sq, sqf, (A, B, C)) + else: + return A, B, C -def make_prime(a, b, c): +def square_factor(a): """ - Transform the equation `ax^2 + by^2 + cz^2 = 0` to an equivalent equation - `a'x^2 + b'y^2 + c'z^2 = 0` with `\gcd(a', b') = 1`. 
- - Returns a tuple `(a', b', c')` which satisfies above conditions. Note that - in the returned tuple `\gcd(a', c')` and `\gcd(b', c')` can take any value. + Returns an integer `c` s.t. `a = c^2k, \ c,k \in Z`. Here `k` is square + free. `a` can be given as an integer or a dictionary of factors. Examples ======== - >>> from sympy.solvers.diophantine import make_prime - >>> make_prime(4, 2, 7) - (2, 1, 14) + >>> from sympy.solvers.diophantine import square_factor + >>> square_factor(24) + 2 + >>> square_factor(-36*3) + 6 + >>> square_factor(1) + 1 + >>> square_factor({3: 2, 2: 1, -1: 1}) # -18 + 3 See Also ======== - - pairwaise_prime(), reconstruct() + sympy.ntheory.factor_.core """ - g = igcd(a, b) - - if g != 1: - f = factorint(g) - for p, e in f.items(): - a = a // p**e - b = b // p**e + f = a if isinstance(a, dict) else factorint(a) + return Mul(*[p**(e//2) for p, e in f.items()]) - if e % 2 == 1: - c = p*c - return a, b, c - - -def reconstruct(a, b, z): +def reconstruct(A, B, z): """ Reconstruct the `z` value of an equivalent solution of `ax^2 + by^2 + cz^2` - from the `z` value of a solution of a transformed version of the above - equation. + from the `z` value of a solution of the square-free normal form of the + equation, `a'*x^2 + b'*y^2 + c'*z^2`, where `a'`, `b'` and `c'` are square + free and `gcd(a', b', c') == 1`. """ - g = igcd(a, b) - - if g != 1: - f = factorint(g) - for p, e in f.items(): - if e %2 == 0: - z = z*p**(e//2) - else: - z = z*p**((e//2)+1) - + f = factorint(igcd(A, B)) + for p, e in f.items(): + if e != 1: + raise ValueError('a and b should be square-free') + z *= p return z def ldescent(A, B): """ - Uses Lagrange's method to find a non trivial solution to - `w^2 = Ax^2 + By^2`. + Return a non-trivial solution to `w^2 = Ax^2 + By^2` using + Lagrange's method; return None if there is no such solution. + . Here, `A \\neq 0` and `B \\neq 0` and `A` and `B` are square free. Output a tuple `(w_0, x_0, y_0)` which is a solution to the above equation. @@ -2255,17 +2240,21 @@ def ldescent(A, B): London Mathematical Society Student Texts 41, Cambridge University Press, Cambridge, 1998. .. [2] Efficient Solution of Rational Conices, J. E. Cremona and D. Rusin, - Mathematics of Computation, Volume 00, Number 0. + [online], Available: + http://eprints.nottingham.ac.uk/60/1/kvxefz87.pdf """ if abs(A) > abs(B): w, y, x = ldescent(B, A) return w, x, y if A == 1: - return (S.One, S.One, 0) + return (1, 1, 0) if B == 1: - return (S.One, 0, S.One) + return (1, 0, 1) + + if B == -1: # and A == -1 + return r = sqrt_mod(A, B) @@ -2279,27 +2268,24 @@ def ldescent(A, B): B_0 = None for i in div: - if isinstance(sqrt(abs(Q) // i), Integer): - B_0, d = sign(Q)*i, sqrt(abs(Q) // i) + sQ, _exact = integer_nthroot(abs(Q) // i, 2) + if _exact: + B_0, d = sign(Q)*i, sQ break - if B_0 != None: + if B_0 is not None: W, X, Y = ldescent(A, B_0) - return simplified((-A*X + r*W), (r*X - W), Y*(B_0*d)) - # In this module Descent will always be called with inputs which have solutions. + return _remove_gcd((-A*X + r*W), (r*X - W), Y*(B_0*d)) def descent(A, B): """ - Lagrange's `descent()` with lattice-reduction to find solutions to - `x^2 = Ay^2 + Bz^2`. + Returns a non-trivial solution, (x, y, z), to `x^2 = Ay^2 + Bz^2` + using Lagrange's descent method with lattice-reduction. `A` and `B` + are assumed to be valid for such a solution to exist. - Here `A` and `B` should be square free and pairwise prime. 
Always should be - called with suitable ``A`` and ``B`` so that the above equation has - solutions. - - This is more faster than the normal Lagrange's descent algorithm because - the gaussian reduction is used. + This is faster than the normal Lagrange's descent algorithm because + the Gaussian reduction is used. Examples ======== @@ -2327,8 +2313,6 @@ def descent(A, B): return (1, 0, 1) if A == 1: return (1, 1, 0) - if B == -1: - return (None, None, None) if B == -A: return (0, 1, 1) if B == A: @@ -2344,7 +2328,7 @@ def descent(A, B): x_1, z_1, y_1 = descent(A, t_1) - return simplified(x_0*x_1 + A*z_0*z_1, z_0*x_1 + x_0*z_1, t_1*t_2*y_1) + return _remove_gcd(x_0*x_1 + A*z_0*z_1, z_0*x_1 + x_0*z_1, t_1*t_2*y_1) def gaussian_reduce(w, a, b): @@ -2361,9 +2345,9 @@ def gaussian_reduce(w, a, b): ========== .. [1] Gaussian lattice Reduction [online]. Available: - http://home.ie.cuhk.edu.hk/~wkshum/wordpress/?p=404 + http://home.ie.cuhk.edu.hk/~wkshum/wordpress/?p=404 .. [2] Efficient Solution of Rational Conices, J. E. Cremona and D. Rusin, - Mathematics of Computation, Volume 00, Number 0. + Mathematics of Computation, Volume 00, Number 0. """ u = (0, 1) v = (1, 0) @@ -2394,8 +2378,8 @@ def dot(u, v, w, a, b): `v = (v_{1}, v_{2})` which is defined in order to reduce solution of the congruence equation `X^2 - aZ^2 \equiv 0 \ (mod \ b)`. """ - u_1, u_2 = u[:2] - v_1, v_2 = v[:2] + u_1, u_2 = u + v_1, v_2 = v return (w*u_1 + b*u_2)*(w*v_1 + b*v_2) + abs(a)*u_1*v_1 @@ -2405,39 +2389,72 @@ def norm(u, w, a, b): defined by `u \cdot v = (wu_{1} + bu_{2})(w*v_{1} + bv_{2}) + |a|*u_{1}*v_{1}` where `u = (u_{1}, u_{2})` and `v = (v_{1}, v_{2})`. """ - u_1, u_2 = u[:2] + u_1, u_2 = u return sqrt(dot((u_1, u_2), (u_1, u_2), w, a, b)) -def holzer(x_0, y_0, z_0, a, b, c): - """ - Simplify the solution `(x_{0}, y_{0}, z_{0})` of the equation - `ax^2 + by^2 = cz^2` with `a, b, c > 0` and `z_{0}^2 \geq \mid ab \mid` to - a new reduced solution `(x, y, z)` such that `z^2 \leq \mid ab \mid`. +def holzer(x, y, z, a, b, c): """ - while z_0 > sqrt(a*b): + Simplify the solution `(x, y, z)` of the equation + `ax^2 + by^2 = cz^2` with `a, b, c > 0` and `z^2 \geq \mid ab \mid` to + a new reduced solution `(x', y', z')` such that `z'^2 \leq \mid ab \mid`. - if c % 2 == 0: - k = c // 2 - u_0, v_0 = base_solution_linear(k, y_0, -x_0) + The algorithm is an interpretation of Mordell's reduction as described + on page 8 of Cremona and Rusin's paper [1]_ and the work of Mordell in + reference [2]_. - else: - k = 2*c - u_0, v_0 = base_solution_linear(c, y_0, -x_0) - - w = -(a*u_0*x_0 + b*v_0*y_0) // (c*z_0) + References + ========== - if c % 2 == 1: - if w % 2 != (a*u_0 + b*v_0) % 2: - w = w + 1 + .. [1] Efficient Solution of Rational Conices, J. E. Cremona and D. Rusin, + Mathematics of Computation, Volume 00, Number 0. + .. [2] Diophantine Equations, L. J. Mordell, page 48. 
- x = (x_0*(a*u_0**2 + b*v_0**2 + c*w**2) - 2*u_0*(a*u_0*x_0 + b*v_0*y_0 + c*w*z_0)) // k - y = (y_0*(a*u_0**2 + b*v_0**2 + c*w**2) - 2*v_0*(a*u_0*x_0 + b*v_0*y_0 + c*w*z_0)) // k - z = (z_0*(a*u_0**2 + b*v_0**2 + c*w**2) - 2*w*(a*u_0*x_0 + b*v_0*y_0 + c*w*z_0)) // k + """ + if _odd(c): + k = 2*c + else: + k = c//2 + + small = a*b*c + step = 0 + while True: + t1, t2, t3 = a*x**2, b*y**2, c*z**2 + # check that it's a solution + if t1 + t2 != t3: + if step == 0: + raise ValueError('bad starting solution') + break x_0, y_0, z_0 = x, y, z + if max(t1, t2, t3) <= small: + # Holzer condition + break - return x_0, y_0, z_0 + uv = u, v = base_solution_linear(k, y_0, -x_0) + if None in uv: + break + + p, q = -(a*u*x_0 + b*v*y_0), c*z_0 + r = Rational(p, q) + if _even(c): + w = _nint_or_floor(p, q) + assert abs(w - r) <= S.Half + else: + w = p//q # floor + if _odd(a*u + b*v + c*w): + w += 1 + assert abs(w - r) <= S.One + + A = (a*u**2 + b*v**2 + c*w**2) + B = (a*u*x_0 + b*v*y_0 + c*w*z_0) + x = Rational(x_0*A - 2*u*B, k) + y = Rational(y_0*A - 2*v*B, k) + z = Rational(z_0*A - 2*w*B, k) + assert all(i.is_Integer for i in (x, y, z)) + step += 1 + + return tuple([int(i) for i in (x_0, y_0, z_0)]) def diop_general_pythagorean(eq, param=symbols("m", integer=True)): @@ -2465,7 +2482,7 @@ def diop_general_pythagorean(eq, param=symbols("m", integer=True)): >>> diop_general_pythagorean(9*a**2 - 4*b**2 + 16*c**2 + 25*d**2 + e**2) (10*m1**2 + 10*m2**2 + 10*m3**2 - 10*m4**2, 15*m1**2 + 15*m2**2 + 15*m3**2 + 15*m4**2, 15*m1*m4, 12*m2*m4, 60*m3*m4) """ - var, coeff, diop_type = classify_diop(eq) + var, coeff, diop_type = classify_diop(eq, _dict=False) if diop_type == "general_pythagorean": return _diop_general_pythagorean(var, coeff, param) @@ -2475,7 +2492,7 @@ def _diop_general_pythagorean(var, coeff, t): if sign(coeff[var[0]**2]) + sign(coeff[var[1]**2]) + sign(coeff[var[2]**2]) < 0: for key in coeff.keys(): - coeff[key] = coeff[key] * -1 + coeff[key] = -coeff[key] n = len(var) index = 0 @@ -2484,26 +2501,19 @@ def _diop_general_pythagorean(var, coeff, t): if sign(coeff[v**2]) == -1: index = i - m = symbols(str(t) + "1:" + str(n), integer=True) - l = [] - ith = 0 - - for m_i in m: - ith = ith + m_i**2 - - l.append(ith - 2*m[n - 2]**2) - - for i in range(n - 2): - l.append(2*m[i]*m[n-2]) - - sol = l[:index] + [ith] + l[index:] + m = symbols('%s1:%i' % (t, n), integer=True) + ith = sum(m_i**2 for m_i in m) + L = [ith - 2*m[n - 2]**2] + L.extend([2*m[i]*m[n-2] for i in range(n - 2)]) + sol = L[:index] + [ith] + L[index:] lcm = 1 for i, v in enumerate(var): if i == index or (index > 0 and i == 0) or (index == 0 and i == 1): lcm = ilcm(lcm, sqrt(abs(coeff[v**2]))) else: - lcm = ilcm(lcm, sqrt(coeff[v**2]) if sqrt(coeff[v**2]) % 2 else sqrt(coeff[v**2]) // 2) + s = sqrt(coeff[v**2]) + lcm = ilcm(lcm, s if _odd(s) else s//2) for i, v in enumerate(var): sol[i] = (lcm*sol[i]) / sqrt(abs(coeff[v**2])) @@ -2515,17 +2525,14 @@ def diop_general_sum_of_squares(eq, limit=1): """ Solves the equation `x_{1}^2 + x_{2}^2 + . . . + x_{n}^2 - k = 0`. - Returns at most ``limit`` number of solutions. Currently there is no way to - set ``limit`` using higher level API's like ``diophantine()`` or - ``diop_solve()`` but that will be fixed soon. + Returns at most ``limit`` number of solutions. Usage ===== ``general_sum_of_squares(eq, limit)`` : Here ``eq`` is an expression which is assumed to be zero. Also, ``eq`` should be in the form, - `x_{1}^2 + x_{2}^2 + . . . + x_{n}^2 - k = 0`. 
At most ``limit`` number of - solutions are returned. + `x_{1}^2 + x_{2}^2 + . . . + x_{n}^2 - k = 0`. Details ======= @@ -2539,60 +2546,109 @@ def diop_general_sum_of_squares(eq, limit=1): >>> from sympy.solvers.diophantine import diop_general_sum_of_squares >>> from sympy.abc import a, b, c, d, e, f >>> diop_general_sum_of_squares(a**2 + b**2 + c**2 + d**2 + e**2 - 2345) - set([(0, 48, 5, 4, 0)]) + set([(2, 4, 4, 10, 47)]) Reference ========= - .. [1] Representing an Integer as a sum of three squares, [online], + .. [1] Representing an integer as a sum of three squares, [online], Available: http://www.proofwiki.org/wiki/Integer_as_Sum_of_Three_Squares """ - var, coeff, diop_type = classify_diop(eq) + var, coeff, diop_type = classify_diop(eq, _dict=False) if diop_type == "general_sum_of_squares": - return _diop_general_sum_of_squares(var, coeff, limit) - + return _diop_general_sum_of_squares(var, -coeff[1], limit) -def _diop_general_sum_of_squares(var, coeff, limit=1): +def _diop_general_sum_of_squares(var, k, limit=1): + # solves Eq(sum(i**2 for i in var), k) n = len(var) - k = -int(coeff[Integer(1)]) - s = set([]) + if n < 3: + raise ValueError('n must be greater than 2') - if k < 0: - return set([]) + s = set() - if n == 3: - s.add(sum_of_three_squares(k)) - elif n == 4: - s.add(sum_of_four_squares(k)) - else: + if k < 0 or limit < 1: + return s + + sign = [-1 if x.is_nonpositive else 1 for x in var] + negs = sign.count(-1) != 0 - m = n // 4 - f = partition(k, m, True) + took = 0 + for t in sum_of_squares(k, n, zeros=True): + if negs: + s.add(tuple([sign[i]*j for i, j in enumerate(t)])) + else: + s.add(t) + took += 1 + if took == limit: + break + return s - for j in range(limit): - soln = [] - try: - l = next(f) - except StopIteration: - break +def diop_general_sum_of_even_powers(eq, limit=1): + """ + Solves the equation `x_{1}^e + x_{2}^e + . . . + x_{n}^e - k = 0` + where `e` is an even, integer power. + + Returns at most ``limit`` number of solutions. + + Usage + ===== + + ``general_sum_of_even_powers(eq, limit)`` : Here ``eq`` is an expression which + is assumed to be zero. Also, ``eq`` should be in the form, + `x_{1}^e + x_{2}^e + . . . + x_{n}^e - k = 0`. + + Examples + ======== + + >>> from sympy.solvers.diophantine import diop_general_sum_of_even_powers + >>> from sympy.abc import a, b + >>> diop_general_sum_of_even_powers(a**4 + b**4 - (2**4 + 3**4)) + set([(2, 3)]) + + See Also + ======== + power_representation() + """ + var, coeff, diop_type = classify_diop(eq, _dict=False) + + if diop_type == "general_sum_of_even_powers": + for k in coeff.keys(): + if k.is_Pow and coeff[k]: + p = k.exp + return _diop_general_sum_of_even_powers(var, p, -coeff[1], limit) - for n_i in l: - a, b, c, d = sum_of_four_squares(n_i) - soln = soln + [a, b, c, d] - soln = soln + [0] * (n % 4) +def _diop_general_sum_of_even_powers(var, p, n, limit=1): + # solves Eq(sum(i**2 for i in var), n) + k = len(var) - s.add(tuple(soln)) + s = set() + if n < 0 or limit < 1: + return s + + sign = [-1 if x.is_nonpositive else 1 for x in var] + negs = sign.count(-1) != 0 + + took = 0 + for t in power_representation(n, p, k): + if negs: + s.add(tuple([sign[i]*j for i, j in enumerate(t)])) + else: + s.add(t) + took += 1 + if took == limit: + break return s -## Functions below this comment can be more suitably grouped under an Additive number theory module -## rather than the Diophantine equation module. 
+## Functions below this comment can be more suitably grouped under +## an Additive number theory module rather than the Diophantine +## equation module. def partition(n, k=None, zeros=False): @@ -2600,12 +2656,11 @@ def partition(n, k=None, zeros=False): Returns a generator that can be used to generate partitions of an integer `n`. - A partition of `n` is a set of positive integers which add upto `n`. For - example, partitions of 3 are 3 , 1 + 2, 1 + 1+ 1. A partition is returned + A partition of `n` is a set of positive integers which add up to `n`. For + example, partitions of 3 are 3, 1 + 2, 1 + 1 + 1. A partition is returned as a tuple. If ``k`` equals None, then all possible partitions are returned irrespective of their size, otherwise only the partitions of size ``k`` are - returned. If there are no partions of `n` with size `k` then an empty tuple - is returned. If the ``zero`` parameter is set to True then a suitable + returned. If the ``zero`` parameter is set to True then a suitable number of zeros are added at the end of every partition of size less than ``k``. @@ -2631,101 +2686,51 @@ def partition(n, k=None, zeros=False): (1, 1, 1, 2) >>> g = partition(5, 3) >>> next(g) - (3, 1, 1) + (1, 1, 3) >>> next(g) - (2, 2, 1) - - Reference - ========= + (1, 2, 2) + >>> g = partition(5, 3, zeros=True) + >>> next(g) + (0, 0, 5) - .. [1] Generating Integer Partitions, [online], - Available: http://jeromekelleher.net/partitions.php """ - if n < 1: - yield tuple() - - if k is not None: - if k < 1: - yield tuple() - - elif k > n: - if zeros: - for i in range(1, n): - for t in partition(n, i): - yield (t,) + (0,) * (k - i) - else: - yield tuple() - - else: - a = [1 for i in range(k)] - a[0] = n - k + 1 - - yield tuple(a) - - i = 1 - while a[0] >= n // k + 1: - j = 0 - - while j < i and j + 1 < k: - a[j] = a[j] - 1 - a[j + 1] = a[j + 1] + 1 - - yield tuple(a) - - j = j + 1 - - i = i + 1 - - if zeros: - for m in range(1, k): - for a in partition(n, m): - yield tuple(a) + (0,) * (k - m) - + from sympy.utilities.iterables import ordered_partitions + if not zeros or k is None: + for i in ordered_partitions(n, k): + yield tuple(i) else: - a = [0 for i in range(n + 1)] - l = 1 - y = n - 1 - - while l != 0: - x = a[l - 1] + 1 - l -= 1 - - while 2*x <= y: - a[l] = x - y -= x - l += 1 - - m = l + 1 - while x <= y: - a[l] = x - a[m] = y - yield tuple(a[:l + 2]) - x += 1 - y -= 1 - - a[l] = x + y - y = x + y - 1 - yield tuple(a[:l + 1]) + for m in range(1, k + 1): + for i in ordered_partitions(n, m): + i = tuple(i) + yield (0,)*(k - len(i)) + i def prime_as_sum_of_two_squares(p): """ - Represent a prime `p` which is congruent to 1 mod 4, as a sum of two - squares. + Represent a prime `p` as a unique sum of two squares; this can + only be done if the prime is congruent to 1 mod 4. Examples ======== >>> from sympy.solvers.diophantine import prime_as_sum_of_two_squares + >>> prime_as_sum_of_two_squares(7) # can't be done >>> prime_as_sum_of_two_squares(5) - (2, 1) + (1, 2) Reference ========= .. 
[1] Representing a number as a sum of four squares, [online], - Available: http://www.schorn.ch/howto.html + Available: http://schorn.ch/lagrange.html + + See Also + ======== + sum_of_squares() """ + if not p % 4 == 1: + return + if p % 8 == 5: b = 2 else: @@ -2740,7 +2745,7 @@ def prime_as_sum_of_two_squares(p): while b**2 > p: a, b = b, a % b - return (b, a % b) + return (int(a % b), int(b)) # convert from long def sum_of_three_squares(n): @@ -2748,7 +2753,7 @@ def sum_of_three_squares(n): Returns a 3-tuple `(a, b, c)` such that `a^2 + b^2 + c^2 = n` and `a, b, c \geq 0`. - Returns (None, None, None) if `n = 4^a(8m + 7)` for some `a, m \in Z`. See + Returns None if `n = 4^a(8m + 7)` for some `a, m \in Z`. See [1]_ for more details. Usage @@ -2761,13 +2766,17 @@ def sum_of_three_squares(n): >>> from sympy.solvers.diophantine import sum_of_three_squares >>> sum_of_three_squares(44542) - (207, 37, 18) + (18, 37, 207) References ========== .. [1] Representing a number as a sum of three squares, [online], - Available: http://www.schorn.ch/howto.html + Available: http://schorn.ch/lagrange.html + + See Also + ======== + sum_of_squares() """ special = {1:(1, 0, 0), 2:(1, 1, 0), 3:(1, 1, 1), 10: (1, 3, 0), 34: (3, 3, 4), 58:(3, 7, 0), 85:(6, 7, 0), 130:(3, 11, 0), 214:(3, 6, 13), 226:(8, 9, 9), 370:(8, 9, 15), @@ -2779,47 +2788,46 @@ def sum_of_three_squares(n): if n == 0: return (0, 0, 0) - while n % 4 == 0: - v = v + 1 - n = n // 4 + v = multiplicity(4, n) + n //= 4**v if n % 8 == 7: - return (None, None, None) + return if n in special.keys(): x, y, z = special[n] - return (2**v*x, 2**v*y, 2**v*z) + return _sorted_tuple(2**v*x, 2**v*y, 2**v*z) - l = int(sqrt(n)) + s, _exact = integer_nthroot(n, 2) - if n == l**2: - return (2**v*l, 0, 0) + if _exact: + return (2**v*s, 0, 0) x = None if n % 8 == 3: - l = l if l % 2 else l - 1 + s = s if _odd(s) else s - 1 - for i in range(l, -1, -2): + for i in range(s, -1, -2): if isprime((n - i**2) // 2): x = i break y, z = prime_as_sum_of_two_squares((n - x**2) // 2) - return (2**v*x, 2**v*(y + z), 2**v*abs(y - z)) + return _sorted_tuple(2**v*x, 2**v*(y + z), 2**v*abs(y - z)) if n % 8 == 2 or n % 8 == 6: - l = l if l % 2 else l - 1 + s = s if _odd(s) else s - 1 else: - l = l - 1 if l % 2 else l + s = s - 1 if _odd(s) else s - for i in range(l, -1, -2): + for i in range(s, -1, -2): if isprime(n - i**2): x = i break y, z = prime_as_sum_of_two_squares(n - x**2) - return (2**v*x, 2**v*y, 2**v*z) + return _sorted_tuple(2**v*x, 2**v*y, 2**v*z) def sum_of_four_squares(n): @@ -2838,23 +2846,25 @@ def sum_of_four_squares(n): >>> from sympy.solvers.diophantine import sum_of_four_squares >>> sum_of_four_squares(3456) - (8, 48, 32, 8) + (8, 8, 32, 48) >>> sum_of_four_squares(1294585930293) - (0, 1137796, 2161, 1234) + (0, 1234, 2161, 1137796) References ========== .. [1] Representing a number as a sum of four squares, [online], - Available: http://www.schorn.ch/howto.html + Available: http://schorn.ch/lagrange.html + + See Also + ======== + sum_of_squares() """ if n == 0: return (0, 0, 0, 0) - v = 0 - while n % 4 == 0: - v = v + 1 - n = n // 4 + v = multiplicity(4, n) + n //= 4**v if n % 8 == 7: d = 2 @@ -2867,70 +2877,218 @@ def sum_of_four_squares(n): x, y, z = sum_of_three_squares(n) - return (2**v*d, 2**v*x, 2**v*y, 2**v*z) + return _sorted_tuple(2**v*d, 2**v*x, 2**v*y, 2**v*z) def power_representation(n, p, k, zeros=False): """ - Returns a generator for finding k-tuples `(n_{1}, n_{2}, . . . n_{k})` such - that `n = n_{1}^p + n_{2}^p + . . . n_{k}^p`. 
- - Here `n` is a non-negative integer. StopIteration exception is raised after - all the solutions are generated, so should always be used within a try- - catch block. + Returns a generator for finding k-tuples of integers, + `(n_{1}, n_{2}, . . . n_{k})`, such that + `n = n_{1}^p + n_{2}^p + . . . n_{k}^p`. Usage ===== - ``power_representation(n, p, k, zeros)``: Represent number ``n`` as a sum - of ``k``, ``p``th powers. If ``zeros`` is true, then the solutions will - contain zeros. + ``power_representation(n, p, k, zeros)``: Represent non-negative number + ``n`` as a sum of ``k`` ``p``th powers. If ``zeros`` is true, then the + solutions is allowed to contain zeros. Examples ======== >>> from sympy.solvers.diophantine import power_representation - >>> f = power_representation(1729, 3, 2) # Represent 1729 as a sum of two cubes + + Represent 1729 as a sum of two cubes: + + >>> f = power_representation(1729, 3, 2) >>> next(f) - (12, 1) + (1, 12) >>> next(f) - (10, 9) + (9, 10) + + If the flag `zeros` is True, the solution may contain tuples with + zeros; any such solutions will be generated after the solutions + without zeros: + + >>> list(power_representation(125, 2, 3, zeros=True)) + [(3, 4, 10), (5, 6, 8), (0, 2, 11), (0, 5, 10)] + + For even `p` the `permute_sign` function can be used to get all + signed values: + + >>> from sympy.utilities.iterables import permute_signs + >>> list(permute_signs((1, 12))) + [(1, 12), (-1, 12), (1, -12), (-1, -12)] + + All possible signed permutations can also be obtained: + + >>> from sympy.utilities.iterables import signed_permutations + >>> list(signed_permutations((1, 12))) + [(1, 12), (-1, 12), (1, -12), (-1, -12), (12, 1), (-12, 1), (12, -1), (-12, -1)] """ - if p < 1 or k < 1 or n < 1: - raise ValueError("Expected: n > 0 and k >= 1 and p >= 1") + n, p, k = [as_int(i) for i in (n, p, k)] + + if n < 0: + if p % 2: + for t in power_representation(-n, p, k, zeros): + yield tuple(-i for i in t) + return + + if p < 1 or k < 1: + raise ValueError(filldedent(''' + Expecting positive integers for `(p, k)`, but got `(%s, %s)`''' + % (p, k))) + + if n == 0: + if zeros: + yield (0,)*k + return if k == 1: - if perfect_power(n): - yield (perfect_power(n)[0],) + if p == 1: + yield (n,) else: - yield tuple() - - elif p == 1: - for t in partition(n, k, zeros): + be = perfect_power(n) + if be: + b, e = be + d, r = divmod(e, p) + if not r: + yield (b**d,) + return + + if p == 1: + for t in partition(n, k, zeros=zeros): + yield t + return + + if p == 2: + feasible = _can_do_sum_of_squares(n, k) + if not feasible: + return + if not zeros and n > 33 and k >= 5 and k <= n and n - k in ( + 13, 10, 7, 5, 4, 2, 1): + '''Todd G. Will, "When Is n^2 a Sum of k Squares?", [online]. 
+ Available: https://www.maa.org/sites/default/files/Will-MMz-201037918.pdf''' + return + if feasible is 1: # it's prime and k == 2 + yield prime_as_sum_of_two_squares(n) + return + + if k == 2 and p > 2: + be = perfect_power(n) + if be and be[1] % p == 0: + return # Fermat: a**n + b**n = c**n has no solution for n > 2 + + if n >= k: + a = integer_nthroot(n - (k - 1), p)[0] + for t in pow_rep_recursive(a, k, n, [], p): yield t - else: - l = [] + if zeros: a = integer_nthroot(n, p)[0] + for i in range(1, k): + for t in pow_rep_recursive(a, i, n, [], p): + yield (0,) * (k - i) + t - for t in pow_rep_recursive(a, k, n, [], p): - yield t - if zeros: - for i in range(2, k): - for t in pow_rep_recursive(a, i, n, [], p): - yield t + (0,) * (k - i) +sum_of_powers = power_representation def pow_rep_recursive(n_i, k, n_remaining, terms, p): if k == 0 and n_remaining == 0: - yield tuple(terms) + yield _sorted_tuple(*terms) else: if n_i >= 1 and k > 0 and n_remaining >= 0: if n_i**p <= n_remaining: for t in pow_rep_recursive(n_i, k - 1, n_remaining - n_i**p, terms + [n_i], p): - yield t + yield _sorted_tuple(*t) for t in pow_rep_recursive(n_i - 1, k, n_remaining, terms, p): - yield t + yield _sorted_tuple(*t) + + +def sum_of_squares(n, k, zeros=False): + """Return a generator that yields the k-tuples of nonnegative + values, the squares of which sum to n. If zeros is False (default) + then the solution will not contain zeros. The nonnegative + elements of a tuple are sorted. + + * If k == 1 and n is square, (n,) is returned. + + * If k == 2 then n can only be written as a sum of squares if + every prime in the factorization of n that has the form + 4*k + 3 has an even multiplicity. If n is prime then + it can only be written as a sum of two squares if it is + in the form 4*k + 1. + + * if k == 3 then n can be written as a sum of squares if it does + not have the form 4**m*(8*k + 7). + + * all integers can be written as the sum of 4 squares. + + * if k > 4 then n can be partitioned and each partition can + be written as a sum of 4 squares; if n is not evenly divisible + by 4 then n can be written as a sum of squares only if the + an additional partition can be written as as sum of squares. + For example, if k = 6 then n is partitioned into two parts, + the first being written as a sum of 4 squares and the second + being written as a sum of 2 squares -- which can only be + done if the contition above for k = 2 can be met, so this will + automatically reject certain partitions of n. + + Examples + ======== + + >>> from sympy.solvers.diophantine import sum_of_squares + >>> list(sum_of_squares(25, 2)) + [(3, 4)] + >>> list(sum_of_squares(25, 2, True)) + [(3, 4), (0, 5)] + >>> list(sum_of_squares(25, 4)) + [(1, 2, 2, 4)] + + See Also + ======== + sympy.utilities.iterables.signed_permutations + """ + for t in power_representation(n, 2, k, zeros): + yield t + + +def _can_do_sum_of_squares(n, k): + """Return True if n can be written as the sum of k squares, + False if it can't, or 1 if k == 2 and n is prime (in which + case it *can* be written as a sum of two squares). A False + is returned only if it can't be written as k-squares, even + if 0s are allowed. 
+ """ + if k < 1: + return False + if n < 0: + return False + if n == 0: + return True + if k == 1: + return is_square(n) + if k == 2: + if n in (1, 2): + return True + if isprime(n): + if n % 4 == 1: + return 1 # signal that it was prime + return False + else: + f = factorint(n) + for p, m in f.items(): + # we can proceed iff no prime factor in the form 4*k + 3 + # has an odd multiplicity + if (p % 4 == 3) and m % 2: + return False + return True + if k == 3: + if (n//4**multiplicity(4, n)) % 8 == 7: + return False + # every number can be written as a sum of 4 squares; for k > 4 partitions + # can be 0 + return True diff --git a/sympy/tensor/array/arrayop.py b/sympy/tensor/array/arrayop.py index ce53884225..b9737f924c 100644 --- a/sympy/tensor/array/arrayop.py +++ b/sympy/tensor/array/arrayop.py @@ -123,39 +123,16 @@ def tensorcontraction(array, *contraction_axes): cum_shape[rank - i - 1] = _cumul _cumul *= int(array.shape[rank - i - 1]) - # DEFINITION: by absolute position it is meant the position along the one - # dimensional array containing all the tensor components. - - # Possible future work on this module: move computation of absolute - # positions to a class method. - - # Determine absolute positions of the uncontracted indices: remaining_indices = [[cum_shape[i]*j for j in range(array.shape[i])] for i in range(rank) if i not in taken_dims] - # Determine absolute positions of the contracted indices: - summed_deltas = [] - for axes_group in contraction_axes: - lidx = [] - for js in range(array.shape[axes_group[0]]): - lidx.append(sum([cum_shape[ig] * js for ig in axes_group])) - summed_deltas.append(lidx) - - # Compute the contracted array: - # - # 1. external for loops on all uncontracted indices. - # Uncontracted indices are determined by the combinatorial product of - # the absolute positions of the remaining indices. - # 2. internal loop on all contracted indices. - # It sum the values of the absolute contracted index and the absolute - # uncontracted index for the external loop. contracted_array = [] for icontrib in itertools.product(*remaining_indices): - index_base_position = sum(icontrib) + i = sum(icontrib) isum = S.Zero - for sum_to_index in itertools.product(*summed_deltas): - isum += array[index_base_position + sum(sum_to_index)] - + for axes_group in contraction_axes: + for js in range(array.shape[axes_group[0]]): + isum += array[i + sum([cum_shape[ig]*js for ig in axes_group])] contracted_array.append(isum) if len(remaining_indices) == 0: diff --git a/sympy/utilities/iterables.py b/sympy/utilities/iterables.py index 1ee65bf5d8..98aecd3f98 100644 --- a/sympy/utilities/iterables.py +++ b/sympy/utilities/iterables.py @@ -1309,7 +1309,7 @@ def multiset_partitions(multiset, m=None): def partitions(n, m=None, k=None, size=False): - """Generate all partitions of integer n (>= 0). + """Generate all partitions of positive integer, n. Parameters ========== @@ -1342,7 +1342,8 @@ def partitions(n, m=None, k=None, size=False): {1: 6} The maximum number of parts in the partition (the sum of the values in - the returned dict) are limited with m: + the returned dict) are limited with m (default value, None, gives + partitions from 1 through n): >>> for p in partitions(6, m=2): # doctest: +SKIP ... 
print(p) @@ -1377,20 +1378,34 @@ def partitions(n, m=None, k=None, size=False): sympy.combinatorics.partitions.IntegerPartition """ - if n < 0: - raise ValueError("n must be >= 0") - if m == 0: - raise ValueError("m must be > 0") - m = min(m or n, n) - if m < 1: - raise ValueError("maximum numbers in partition, m, must be > 0") - k = min(k or n, n) - if k < 1: - raise ValueError("maximum value in partition, k, must be > 0") + if ( + n <= 0 or + m is not None and m < 1 or + k is not None and k < 1 or + m and k and m*k < n): + # the empty set is the only way to handle these inputs + # and returning {} to represent it is consistent with + # the counting convention, e.g. nT(0) == 1. + if size: + yield 0, {} + else: + yield {} + return + + if m is None: + m = n + else: + m = min(m, n) - if m*k < n: + if n == 0: + if size: + yield 1, {0: 1} + else: + yield {0: 1} return + k = min(k or n, n) + n, m, k = as_int(n), as_int(m), as_int(k) q, r = divmod(n, k) ms = {k: q} @@ -1445,6 +1460,140 @@ def partitions(n, m=None, k=None, size=False): yield ms +def ordered_partitions(n, m=None, sort=True): + """Generates ordered partitions of integer ``n``. + + Parameters + ========== + + ``m`` : integer (default gives partitions of all sizes) else only + those with size m. In addition, if ``m`` is not None then + partitions are generated *in place* (see examples). + ``sort`` : bool (default True) controls whether partitions are + returned in sorted order when ``m`` is not None; when False, + the partitions are returned as fast as possible with elements + sorted, but when m|n the partitions will not be in + ascending lexicographical order. + + Examples + ======== + + >>> from sympy.utilities.iterables import ordered_partitions + + All partitions of 5 in ascending lexicographical: + + >>> for p in ordered_partitions(5): + ... print(p) + [1, 1, 1, 1, 1] + [1, 1, 1, 2] + [1, 1, 3] + [1, 2, 2] + [1, 4] + [2, 3] + [5] + + Only partitions of 5 with two parts: + + >>> for p in ordered_partitions(5, 2): + ... print(p) + [1, 4] + [2, 3] + + When ``m`` is given, a given list objects will be used more than + once for speed reasons so you will not see the correct partitions + unless you make a copy of each as it is generated: + + >>> [p for p in ordered_partitions(7, 3)] + [[1, 1, 1], [1, 1, 1], [1, 1, 1], [2, 2, 2]] + >>> [list(p) for p in ordered_partitions(7, 3)] + [[1, 1, 5], [1, 2, 4], [1, 3, 3], [2, 2, 3]] + + When ``n`` is a multiple of ``m``, the elements are still sorted + but the partitions themselves will be *unordered* if sort is False; + the default is to return them in ascending lexicographical order. + + >>> for p in ordered_partitions(6, 2): + ... print(p) + [1, 5] + [2, 4] + [3, 3] + + But if speed is more important than ordering, sort can be set to + False: + + >>> for p in ordered_partitions(6, 2, sort=False): + ... print(p) + [1, 5] + [3, 3] + [2, 4] + + References + ========== + + .. [1] Generating Integer Partitions, [online], + Available: http://jeromekelleher.net/generating-integer-partitions.html + .. [2] Jerome Kelleher and Barry O'Sullivan, "Generating All + Partitions: A Comparison Of Two Encodings", [online], + Available: http://arxiv.org/pdf/0909.2331v2.pdf + """ + if n < 1 or m is not None and m < 1: + # the empty set is the only way to handle these inputs + # and returning {} to represent it is consistent with + # the counting convention, e.g. nT(0) == 1. 
+ yield [] + return + + if m is None: + # The list `a`'s leading elements contain the partition in which + # y is the biggest element and x is either the same as y or the + # 2nd largest element; v and w are adjacent element indices + # to which x and y are being assigned, respectively. + a = [1]*n + y = -1 + v = n + while v > 0: + v -= 1 + x = a[v] + 1 + while y >= 2 * x: + a[v] = x + y -= x + v += 1 + w = v + 1 + while x <= y: + a[v] = x + a[w] = y + yield a[:w + 1] + x += 1 + y -= 1 + a[v] = x + y + y = a[v] - 1 + yield a[:w] + elif m == 1: + yield [n] + elif n == m: + yield [1]*n + else: + # recursively generate partitions of size m + for b in range(1, n//m + 1): + a = [b]*m + x = n - b*m + if not x: + if sort: + yield a + elif not sort and x <= m: + for ax in ordered_partitions(x, sort=False): + mi = len(ax) + a[-mi:] = [i + b for i in ax] + yield a + a[-mi:] = [b]*mi + else: + for mi in range(1, m): + for ax in ordered_partitions(x, mi, sort=True): + a[-mi:] = [i + b for i in ax] + yield a + a[-mi:] = [b]*mi + + def binary_partitions(n): """ Generates the binary partition of n. @@ -2084,3 +2233,38 @@ def partition(lista, bins): else: raise ValueError( 'ordered must be one of 00, 01, 10 or 11, not %s' % ordered) + + +def permute_signs(t): + """Return iterator in which the signs of non-zero elements + of t are permuted. + + Examples + ======== + + >>> from sympy.utilities.iterables import permute_signs + >>> list(permute_signs((0, 1, 2))) + [(0, 1, 2), (0, -1, 2), (0, 1, -2), (0, -1, -2)] + """ + for signs in cartes(*[(1, -1)]*(len(t) - t.count(0))): + signs = list(signs) + yield type(t)([i*signs.pop() if i else i for i in t]) + + +def signed_permutations(t): + """Return iterator in which the signs of non-zero elements + of t and the order of the elements are permuted. + + Examples + ======== + + >>> from sympy.utilities.iterables import signed_permutations + >>> list(signed_permutations((0, 1, 2))) + [(0, 1, 2), (0, -1, 2), (0, 1, -2), (0, -1, -2), (0, 2, 1), + (0, -2, 1), (0, 2, -1), (0, -2, -1), (1, 0, 2), (-1, 0, 2), + (1, 0, -2), (-1, 0, -2), (1, 2, 0), (-1, 2, 0), (1, -2, 0), + (-1, -2, 0), (2, 0, 1), (-2, 0, 1), (2, 0, -1), (-2, 0, -1), + (2, 1, 0), (-2, 1, 0), (2, -1, 0), (-2, -1, 0)] + """ + return (type(t)(i) for j in permutations(t) + for i in permute_signs(j))
diophantine misses trivial solution ```py In [6]: diophantine((3*(x**2 + y**2 + z**2) - 14*(x*y + y*z + z*x))) Out[6]: set([]) ``` See http://math.stackexchange.com/questions/1133888/hard-number-theory-problem. The only solution is `(0, 0, 0)`. Nothing in the docstring indicates that it should skip trivial solutions.
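One quick way to see the claim in the report concretely is a brute-force search of the quoted equation over a small box of integers. The sketch below is purely illustrative (it is not part of the fix, and the search bound of 20 is an arbitrary assumption); per the report and the linked math.stackexchange discussion, only the trivial triple should turn up.

```py
# Illustrative sanity check of the reported equation
# 3*(x**2 + y**2 + z**2) - 14*(x*y + y*z + z*x) == 0
# over the box [-20, 20]^3 (bound chosen arbitrarily for illustration).
from itertools import product

def f(a, b, c):
    return 3*(a*a + b*b + c*c) - 14*(a*b + b*c + c*a)

solutions = [t for t in product(range(-20, 21), repeat=3) if f(*t) == 0]
print(solutions)  # expected, per the report: [(0, 0, 0)]
```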
sympy/sympy
diff --git a/sympy/core/tests/test_expr.py b/sympy/core/tests/test_expr.py index 2f3247f44a..2ad2d33f5b 100644 --- a/sympy/core/tests/test_expr.py +++ b/sympy/core/tests/test_expr.py @@ -1305,6 +1305,7 @@ def test_as_coeff_Add(): assert (Integer(3) + x).as_coeff_Add() == (Integer(3), x) assert (Rational(3, 4) + x).as_coeff_Add() == (Rational(3, 4), x) assert (Float(5.0) + x).as_coeff_Add() == (Float(5.0), x) + assert (Float(5.0) + x).as_coeff_Add(rational=True) == (0, Float(5.0) + x) assert (Integer(3) + x + y).as_coeff_Add() == (Integer(3), x + y) assert (Rational(3, 4) + x + y).as_coeff_Add() == (Rational(3, 4), x + y) diff --git a/sympy/printing/tests/test_llvmjit.py b/sympy/printing/tests/test_llvmjit.py index 5bb9134855..59d9c63259 100644 --- a/sympy/printing/tests/test_llvmjit.py +++ b/sympy/printing/tests/test_llvmjit.py @@ -1,6 +1,5 @@ from sympy.external import import_module -from sympy.utilities.pytest import raises import ctypes @@ -134,92 +133,11 @@ def test_callback_alt_two(): assert isclose(jit_res, res) -def test_multiple_statements(): - # Match return from CSE - e = [[(b, 4.0*a)], [b + 5]] - f = g.llvm_callable([a], e) - b_val = e[0][0][1].subs({a: 1.5}) - res = float(e[1][0].subs({b: b_val}).evalf()) - jit_res = f(1.5) - assert isclose(jit_res, res) - - f_callback = g.llvm_callable([a], e, callback_type='scipy.integrate.test') - m = ctypes.c_int(1) - array_type = ctypes.c_double * 1 - array = array_type(1.5) - jit_callback_res = f_callback(m, array) - assert isclose(jit_callback_res, res) - - -def test_cse(): - e = a*a + b*b + sympy.exp(-a*a - b*b) - e2 = sympy.cse(e) - f = g.llvm_callable([a, b], e2) - res = float(e.subs({a: 2.3, b: 0.1}).evalf()) - jit_res = f(2.3, 0.1) - - assert isclose(jit_res, res) - - -def eval_cse(e, sub_dict): - tmp_dict = dict() - for tmp_name, tmp_expr in e[0]: - e2 = tmp_expr.subs(sub_dict) - e3 = e2.subs(tmp_dict) - tmp_dict[tmp_name] = e3 - return [e.subs(sub_dict).subs(tmp_dict) for e in e[1]] - - -def test_cse_multiple(): - e1 = a*a - e2 = a*a + b*b - e3 = sympy.cse([e1, e2]) - - raises(NotImplementedError, - lambda: g.llvm_callable([a, b], e3, callback_type='scipy.integrate')) - - f = g.llvm_callable([a, b], e3) - jit_res = f(0.1, 1.5) - assert len(jit_res) == 2 - res = eval_cse(e3, {a: 0.1, b: 1.5}) - assert isclose(res[0], jit_res[0]) - assert isclose(res[1], jit_res[1]) - - -def test_callback_cubature_multiple(): - e1 = a*a - e2 = a*a + b*b - e3 = sympy.cse([e1, e2, 4*e2]) - f = g.llvm_callable([a, b], e3, callback_type='cubature') - - # Number of input variables - ndim = 2 - # Number of output expression values - outdim = 3 - - m = ctypes.c_int(ndim) - fdim = ctypes.c_int(outdim) - array_type = ctypes.c_double * ndim - out_array_type = ctypes.c_double * outdim - inp = {a: 0.2, b: 1.5} - array = array_type(inp[a], inp[b]) - out_array = out_array_type() - jit_ret = f(m, array, None, fdim, out_array) - - assert jit_ret == 0 - - res = eval_cse(e3, inp) - - assert isclose(out_array[0], res[0]) - assert isclose(out_array[1], res[1]) - assert isclose(out_array[2], res[2]) - - -def test_symbol_not_found(): - e = a*a + b - raises(LookupError, lambda: g.llvm_callable([a], e)) - - def test_bad_callback(): e = a - raises(ValueError, lambda: g.llvm_callable([a], e, callback_type='bad_callback')) + try: + g.llvm_callable([a], e, callback_type='bad_callback') + except ValueError: + pass + else: + assert False, "Should raise exception with unknown callback" diff --git a/sympy/simplify/tests/test_simplify.py b/sympy/simplify/tests/test_simplify.py 
index 63f3a209c6..ddb48b6988 100644 --- a/sympy/simplify/tests/test_simplify.py +++ b/sympy/simplify/tests/test_simplify.py @@ -644,3 +644,7 @@ def test_clear_coefficients(): assert clear_coefficients(4*y*(6*x + 3) - 2, x) == (y*(2*x + 1), x/12 + S(1)/6) assert clear_coefficients(sqrt(2) - 2) == (sqrt(2), 2) assert clear_coefficients(4*sqrt(2) - 2) == (sqrt(2), S.Half) + assert clear_coefficients(S(3), x) == (0, x - 3) + assert clear_coefficients(S.Infinity, x) == (S.Infinity, x) + assert clear_coefficients(-S.Pi, x) == (S.Pi, -x) + assert clear_coefficients(2 - S.Pi/3, x) == (pi, -3*x + 6) diff --git a/sympy/solvers/tests/test_diophantine.py b/sympy/solvers/tests/test_diophantine.py index 3617763686..95ed44ad4b 100644 --- a/sympy/solvers/tests/test_diophantine.py +++ b/sympy/solvers/tests/test_diophantine.py @@ -1,27 +1,65 @@ +from sympy import (Add, factor_list, igcd, Matrix, Mul, S, simplify, + Symbol, symbols, Eq, pi, factorint, oo, powsimp) +from sympy.core.function import _mexpand +from sympy.core.compatibility import range +from sympy.functions.elementary.trigonometric import sin from sympy.solvers.diophantine import (descent, diop_bf_DN, diop_DN, diop_solve, diophantine, divisible, equivalent, find_DN, ldescent, length, - pairwise_prime, partition, power_representation, + reconstruct, partition, power_representation, prime_as_sum_of_two_squares, square_factor, sum_of_four_squares, - sum_of_three_squares, transformation_to_DN, transformation_to_normal) + sum_of_three_squares, transformation_to_DN, transformation_to_normal, + classify_diop, base_solution_linear, cornacchia, sqf_normal, + diop_ternary_quadratic_normal, _diop_ternary_quadratic_normal, + gaussian_reduce, holzer,diop_general_pythagorean, + _diop_general_sum_of_squares, _nint_or_floor, _odd, _even, + _remove_gcd, check_param, parametrize_ternary_quadratic, + diop_ternary_quadratic, diop_linear, diop_quadratic, + diop_general_sum_of_squares, sum_of_powers, sum_of_squares, + diop_general_sum_of_even_powers, _can_do_sum_of_squares) +from sympy.utilities import default_sort_key -from sympy import (Add, factor_list, igcd, Integer, Matrix, Mul, S, simplify, - Subs, Symbol, symbols) +from sympy.utilities.pytest import slow, raises, XFAIL + +a, b, c, d, p, q, x, y, z, w, t, u, v, X, Y, Z = symbols( + "a, b, c, d, p, q, x, y, z, w, t, u, v, X, Y, Z", integer=True) +t_0, t_1, t_2, t_3, t_4, t_5, t_6 = symbols("t_:7", integer=True) +m1, m2, m3 = symbols('m1:4', integer=True) +n1 = symbols('n1', integer=True) -from sympy.core.function import _mexpand -from sympy.core.compatibility import range -from sympy.functions.elementary.trigonometric import sin -from sympy.utilities.pytest import slow, raises -from sympy.utilities import default_sort_key -x, y, z, w, t, X, Y, Z = symbols("x, y, z, w, t, X, Y, Z", integer=True) -t_0, t_1, t_2, t_3, t_4, t_5, t_6 = symbols("t_0, t_1, t_2, t_3, t_4, t_5, t_6", integer=True) +def diop_simplify(eq): + return _mexpand(powsimp(_mexpand(eq))) + def test_input_format(): raises(TypeError, lambda: diophantine(sin(x))) + raises(TypeError, lambda: diophantine(3)) + raises(TypeError, lambda: diophantine(x/pi - 3)) + def test_univariate(): - assert diop_solve((x - 1)*(x - 2)**2) == set([(Integer(1),), (Integer(2),)]) - assert diop_solve((x - 1)*(x - 2)) == set([(Integer(1),), (Integer(2),)]) + assert diop_solve((x - 1)*(x - 2)**2) == set([(1,), (2,)]) + assert diop_solve((x - 1)*(x - 2)) == set([(1,), (2,)]) + + +def test_classify_diop(): + raises(TypeError, lambda: classify_diop(x**2/3 - 1)) + 
raises(ValueError, lambda: classify_diop(1)) + raises(NotImplementedError, lambda: classify_diop(w*x*y*z - 1)) + assert classify_diop(14*x**2 + 15*x - 42) == ( + [x], {1: -42, x: 15, x**2: 14}, 'univariate') + assert classify_diop(x*y + z) == ( + [x, y, z], {x*y: 1, z: 1}, 'inhomogeneous_ternary_quadratic') + assert classify_diop(x*y + z + w + x**2) == ( + [w, x, y, z], {x*y: 1, w: 1, x**2: 1, z: 1}, 'inhomogeneous_general_quadratic') + assert classify_diop(x*y + x*z + x**2 + 1) == ( + [x, y, z], {x*y: 1, x*z: 1, x**2: 1, 1: 1}, 'inhomogeneous_general_quadratic') + assert classify_diop(x*y + z + w + 42) == ( + [w, x, y, z], {x*y: 1, w: 1, 1: 42, z: 1}, 'inhomogeneous_general_quadratic') + assert classify_diop(x*y + z*w) == ( + [w, x, y, z], {x*y: 1, w*z: 1}, 'homogeneous_general_quadratic') + assert classify_diop(x*y**2 + 1) == ( + [x, y], {x*y**2: 1, 1: 1}, 'cubic_thue') def test_linear(): @@ -36,33 +74,37 @@ def test_linear(): assert diop_solve(y + x - 0) == (t_0, -t_0) assert diop_solve(0*x - y - 5) == (-5,) assert diop_solve(3*y + 2*x - 5) == (3*t_0 - 5, -2*t_0 + 5) - assert diop_solve(2*x - 3*y - 5) == (-3*t_0 - 5, -2*t_0 - 5) - assert diop_solve(-2*x - 3*y - 5) == (-3*t_0 + 5, 2*t_0 - 5) + assert diop_solve(2*x - 3*y - 5) == (3*t_0 - 5, 2*t_0 - 5) + assert diop_solve(-2*x - 3*y - 5) == (3*t_0 + 5, -2*t_0 - 5) assert diop_solve(7*x + 5*y) == (5*t_0, -7*t_0) assert diop_solve(2*x + 4*y) == (2*t_0, -t_0) assert diop_solve(4*x + 6*y - 4) == (3*t_0 - 2, -2*t_0 + 2) assert diop_solve(4*x + 6*y - 3) == (None, None) - assert diop_solve(0*x + 3*y - 4*z + 5) == (-4*t_0 + 5, -3*t_0 + 5) - assert diop_solve(4*x + 3*y - 4*z + 5) == (t_0, -4*t_1 + 5, t_0 - 3*t_1 + 5) + assert diop_solve(0*x + 3*y - 4*z + 5) == (4*t_0 + 5, 3*t_0 + 5) + assert diop_solve(4*x + 3*y - 4*z + 5) == (t_0, 8*t_0 + 4*t_1 + 5, 7*t_0 + 3*t_1 + 5) + assert diop_solve(4*x + 3*y - 4*z + 5, None) == (0, 5, 5) assert diop_solve(4*x + 2*y + 8*z - 5) == (None, None, None) - assert diop_solve(5*x + 7*y - 2*z - 6) == (t_0, -7*t_0 - 2*t_1 + 6, -22*t_0 - 7*t_1 + 18) + assert diop_solve(5*x + 7*y - 2*z - 6) == (t_0, -3*t_0 + 2*t_1 + 6, -8*t_0 + 7*t_1 + 18) assert diop_solve(3*x - 6*y + 12*z - 9) == (2*t_0 + 3, t_0 + 2*t_1, t_1) - assert diop_solve(6*w + 9*x + 20*y - z) == (t_0, t_1, -t_1 - t_2, 6*t_0 - 11*t_1 - 20*t_2) + assert diop_solve(6*w + 9*x + 20*y - z) == (t_0, t_1, t_1 + t_2, 6*t_0 + 29*t_1 + 20*t_2) + + # to ignore constant factors, use diophantine + raises(TypeError, lambda: diop_solve(x/2)) def test_quadratic_simple_hyperbolic_case(): # Simple Hyperbolic case: A = C = 0 and B != 0 assert diop_solve(3*x*y + 34*x - 12*y + 1) == \ - set([(-Integer(133), -Integer(11)), (Integer(5), -Integer(57))]) + set([(-133, -11), (5, -57)]) assert diop_solve(6*x*y + 2*x + 3*y + 1) == set([]) - assert diop_solve(-13*x*y + 2*x - 4*y - 54) == set([(Integer(27), Integer(0))]) - assert diop_solve(-27*x*y - 30*x - 12*y - 54) == set([(-Integer(14), -Integer(1))]) - assert diop_solve(2*x*y + 5*x + 56*y + 7) == set([(-Integer(161), -Integer(3)),\ - (-Integer(47),-Integer(6)), (-Integer(35), -Integer(12)), (-Integer(29), -Integer(69)),\ - (-Integer(27), Integer(64)), (-Integer(21), Integer(7)),(-Integer(9), Integer(1)),\ - (Integer(105), -Integer(2))]) + assert diop_solve(-13*x*y + 2*x - 4*y - 54) == set([(27, 0)]) + assert diop_solve(-27*x*y - 30*x - 12*y - 54) == set([(-14, -1)]) + assert diop_solve(2*x*y + 5*x + 56*y + 7) == set([(-161, -3),\ + (-47,-6), (-35, -12), (-29, -69),\ + (-27, 64), (-21, 7),(-9, 1),\ + (105, -2)]) assert diop_solve(6*x*y 
+ 9*x + 2*y + 3) == set([]) - assert diop_solve(x*y + x + y + 1) == set([(-Integer(1), t), (t, -Integer(1))]) + assert diop_solve(x*y + x + y + 1) == set([(-1, t), (t, -1)]) assert diophantine(48*x*y) @@ -71,12 +113,12 @@ def test_quadratic_elliptical_case(): # Two test cases highlighted require lot of memory due to quadratic_congruence() method. # This above method should be replaced by Pernici's square_mod() method when his PR gets merged. - #assert diop_solve(42*x**2 + 8*x*y + 15*y**2 + 23*x + 17*y - 4915) == set([(-Integer(11), -Integer(1))]) + #assert diop_solve(42*x**2 + 8*x*y + 15*y**2 + 23*x + 17*y - 4915) == set([(-11, -1)]) assert diop_solve(4*x**2 + 3*y**2 + 5*x - 11*y + 12) == set([]) - assert diop_solve(x**2 + y**2 + 2*x + 2*y + 2) == set([(-Integer(1), -Integer(1))]) - #assert diop_solve(15*x**2 - 9*x*y + 14*y**2 - 23*x - 14*y - 4950) == set([(-Integer(15), Integer(6))]) + assert diop_solve(x**2 + y**2 + 2*x + 2*y + 2) == set([(-1, -1)]) + #assert diop_solve(15*x**2 - 9*x*y + 14*y**2 - 23*x - 14*y - 4950) == set([(-15, 6)]) assert diop_solve(10*x**2 + 12*x*y + 12*y**2 - 34) == \ - set([(Integer(1), -Integer(2)), (-Integer(1), -Integer(1)),(Integer(1), Integer(1)), (-Integer(1), Integer(2))]) + set([(1, -2), (-1, -1),(1, 1), (-1, 2)]) def test_quadratic_parabolic_case(): @@ -115,7 +157,10 @@ def test_quadratic_non_perfect_square(): def test_issue_9106(): - assert check_integrality(-48 - 2*x*(3*x - 1) + y*(3*y - 1)) + eq = -48 - 2*x*(3*x - 1) + y*(3*y - 1) + v = (x, y) + for sol in diophantine(eq): + assert not diop_simplify(eq.xreplace(dict(zip(v, sol)))) @slow @@ -151,8 +196,8 @@ def test_DN(): # When equation is x**2 + y**2 = N # Solutions are interchangeable - assert diop_DN(-1, 5) == [(2, 1)] - assert diop_DN(-1, 169) == [(12, 5), (0, 13)] + assert diop_DN(-1, 5) == [(1, 2)] + assert diop_DN(-1, 169) == [(5, 12), (0, 13)] # D > 0 and D is not a square @@ -184,8 +229,8 @@ def test_DN(): assert diop_DN(13, 27) == [(220, 61), (40, 11), (768, 213), (12, 3)] assert set(diop_DN(157, 12)) == \ - set([(Integer(13), Integer(1)), (Integer(10663), Integer(851)), (Integer(579160), Integer(46222)), \ - (Integer(483790960),Integer(38610722)), (Integer(26277068347), Integer(2097138361)), (Integer(21950079635497), Integer(1751807067011))]) + set([(13, 1), (10663, 851), (579160, 46222), \ + (483790960,38610722), (26277068347, 2097138361), (21950079635497, 1751807067011)]) assert diop_DN(13, 25) == [(3245, 900)] assert diop_DN(192, 18) == [] assert diop_DN(23, 13) == [(-6, 1), (6, 1)] @@ -199,6 +244,9 @@ def test_DN(): assert diop_DN(123, -23) == [(-10, 1), (10, 1)] + assert diop_DN(0, 0, t) == [(0, t)] + assert diop_DN(0, -1, t) == [] + def test_bf_pell(): assert diop_bf_DN(13, -4) == [(3, 1), (-3, 1), (36, 10)] @@ -208,15 +256,22 @@ def test_bf_pell(): assert diop_bf_DN(89, -8) == [(9, 1), (-9, 1)] assert diop_bf_DN(21257, -1) == [(13913102721304, 95427381109)] assert diop_bf_DN(340, -4) == [(756, 41)] + assert diop_bf_DN(-1, 0, t) == [(0, 0)] + assert diop_bf_DN(0, 0, t) == [(0, t)] + assert diop_bf_DN(4, 0, t) == [(2*t, t), (-2*t, t)] + assert diop_bf_DN(3, 0, t) == [(0, 0)] + assert diop_bf_DN(1, -2, t) == [] def test_length(): + assert length(2, 1, 0) == 1 assert length(-2, 4, 5) == 3 - assert length(-5, 4, 17) == 4 + assert length(-5, 4, 17) == 5 assert length(0, 4, 13) == 6 - assert length(-31, 8, 613) == 67 + assert length(-31, 8, 613) == 69 assert length(7, 13, 11) == 23 assert length(-40, 5, 23) == 4 + assert length(1, 6, 4) == 2 def is_pell_transformation_ok(eq): @@ -230,7 
+285,7 @@ def is_pell_transformation_ok(eq): A, B = transformation_to_DN(eq) u = (A*Matrix([X, Y]) + B)[0] v = (A*Matrix([X, Y]) + B)[1] - simplified = _mexpand(Subs(eq, (x, y), (u, v)).doit()) + simplified = diop_simplify(eq.subs(zip((x, y), (u, v)))) coeff = dict([reversed(t.as_independent(*[X, Y])) for t in simplified.args]) @@ -238,12 +293,13 @@ def is_pell_transformation_ok(eq): if term in coeff.keys(): return False - for term in [X**2, Y**2, Integer(1)]: + for term in [X**2, Y**2, 1]: if term not in coeff.keys(): - coeff[term] = Integer(0) + coeff[term] = 0 if coeff[X**2] != 0: - return isinstance(S(coeff[Y**2])/coeff[X**2], Integer) and isinstance(S(coeff[Integer(1)])/coeff[X**2], Integer) + return divisible(coeff[Y**2], coeff[X**2]) and \ + divisible(coeff[1], coeff[X**2]) return True @@ -276,6 +332,7 @@ def test_ldescent(): for a, b in u: w, x, y = ldescent(a, b) assert a*x**2 + b*y**2 == w**2 + assert ldescent(-1, -1) is None def test_diop_ternary_quadratic_normal(): @@ -296,7 +353,7 @@ def test_diop_ternary_quadratic_normal(): def is_normal_transformation_ok(eq): A = transformation_to_normal(eq) X, Y, Z = A*Matrix([x, y, z]) - simplified = _mexpand(Subs(eq, (x, y, z), (X, Y, Z)).doit()) + simplified = diop_simplify(eq.subs(zip((x, y, z), (X, Y, Z)))) coeff = dict([reversed(t.as_independent(*[X, Y, Z])) for t in simplified.args]) for term in [X*Y, Y*Z, X*Z]: @@ -314,6 +371,9 @@ def test_transformation_to_normal(): assert is_normal_transformation_ok(x**2 + 23*x*y - 34*y*z + 12*x*z) assert is_normal_transformation_ok(z**2 + 34*x*y - 23*y*z + x*z) assert is_normal_transformation_ok(x**2 + y**2 + z**2 - x*y - y*z - x*z) + assert is_normal_transformation_ok(x**2 + 2*y*z + 3*z**2) + assert is_normal_transformation_ok(x*y + 2*x*z + 3*y*z) + assert is_normal_transformation_ok(2*x*z + 3*y*z) def test_diop_ternary_quadratic(): @@ -333,15 +393,23 @@ def test_diop_ternary_quadratic(): assert check_solutions(x**2 + 3*y**2 + z**2 - 13*x*y - 16*y*z + 12*x*z) assert check_solutions(x*y - 7*y*z + 13*x*z) - -def test_pairwise_prime(): - assert pairwise_prime(6, 10, 15) == (5, 3, 2) - assert pairwise_prime(2, 3, 5) == (2, 3, 5) - assert pairwise_prime(1, 4, 7) == (1, 4, 7) - assert pairwise_prime(4, 6, 5) == (1, 6, 5) - assert pairwise_prime(6, 10, -15) == (5, 3, -2) - assert pairwise_prime(-6, -10, -15) == (-5, -3, -2) - assert pairwise_prime(4, -6, -5) == (1, -6, -5) + assert diop_ternary_quadratic_normal(x**2 + y**2 + z**2) == (None, None, None) + assert diop_ternary_quadratic_normal(x**2 + y**2) is None + raises(ValueError, lambda: + _diop_ternary_quadratic_normal((x, y, z), + {x*y: 1, x**2: 2, y**2: 3, z**2: 0})) + eq = -2*x*y - 6*x*z + 7*y**2 - 3*y*z + 4*z**2 + assert diop_ternary_quadratic(eq) == (7, 2, 0) + assert diop_ternary_quadratic_normal(4*x**2 + 5*y**2 - z**2) == \ + (1, 0, 2) + assert diop_ternary_quadratic(x*y + 2*y*z) == \ + (-2, 0, n1) + eq = -5*x*y - 8*x*z - 3*y*z + 8*z**2 + assert parametrize_ternary_quadratic(eq) == \ + (64*p**2 - 24*p*q, -64*p*q + 64*q**2, 40*p*q) + # this cannot be tested with diophantine because it will + # factor into a product + assert diop_solve(x*y + 2*y*z) == (-4*p*q, -2*n1*p**2 + 2*p**2, 2*p*q) def test_square_factor(): @@ -355,6 +423,7 @@ def test_square_factor(): assert square_factor(52) == 2 assert square_factor(49) == 7 assert square_factor(392) == 14 + assert square_factor(factorint(-12)) == 2 def test_parametrize_ternary_quadratic(): @@ -385,6 +454,13 @@ def test_descent(): for a, b in u: w, x, y = descent(a, b) assert a*x**2 + b*y**2 == w**2 + 
# the docstring warns against bad input, so these are expected results + # - can't both be negative + raises(TypeError, lambda: descent(-1, -3)) + # A can't be zero unless B != 1 + raises(ZeroDivisionError, lambda: descent(0, 3)) + # supposed to be square-free + raises(TypeError, lambda: descent(4, 3)) def test_diophantine(): @@ -405,6 +481,54 @@ def test_diophantine(): assert check_solutions(y**2 - 7*x*y + 4*y*z) assert check_solutions(x**2 - 2*x + 1) + assert diophantine(x - y) == diophantine(Eq(x, y)) + assert diophantine(3*x*pi - 2*y*pi) == set([(2*t_0, 3*t_0)]) + assert diophantine(x**2 + y**2 + z**2 - 14) == set([(1, 2, 3)]) + assert diophantine(x**2 + 15*x/14 - 3) == set() + # test issue 11049 + eq = 92*x**2 - 99*y**2 - z**2 + coeff = eq.as_coefficients_dict() + assert _diop_ternary_quadratic_normal((x, y, z), coeff) == \ + (9, 7, 51) + assert diophantine(eq) == set([( + 891*p**2 + 9*q**2, -693*p**2 - 102*p*q + 7*q**2, + 5049*p**2 - 1386*p*q - 51*q**2)]) + eq = 2*x**2 + 2*y**2 - z**2 + coeff = eq.as_coefficients_dict() + assert _diop_ternary_quadratic_normal((x, y, z), coeff) == \ + (1, 1, 2) + assert diophantine(eq) == set([( + 2*p**2 - q**2, -2*p**2 + 4*p*q - q**2, + 4*p**2 - 4*p*q + 2*q**2)]) + eq = 411*x**2+57*y**2-221*z**2 + coeff = eq.as_coefficients_dict() + assert _diop_ternary_quadratic_normal((x, y, z), coeff) == \ + (2021, 2645, 3066) + assert diophantine(eq) == \ + set([(115197*p**2 - 446641*q**2, -150765*p**2 + 1355172*p*q - + 584545*q**2, 174762*p**2 - 301530*p*q + 677586*q**2)]) + eq = 573*x**2+267*y**2-984*z**2 + coeff = eq.as_coefficients_dict() + assert _diop_ternary_quadratic_normal((x, y, z), coeff) == \ + (49, 233, 127) + assert diophantine(eq) == \ + set([(4361*p**2 - 16072*q**2, -20737*p**2 + 83312*p*q - 76424*q**2, + 11303*p**2 - 41474*p*q + 41656*q**2)]) + # this produces factors during reconstruction + eq = x**2 + 3*y**2 - 12*z**2 + coeff = eq.as_coefficients_dict() + assert _diop_ternary_quadratic_normal((x, y, z), coeff) == \ + (0, 2, 1) + assert diophantine(eq) == \ + set([(24*p*q, 2*p**2 - 24*q**2, p**2 + 12*q**2)]) + # solvers have not been written for every type + raises(NotImplementedError, lambda: diophantine(x*y**2 + 1)) + + # rational expressions + assert diophantine(1/x) == set() + assert diophantine(1/x + 1/y - S.Half) + set([(6, 3), (-2, 1), (4, 4), (1, -2), (3, 6)]) + def test_general_pythagorean(): from sympy.abc import a, b, c, d, e @@ -418,51 +542,56 @@ def test_general_pythagorean(): assert check_solutions(16*a**2 - b**2 + 9*c**2 + d**2 + 25*e**2) -def test_diop_general_sum_of_squares(): - from sympy.abc import a, b, c, d, e, f, g, h, i - - assert check_solutions(a**2 + b**2 + c**2 - 5) - assert check_solutions(a**2 + b**2 + c**2 - 57) - assert check_solutions(a**2 + b**2 + c**2 - 349560695) - assert check_solutions(a**2 + b**2 + c**2 + d**2 - 304) - assert check_solutions(a**2 + b**2 + c**2 + d**2 - 23345) - assert check_solutions(a**2 + b**2 + c**2 + d**2 - 23345494) - assert check_solutions(a**2 + b**2 + c**2 + d**2 + e**2 - 1344545) - assert check_solutions(a**2 + b**2 + c**2 + d**2 + e**2 + f**2 - 6933949) - assert check_solutions(a**2 + b**2 + c**2 + d**2 + e**2 + f**2 + g**2 - 753934) - assert check_solutions(a**2 + b**2 + c**2 + d**2 + e**2 + f**2 + g**2 + h**2 - 5) - assert check_solutions(a**2 + b**2 + c**2 + d**2 + e**2 + f**2 + g**2 + h**2 + i**2 - 693940) - - -def test_partition(): - tests = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] - - for test in tests: - f = partition(test) - while True: - try: - l = next(f) - except 
StopIteration: - break - - tests_k = [8, 10] - - for test in tests_k: - for k in range(8): - f = partition(test, k) - - while True: - try: - l = next(f) - assert len(l) == k - except StopIteration: - break +def test_diop_general_sum_of_squares_quick(): + for i in range(3, 10): + assert check_solutions(sum(i**2 for i in symbols(':%i' % i)) - i) + raises(ValueError, lambda: _diop_general_sum_of_squares((x, y), 2)) + assert _diop_general_sum_of_squares((x, y, z), -2) == set() + eq = x**2 + y**2 + z**2 - (1 + 4 + 9) + assert diop_general_sum_of_squares(eq) == \ + set([(1, 2, 3)]) + eq = u**2 + v**2 + x**2 + y**2 + z**2 - 1313 + assert len(diop_general_sum_of_squares(eq, 3)) == 3 + # issue 11016 + var = symbols(':5') + (symbols('6', negative=True),) + eq = Add(*[i**2 for i in var]) - 112 + assert diophantine(eq) == set( + [(0, 1, 1, 5, 6, -7), (1, 1, 1, 3, 6, -8), (2, 3, 3, 4, + 5, -7), (0, 1, 1, 1, 3, -10), (0, 0, 4, 4, 4, -8), (1, 2, 3, + 3, 5, -8), (0, 1, 2, 3, 7, -7), (2, 2, 4, 4, 6, -6), (1, 1, + 3, 4, 6, -7), (0, 2, 3, 3, 3, -9), (0, 0, 2, 2, 2, -10), (1, + 1, 2, 3, 4, -9), (0, 1, 1, 2, 5, -9), (0, 0, 2, 6, 6, -6), + (1, 3, 4, 5, 5, -6), (0, 2, 2, 2, 6, -8), (0, 3, 3, 3, 6, + -7), (0, 2, 3, 5, 5, -7), (0, 1, 5, 5, 5, -6)]) + # handle negated squares with signsimp + assert diophantine(12 - x**2 - y**2 - z**2) == set([(2, 2, 2)]) + # diophantine handles simplification, so classify_diop should + # not have to look for additional patterns that are removed + # by diophantine + eq = a**2 + b**2 + c**2 + d**2 - 4 + raises(NotImplementedError, lambda: classify_diop(-eq)) + + +def test_diop_partition(): + for n in [8, 10]: + for k in range(1, 8): + for p in partition(n, k): + assert len(p) == k + assert [p for p in partition(3, 5)] == [] + assert [list(p) for p in partition(3, 5, 1)] == [ + [0, 0, 0, 0, 3], [0, 0, 0, 1, 2], [0, 0, 1, 1, 1]] + assert list(partition(0)) == [()] + assert list(partition(1, 0)) == [()] + assert [list(i) for i in partition(3)] == [[1, 1, 1], [1, 2], [3]] def test_prime_as_sum_of_two_squares(): for i in [5, 13, 17, 29, 37, 41, 2341, 3557, 34841, 64601]: a, b = prime_as_sum_of_two_squares(i) assert a**2 + b**2 == i + assert prime_as_sum_of_two_squares(7) is None + ans = prime_as_sum_of_two_squares(800029) + assert ans == (450, 773) and type(ans[0]) is int def test_sum_of_three_squares(): @@ -471,17 +600,25 @@ def test_sum_of_three_squares(): a, b, c = sum_of_three_squares(i) assert a**2 + b**2 + c**2 == i - assert sum_of_three_squares(7) == (None, None, None) - assert sum_of_three_squares((4**5)*15) == (None, None, None) + assert sum_of_three_squares(7) is None + assert sum_of_three_squares((4**5)*15) is None + assert sum_of_three_squares(25) == (5, 0, 0) + assert sum_of_three_squares(4) == (0, 0, 2) def test_sum_of_four_squares(): from random import randint - for i in range(10): - n = randint(1, 100000000000000) - a, b, c, d = sum_of_four_squares(n) - assert a**2 + b**2 + c**2 + d**2 == n + # this should never fail + n = randint(1, 100000000000000) + assert sum(i**2 for i in sum_of_four_squares(n)) == n + + assert sum_of_four_squares(0) == (0, 0, 0, 0) + assert sum_of_four_squares(14) == (0, 1, 2, 3) + assert sum_of_four_squares(15) == (1, 1, 2, 3) + assert sum_of_four_squares(18) == (1, 2, 2, 3) + assert sum_of_four_squares(19) == (0, 1, 3, 3) + assert sum_of_four_squares(48) == (0, 4, 4, 4) def test_power_representation(): @@ -505,6 +642,29 @@ def test_power_representation(): except StopIteration: break + assert list(power_representation(20, 2, 4, True)) == \ + [(1, 1, 
3, 3), (0, 0, 2, 4)] + raises(ValueError, lambda: list(power_representation(1.2, 2, 2))) + raises(ValueError, lambda: list(power_representation(2, 0, 2))) + raises(ValueError, lambda: list(power_representation(2, 2, 0))) + assert list(power_representation(-1, 2, 2)) == [] + assert list(power_representation(1, 1, 1)) == [(1,)] + assert list(power_representation(3, 2, 1)) == [] + assert list(power_representation(4, 2, 1)) == [(2,)] + assert list(power_representation(3**4, 4, 6, zeros=True)) == \ + [(1, 2, 2, 2, 2, 2), (0, 0, 0, 0, 0, 3)] + assert list(power_representation(3**4, 4, 5, zeros=False)) == [] + assert list(power_representation(-2, 3, 2)) == [(-1, -1)] + assert list(power_representation(-2, 4, 2)) == [] + assert list(power_representation(0, 3, 2, True)) == [(0, 0)] + assert list(power_representation(0, 3, 2, False)) == [] + # when we are dealing with squares, do feasibility checks + assert len(list(power_representation(4**10*(8*10 + 7), 2, 3))) == 0 + # there will be a recursion error if these aren't recognized + big = 2**30 + for i in [13, 10, 7, 5, 4, 2, 1]: + assert list(sum_of_powers(big, 2, big - i)) == [] + def test_assumptions(): """ @@ -529,44 +689,182 @@ def check_solutions(eq): """ s = diophantine(eq) - terms = factor_list(eq)[1] + factors = Mul.make_args(eq) var = list(eq.free_symbols) var.sort(key=default_sort_key) - okay = True - - while len(s) and okay: + while s: solution = s.pop() - - okay = False - - for term in terms: - subeq = term[0] - - if simplify(_mexpand(Subs(subeq, var, solution).doit())) == 0: - okay = True + for f in factors: + if diop_simplify(f.subs(zip(var, solution))) == 0: break + else: + return False + return True - return okay - - -def check_integrality(eq): - """ - Check that the solutions returned by diophantine() are integers. - This should be seldom needed except for general quadratic - equations which are solved with rational transformations. - """ - def _check_values(x): - """ Check a number of values. 
""" - for i in range(-4, 4): - if not isinstance(simplify(x.subs(t, i)), Integer): - return False - return True - - for soln in diophantine(eq, param=t): - for x in soln: - if not _check_values(x): - return False - return True +def test_diopcoverage(): + eq = (2*x + y + 1)**2 + assert diop_solve(eq) == set([(t_0, -2*t_0 - 1)]) + eq = 2*x**2 + 6*x*y + 12*x + 4*y**2 + 18*y + 18 + assert diop_solve(eq) == set([(t_0, -t_0 - 3), (2*t_0 - 3, -t_0)]) + assert diop_quadratic(x + y**2 - 3) == set([(-t**2 + 3, -t)]) + + assert diop_linear(x + y - 3) == (t_0, 3 - t_0) + + assert base_solution_linear(0, 1, 2, t=None) == (0, 0) + ans = (3*t - 1, -2*t + 1) + assert base_solution_linear(4, 8, 12, t) == ans + assert base_solution_linear(4, 8, 12, t=None) == tuple(_.subs(t, 0) for _ in ans) + + assert cornacchia(1, 1, 20) is None + assert cornacchia(1, 1, 5) == set([(1, 2)]) + assert cornacchia(1, 2, 17) == set([(3, 2)]) + + raises(ValueError, lambda: reconstruct(4, 20, 1)) + + assert gaussian_reduce(4, 1, 3) == (1, 1) + eq = -w**2 - x**2 - y**2 + z**2 + + assert diop_general_pythagorean(eq) == \ + diop_general_pythagorean(-eq) == \ + (m1**2 + m2**2 - m3**2, 2*m1*m3, + 2*m2*m3, m1**2 + m2**2 + m3**2) + + assert check_param(S(3) + x/3, S(4) + x/2, S(2), x) == (None, None) + assert check_param(S(3)/2, S(4) + x, S(2), x) == (None, None) + assert check_param(S(4) + x, S(3)/2, S(2), x) == (None, None) + + assert _nint_or_floor(16, 10) == 2 + assert _odd(1) == (not _even(1)) == True + assert _odd(0) == (not _even(0)) == False + assert _remove_gcd(2, 4, 6) == (1, 2, 3) + raises(TypeError, lambda: _remove_gcd((2, 4, 6))) + assert sqf_normal(2 * 3**2 * 5, 2 * 5 * 11, 2 * 7**2 * 11) == \ + (11, 1, 5) + + # it's ok if these pass some day when the solvers are implemented + raises(NotImplementedError, lambda: diophantine(x**2 + y**2 + x*y + 2*y*z - 12)) + raises(NotImplementedError, lambda: diophantine(x**3 + y**2)) + + +def test_holzer(): + # if the input is good, don't let it diverge in holzer() + # (but see test_fail_holzer below) + assert holzer(2, 7, 13, 4, 79, 23) == (2, 7, 13) + + # None in uv condition met; solution is not Holzer reduced + # so this will hopefully change but is here for coverage + assert holzer(2, 6, 2, 1, 1, 10) == (2, 6, 2) + + raises(ValueError, lambda: holzer(2, 7, 14, 4, 79, 23)) + + +@XFAIL +def test_fail_holzer(): + eq = lambda x, y, z: a*x**2 + b*y**2 - c*z**2 + a, b, c = 4, 79, 23 + x, y, z = xyz = 26, 1, 11 + X, Y, Z = ans = 2, 7, 13 + assert eq(*xyz) == 0 + assert eq(*ans) == 0 + assert max(a*x**2, b*y**2, c*z**2) <= a*b*c + assert max(a*X**2, b*Y**2, c*Z**2) <= a*b*c + h = holzer(x, y, z, a, b, c) + assert h == ans # it would be nice to get the smaller soln + + +def test_issue_9539(): + assert diophantine(6*w + 9*y + 20*x - z) == \ + set([(t_0, t_1, t_1 + t_2, 6*t_0 + 29*t_1 + 9*t_2)]) + + +def test_issue_8943(): + assert diophantine( + (3*(x**2 + y**2 + z**2) - 14*(x*y + y*z + z*x))) == \ + set([(0, 0, 0)]) + + +def test_diop_sum_of_even_powers(): + eq = x**4 + y**4 + z**4 - 2673 + assert diop_solve(eq) == set([(3, 6, 6), (2, 4, 7)]) + assert diop_general_sum_of_even_powers(eq, 2) == set( + [(3, 6, 6), (2, 4, 7)]) + raises(NotImplementedError, lambda: diop_general_sum_of_even_powers(-eq, 2)) + neg = symbols('neg', negative=True) + eq = x**4 + y**4 + neg**4 - 2673 + assert diop_general_sum_of_even_powers(eq) == set([(-2, 4, 7)]) + assert diophantine(x**4 + y**4 + 2) == set() + assert diop_general_sum_of_even_powers(x**4 + y**4 - 2, limit=0) == set() + + +def 
test_sum_of_squares_powers(): + tru = set([ + (0, 0, 1, 1, 11), (0, 0, 5, 7, 7), (0, 1, 3, 7, 8), (0, 1, 4, 5, 9), + (0, 3, 4, 7, 7), (0, 3, 5, 5, 8), (1, 1, 2, 6, 9), (1, 1, 6, 6, 7), + (1, 2, 3, 3, 10), (1, 3, 4, 4, 9), (1, 5, 5, 6, 6), (2, 2, 3, 5, 9), + (2, 3, 5, 6, 7), (3, 3, 4, 5, 8)]) + eq = u**2 + v**2 + x**2 + y**2 + z**2 - 123 + ans = diop_general_sum_of_squares(eq, oo) # allow oo to be used + assert len(ans) == 14 + + raises(ValueError, lambda: list(sum_of_squares(10, -1))) + assert list(sum_of_squares(-10, 2)) == [] + assert list(sum_of_squares(2, 3)) == [] + assert list(sum_of_squares(0, 3, True)) == [(0, 0, 0)] + assert list(sum_of_squares(0, 3)) == [] + assert list(sum_of_squares(4, 1)) == [(2,)] + assert list(sum_of_squares(5, 1)) == [] + assert list(sum_of_squares(50, 2)) == [(1, 7), (5, 5)] + assert list(sum_of_squares(11, 5, True)) == [ + (1, 1, 1, 2, 2), (0, 0, 1, 1, 3)] + assert list(sum_of_squares(8, 8)) == [(1, 1, 1, 1, 1, 1, 1, 1)] + + assert [len(list(sum_of_squares(i, 5, True))) for i in range(30)] == [ + 1, 1, 1, 1, 2, + 2, 1, 1, 2, 2, + 2, 2, 2, 3, 2, + 1, 3, 3, 3, 3, + 4, 3, 3, 2, 2, + 4, 4, 4, 4, 5] + assert [len(list(sum_of_squares(i, 5))) for i in range(30)] == [ + 0, 0, 0, 0, 0, + 1, 0, 0, 1, 0, + 0, 1, 0, 1, 1, + 0, 1, 1, 0, 1, + 2, 1, 1, 1, 1, + 1, 1, 1, 1, 3] + for i in range(30): + s1 = set(sum_of_squares(i, 5, True)) + assert not s1 or all(sum(j**2 for j in t) == i for t in s1) + s2 = set(sum_of_squares(i, 5)) + assert all(sum(j**2 for j in t) == i for t in s2) + + raises(ValueError, lambda: list(sum_of_powers(2, -1, 1))) + raises(ValueError, lambda: list(sum_of_powers(2, 1, -1))) + assert list(sum_of_powers(-2, 3, 2)) == [(-1, -1)] + assert list(sum_of_powers(-2, 4, 2)) == [] + assert list(sum_of_powers(2, 1, 1)) == [(2,)] + assert list(sum_of_powers(2, 1, 3, True)) == [(0, 0, 2), (0, 1, 1)] + assert list(sum_of_powers(5, 1, 2, True)) == [(0, 5), (1, 4), (2, 3)] + assert list(sum_of_powers(6, 2, 2)) == [] + assert list(sum_of_powers(3**5, 3, 1)) == [] + assert list(sum_of_powers(3**6, 3, 1)) == [(9,)] and (9**3 == 3**6) + assert list(sum_of_powers(2**1000, 5, 2)) == [] + + +def test__can_do_sum_of_squares(): + assert _can_do_sum_of_squares(3, -1) is False + assert _can_do_sum_of_squares(-3, 1) is False + assert _can_do_sum_of_squares(0, 1) + assert _can_do_sum_of_squares(4, 1) + assert _can_do_sum_of_squares(1, 2) + assert _can_do_sum_of_squares(2, 2) + assert _can_do_sum_of_squares(3, 2) is False + + +def test_issue_9538(): + eq = x - 3*y + 2 + assert diophantine(eq, syms=[y,x]) == set([(t_0, 3*t_0 - 2)]) + raises(TypeError, lambda: diophantine(eq, syms=set([y,x]))) diff --git a/sympy/tensor/array/tests/test_arrayop.py b/sympy/tensor/array/tests/test_arrayop.py index 7992120eb3..bd63fb881f 100644 --- a/sympy/tensor/array/tests/test_arrayop.py +++ b/sympy/tensor/array/tests/test_arrayop.py @@ -65,22 +65,3 @@ def test_derivative_by_array(): assert derive_by_array([[x, y], [z, t]], [x, y]) == Array([[[1, 0], [0, 0]], [[0, 1], [0, 0]]]) assert derive_by_array([[x, y], [z, t]], [[x, y], [z, t]]) == Array([[[[1, 0], [0, 0]], [[0, 1], [0, 0]]], [[[0, 0], [1, 0]], [[0, 0], [0, 1]]]]) - - -def test_issue_emerged_while_discussing_10972(): - ua = Array([-1,0]) - Fa = Array([[0, 1], [-1, 0]]) - po = tensorproduct(Fa, ua, Fa, ua) - assert tensorcontraction(po, (1, 2), (4, 5)) == Array([[0, 0], [0, 1]]) - - sa = symbols('a0:144') - po = Array(sa, [2, 2, 3, 3, 2, 2]) - assert tensorcontraction(po, (0, 1), (2, 3), (4, 5)) == sa[0] + sa[108] + sa[111] + sa[124] + 
sa[127] + sa[140] + sa[143] + sa[16] + sa[19] + sa[3] + sa[32] + sa[35] - assert tensorcontraction(po, (0, 1, 4, 5), (2, 3)) == sa[0] + sa[111] + sa[127] + sa[143] + sa[16] + sa[32] - assert tensorcontraction(po, (0, 1), (4, 5)) == Array([[sa[0] + sa[108] + sa[111] + sa[3], sa[112] + sa[115] + sa[4] + sa[7], - sa[11] + sa[116] + sa[119] + sa[8]], [sa[12] + sa[120] + sa[123] + sa[15], - sa[124] + sa[127] + sa[16] + sa[19], sa[128] + sa[131] + sa[20] + sa[23]], - [sa[132] + sa[135] + sa[24] + sa[27], sa[136] + sa[139] + sa[28] + sa[31], - sa[140] + sa[143] + sa[32] + sa[35]]]) - assert tensorcontraction(po, (0, 1), (2, 3)) == Array([[sa[0] + sa[108] + sa[124] + sa[140] + sa[16] + sa[32], sa[1] + sa[109] + sa[125] + sa[141] + sa[17] + sa[33]], - [sa[110] + sa[126] + sa[142] + sa[18] + sa[2] + sa[34], sa[111] + sa[127] + sa[143] + sa[19] + sa[3] + sa[35]]]) diff --git a/sympy/utilities/tests/test_iterables.py b/sympy/utilities/tests/test_iterables.py index 4b0a944abb..ddb406c5e6 100644 --- a/sympy/utilities/tests/test_iterables.py +++ b/sympy/utilities/tests/test_iterables.py @@ -15,7 +15,7 @@ multiset_permutations, necklaces, numbered_symbols, ordered, partitions, permutations, postfixes, postorder_traversal, prefixes, reshape, rotate_left, rotate_right, runs, sift, subsets, take, topological_sort, - unflatten, uniq, variations) + unflatten, uniq, variations, ordered_partitions) from sympy.utilities.enumerative import ( factoring_visitor, multiset_partitions_taocp ) @@ -390,6 +390,15 @@ def test(): def test_partitions(): + ans = [[{}], [(0, {})]] + for i in range(2): + assert list(partitions(0, size=i)) == ans[i] + assert list(partitions(1, 0, size=i)) == ans[i] + assert list(partitions(6, 2, 2, size=i)) == ans[i] + assert list(partitions(6, 2, None, size=i)) != ans[i] + assert list(partitions(6, None, 2, size=i)) != ans[i] + assert list(partitions(6, 2, 0, size=i)) == ans[i] + assert [p.copy() for p in partitions(6, k=2)] == [ {2: 3}, {1: 2, 2: 2}, {1: 4, 2: 1}, {1: 6}] @@ -397,8 +406,6 @@ def test_partitions(): {3: 2}, {1: 1, 2: 1, 3: 1}, {1: 3, 3: 1}, {2: 3}, {1: 2, 2: 2}, {1: 4, 2: 1}, {1: 6}] - assert [p.copy() for p in partitions(6, k=2, m=2)] == [] - assert [p.copy() for p in partitions(8, k=4, m=3)] == [ {4: 2}, {1: 1, 3: 1, 4: 1}, {2: 2, 4: 1}, {2: 1, 3: 2}] == [ i.copy() for i in partitions(8, k=4, m=3) if all(k <= 4 for k in i) @@ -411,7 +418,6 @@ def test_partitions(): {1: 1, 3: 1}, {2: 2}, {1: 2, 2: 1}, {1: 4}] == [ i.copy() for i in partitions(4) if all(k <= 3 for k in i)] - raises(ValueError, lambda: list(partitions(3, 0))) # Consistency check on output of _partitions and RGS_unrank. # This provides a sanity test on both routines. Also verifies that @@ -422,7 +428,7 @@ def test_partitions(): i = 0 for m, q in _set_partitions(n): assert q == RGS_unrank(i, n) - i = i+1 + i += 1 assert i == RGS_enum(n) def test_binary_partitions(): @@ -707,3 +713,16 @@ def test__partition(): ['b', 'e'], ['a', 'c'], ['d']] output = (3, [1, 0, 1, 2, 0]) assert _partition('abcde', *output) == [['b', 'e'], ['a', 'c'], ['d']] + + +def test_ordered_partitions(): + from sympy.functions.combinatorial.numbers import nT + f = ordered_partitions + assert list(f(0, 1)) == [[]] + assert list(f(1, 0)) == [[]] + for i in range(1, 7): + for j in [None] + list(range(1, i)): + assert ( + sum(1 for p in f(i, j, 1)) == + sum(1 for p in f(i, j, 0)) == + nT(i, j))
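A minimal sketch of the solver API the sympy test patch above exercises — the import path is assumed to be the single `sympy.solvers.diophantine` module this commit's test file targets, and the expected values are copied from assertions visible in the diff:

```python
# Sketch only: import paths assumed for this commit's module layout;
# expected values taken from the assertions in the test diff above.
from sympy import symbols
from sympy.solvers.diophantine import diophantine, sum_of_four_squares

x, y, z = symbols("x y z", integer=True)

# Ternary sum-of-squares equation solved by diophantine().
assert diophantine(x**2 + y**2 + z**2 - 14) == set([(1, 2, 3)])

# Four-square decomposition helper exercised by test_sum_of_four_squares.
assert sum_of_four_squares(15) == (1, 1, 2, 3)
```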
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 3, "issue_text_score": 2, "test_score": -1 }, "num_modified_files": 13 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "mpmath>=0.19", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.7", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi @ file:///croot/certifi_1671487769961/work/certifi exceptiongroup==1.2.2 importlib-metadata==6.7.0 iniconfig==2.0.0 mpmath==1.3.0 packaging==24.0 pluggy==1.2.0 pytest==7.4.4 -e git+https://github.com/sympy/sympy.git@70f5f1ff0a1d6a204544845680bb3ce780fa17c7#egg=sympy tomli==2.0.1 typing_extensions==4.7.1 zipp==3.15.0
name: sympy channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2022.12.7=py37h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=22.3.1=py37h06a4308_0 - python=3.7.16=h7a1cb2a_0 - readline=8.2=h5eee18b_0 - setuptools=65.6.3=py37h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.38.4=py37h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - exceptiongroup==1.2.2 - importlib-metadata==6.7.0 - iniconfig==2.0.0 - mpmath==1.3.0 - packaging==24.0 - pluggy==1.2.0 - pytest==7.4.4 - tomli==2.0.1 - typing-extensions==4.7.1 - zipp==3.15.0 prefix: /opt/conda/envs/sympy
[ "sympy/core/tests/test_expr.py::test_basic", "sympy/core/tests/test_expr.py::test_ibasic", "sympy/core/tests/test_expr.py::test_relational", "sympy/core/tests/test_expr.py::test_relational_assumptions", "sympy/core/tests/test_expr.py::test_relational_noncommutative", "sympy/core/tests/test_expr.py::test_basic_nostr", "sympy/core/tests/test_expr.py::test_series_expansion_for_uniform_order", "sympy/core/tests/test_expr.py::test_leadterm", "sympy/core/tests/test_expr.py::test_as_leading_term", "sympy/core/tests/test_expr.py::test_leadterm2", "sympy/core/tests/test_expr.py::test_leadterm3", "sympy/core/tests/test_expr.py::test_as_leading_term2", "sympy/core/tests/test_expr.py::test_as_leading_term3", "sympy/core/tests/test_expr.py::test_as_leading_term4", "sympy/core/tests/test_expr.py::test_as_leading_term_stub", "sympy/core/tests/test_expr.py::test_atoms", "sympy/core/tests/test_expr.py::test_is_polynomial", "sympy/core/tests/test_expr.py::test_is_rational_function", "sympy/core/tests/test_expr.py::test_is_algebraic_expr", "sympy/core/tests/test_expr.py::test_SAGE1", "sympy/core/tests/test_expr.py::test_SAGE2", "sympy/core/tests/test_expr.py::test_SAGE3", "sympy/core/tests/test_expr.py::test_len", "sympy/core/tests/test_expr.py::test_doit", "sympy/core/tests/test_expr.py::test_attribute_error", "sympy/core/tests/test_expr.py::test_args", "sympy/core/tests/test_expr.py::test_noncommutative_expand_issue_3757", "sympy/core/tests/test_expr.py::test_as_numer_denom", "sympy/core/tests/test_expr.py::test_as_independent", "sympy/core/tests/test_expr.py::test_replace", "sympy/core/tests/test_expr.py::test_find", "sympy/core/tests/test_expr.py::test_count", "sympy/core/tests/test_expr.py::test_has_basics", "sympy/core/tests/test_expr.py::test_has_multiple", "sympy/core/tests/test_expr.py::test_has_piecewise", "sympy/core/tests/test_expr.py::test_has_iterative", "sympy/core/tests/test_expr.py::test_has_integrals", "sympy/core/tests/test_expr.py::test_has_tuple", "sympy/core/tests/test_expr.py::test_has_units", "sympy/core/tests/test_expr.py::test_has_polys", "sympy/core/tests/test_expr.py::test_has_physics", "sympy/core/tests/test_expr.py::test_as_poly_as_expr", "sympy/core/tests/test_expr.py::test_nonzero", "sympy/core/tests/test_expr.py::test_is_number", "sympy/core/tests/test_expr.py::test_as_coeff_add", "sympy/core/tests/test_expr.py::test_as_coeff_mul", "sympy/core/tests/test_expr.py::test_as_coeff_exponent", "sympy/core/tests/test_expr.py::test_extractions", "sympy/core/tests/test_expr.py::test_nan_extractions", "sympy/core/tests/test_expr.py::test_coeff", "sympy/core/tests/test_expr.py::test_coeff2", "sympy/core/tests/test_expr.py::test_coeff2_0", "sympy/core/tests/test_expr.py::test_coeff_expand", "sympy/core/tests/test_expr.py::test_integrate", "sympy/core/tests/test_expr.py::test_as_base_exp", "sympy/core/tests/test_expr.py::test_issue_4963", "sympy/core/tests/test_expr.py::test_action_verbs", "sympy/core/tests/test_expr.py::test_as_powers_dict", "sympy/core/tests/test_expr.py::test_as_coefficients_dict", "sympy/core/tests/test_expr.py::test_args_cnc", "sympy/core/tests/test_expr.py::test_new_rawargs", "sympy/core/tests/test_expr.py::test_issue_5226", "sympy/core/tests/test_expr.py::test_free_symbols", "sympy/core/tests/test_expr.py::test_issue_5300", "sympy/core/tests/test_expr.py::test_as_coeff_Mul", "sympy/core/tests/test_expr.py::test_as_coeff_Add", "sympy/core/tests/test_expr.py::test_expr_sorting", "sympy/core/tests/test_expr.py::test_as_ordered_factors", 
"sympy/core/tests/test_expr.py::test_as_ordered_terms", "sympy/core/tests/test_expr.py::test_sort_key_atomic_expr", "sympy/core/tests/test_expr.py::test_issue_4199", "sympy/core/tests/test_expr.py::test_eval_interval_zoo", "sympy/core/tests/test_expr.py::test_primitive", "sympy/core/tests/test_expr.py::test_issue_5843", "sympy/core/tests/test_expr.py::test_is_constant", "sympy/core/tests/test_expr.py::test_equals", "sympy/core/tests/test_expr.py::test_random", "sympy/core/tests/test_expr.py::test_round", "sympy/core/tests/test_expr.py::test_round_exception_nostr", "sympy/core/tests/test_expr.py::test_extract_branch_factor", "sympy/core/tests/test_expr.py::test_identity_removal", "sympy/core/tests/test_expr.py::test_float_0", "sympy/core/tests/test_expr.py::test_issue_6325", "sympy/core/tests/test_expr.py::test_issue_7426", "sympy/core/tests/test_expr.py::test_issue_10161", "sympy/core/tests/test_expr.py::test_issue_10755", "sympy/simplify/tests/test_simplify.py::test_issue_7263", "sympy/simplify/tests/test_simplify.py::test_simplify_expr", "sympy/simplify/tests/test_simplify.py::test_issue_3557", "sympy/simplify/tests/test_simplify.py::test_simplify_other", "sympy/simplify/tests/test_simplify.py::test_simplify_complex", "sympy/simplify/tests/test_simplify.py::test_simplify_ratio", "sympy/simplify/tests/test_simplify.py::test_simplify_measure", "sympy/simplify/tests/test_simplify.py::test_simplify_issue_1308", "sympy/simplify/tests/test_simplify.py::test_issue_5652", "sympy/simplify/tests/test_simplify.py::test_simplify_fail1", "sympy/simplify/tests/test_simplify.py::test_nthroot", "sympy/simplify/tests/test_simplify.py::test_nthroot1", "sympy/simplify/tests/test_simplify.py::test_separatevars", "sympy/simplify/tests/test_simplify.py::test_separatevars_advanced_factor", "sympy/simplify/tests/test_simplify.py::test_hypersimp", "sympy/simplify/tests/test_simplify.py::test_nsimplify", "sympy/simplify/tests/test_simplify.py::test_issue_9448", "sympy/simplify/tests/test_simplify.py::test_extract_minus_sign", "sympy/simplify/tests/test_simplify.py::test_diff", "sympy/simplify/tests/test_simplify.py::test_logcombine_1", "sympy/simplify/tests/test_simplify.py::test_logcombine_complex_coeff", "sympy/simplify/tests/test_simplify.py::test_posify", "sympy/simplify/tests/test_simplify.py::test_issue_4194", "sympy/simplify/tests/test_simplify.py::test_as_content_primitive", "sympy/simplify/tests/test_simplify.py::test_signsimp", "sympy/simplify/tests/test_simplify.py::test_besselsimp", "sympy/simplify/tests/test_simplify.py::test_Piecewise", "sympy/simplify/tests/test_simplify.py::test_polymorphism", "sympy/simplify/tests/test_simplify.py::test_issue_from_PR1599", "sympy/simplify/tests/test_simplify.py::test_issue_6811", "sympy/simplify/tests/test_simplify.py::test_issue_6920", "sympy/simplify/tests/test_simplify.py::test_issue_7001", "sympy/simplify/tests/test_simplify.py::test_inequality_no_auto_simplify", "sympy/simplify/tests/test_simplify.py::test_issue_9398", "sympy/simplify/tests/test_simplify.py::test_issue_9324_simplify", "sympy/simplify/tests/test_simplify.py::test_simplify_function_inverse", "sympy/simplify/tests/test_simplify.py::test_clear_coefficients", "sympy/solvers/tests/test_diophantine.py::test_input_format", "sympy/solvers/tests/test_diophantine.py::test_univariate", "sympy/solvers/tests/test_diophantine.py::test_classify_diop", "sympy/solvers/tests/test_diophantine.py::test_linear", "sympy/solvers/tests/test_diophantine.py::test_quadratic_simple_hyperbolic_case", 
"sympy/solvers/tests/test_diophantine.py::test_quadratic_elliptical_case", "sympy/solvers/tests/test_diophantine.py::test_quadratic_parabolic_case", "sympy/solvers/tests/test_diophantine.py::test_quadratic_perfect_square", "sympy/solvers/tests/test_diophantine.py::test_bf_pell", "sympy/solvers/tests/test_diophantine.py::test_length", "sympy/solvers/tests/test_diophantine.py::test_transformation_to_pell", "sympy/solvers/tests/test_diophantine.py::test_find_DN", "sympy/solvers/tests/test_diophantine.py::test_ldescent", "sympy/solvers/tests/test_diophantine.py::test_transformation_to_normal", "sympy/solvers/tests/test_diophantine.py::test_square_factor", "sympy/solvers/tests/test_diophantine.py::test_parametrize_ternary_quadratic", "sympy/solvers/tests/test_diophantine.py::test_no_square_ternary_quadratic", "sympy/solvers/tests/test_diophantine.py::test_diophantine", "sympy/solvers/tests/test_diophantine.py::test_general_pythagorean", "sympy/solvers/tests/test_diophantine.py::test_diop_general_sum_of_squares_quick", "sympy/solvers/tests/test_diophantine.py::test_diop_partition", "sympy/solvers/tests/test_diophantine.py::test_prime_as_sum_of_two_squares", "sympy/solvers/tests/test_diophantine.py::test_sum_of_three_squares", "sympy/solvers/tests/test_diophantine.py::test_sum_of_four_squares", "sympy/solvers/tests/test_diophantine.py::test_power_representation", "sympy/solvers/tests/test_diophantine.py::test_assumptions", "sympy/solvers/tests/test_diophantine.py::test_holzer", "sympy/solvers/tests/test_diophantine.py::test_issue_9539", "sympy/solvers/tests/test_diophantine.py::test_diop_sum_of_even_powers", "sympy/solvers/tests/test_diophantine.py::test_sum_of_squares_powers", "sympy/solvers/tests/test_diophantine.py::test__can_do_sum_of_squares", "sympy/solvers/tests/test_diophantine.py::test_issue_9538", "sympy/tensor/array/tests/test_arrayop.py::test_tensorproduct", "sympy/tensor/array/tests/test_arrayop.py::test_tensorcontraction", "sympy/tensor/array/tests/test_arrayop.py::test_derivative_by_array", "sympy/utilities/tests/test_iterables.py::test_postorder_traversal", "sympy/utilities/tests/test_iterables.py::test_flatten", "sympy/utilities/tests/test_iterables.py::test_group", "sympy/utilities/tests/test_iterables.py::test_subsets", "sympy/utilities/tests/test_iterables.py::test_variations", "sympy/utilities/tests/test_iterables.py::test_cartes", "sympy/utilities/tests/test_iterables.py::test_filter_symbols", "sympy/utilities/tests/test_iterables.py::test_numbered_symbols", "sympy/utilities/tests/test_iterables.py::test_sift", "sympy/utilities/tests/test_iterables.py::test_take", "sympy/utilities/tests/test_iterables.py::test_dict_merge", "sympy/utilities/tests/test_iterables.py::test_prefixes", "sympy/utilities/tests/test_iterables.py::test_postfixes", "sympy/utilities/tests/test_iterables.py::test_topological_sort", "sympy/utilities/tests/test_iterables.py::test_rotate", "sympy/utilities/tests/test_iterables.py::test_multiset_partitions", "sympy/utilities/tests/test_iterables.py::test_multiset_combinations", "sympy/utilities/tests/test_iterables.py::test_multiset_permutations", "sympy/utilities/tests/test_iterables.py::test_partitions", "sympy/utilities/tests/test_iterables.py::test_binary_partitions", "sympy/utilities/tests/test_iterables.py::test_bell_perm", "sympy/utilities/tests/test_iterables.py::test_involutions", "sympy/utilities/tests/test_iterables.py::test_derangements", "sympy/utilities/tests/test_iterables.py::test_necklaces", 
"sympy/utilities/tests/test_iterables.py::test_bracelets", "sympy/utilities/tests/test_iterables.py::test_generate_oriented_forest", "sympy/utilities/tests/test_iterables.py::test_unflatten", "sympy/utilities/tests/test_iterables.py::test_common_prefix_suffix", "sympy/utilities/tests/test_iterables.py::test_minlex", "sympy/utilities/tests/test_iterables.py::test_ordered", "sympy/utilities/tests/test_iterables.py::test_runs", "sympy/utilities/tests/test_iterables.py::test_reshape", "sympy/utilities/tests/test_iterables.py::test_uniq", "sympy/utilities/tests/test_iterables.py::test_kbins", "sympy/utilities/tests/test_iterables.py::test_has_dups", "sympy/utilities/tests/test_iterables.py::test__partition", "sympy/utilities/tests/test_iterables.py::test_ordered_partitions" ]
[ "sympy/core/tests/test_expr.py::test_call_2", "sympy/core/tests/test_expr.py::test_float_0_fail", "sympy/simplify/tests/test_simplify.py::test_factorial_simplify", "sympy/simplify/tests/test_simplify.py::test_simplify_float_vs_integer", "sympy/solvers/tests/test_diophantine.py::test_quadratic_non_perfect_square", "sympy/solvers/tests/test_diophantine.py::test_issue_9106", "sympy/solvers/tests/test_diophantine.py::test_quadratic_non_perfect_slow", "sympy/solvers/tests/test_diophantine.py::test_DN", "sympy/solvers/tests/test_diophantine.py::test_diop_ternary_quadratic_normal", "sympy/solvers/tests/test_diophantine.py::test_diop_ternary_quadratic", "sympy/solvers/tests/test_diophantine.py::test_descent", "sympy/solvers/tests/test_diophantine.py::test_diopcoverage", "sympy/solvers/tests/test_diophantine.py::test_fail_holzer", "sympy/solvers/tests/test_diophantine.py::test_issue_8943" ]
[]
[]
BSD
499
requests__requests-kerberos-69
e2c671733a491be5f32fb8d1c938f8edec4eda44
2016-04-12 03:13:40
e2c671733a491be5f32fb8d1c938f8edec4eda44
Lukasa: This looks reasonable enough to me, modulo the outstanding work on the exception. Ping me again when you've dealt with that? nitzmahone: Right on, will do. Thanks for the look... nitzmahone: @Lukasa - ready for review. Lukasa: Fab, thanks! Left multiple notes inline, but this is good progress. =D nitzmahone: @Lukasa - thanks for looking at another round, let's see if this is more to your liking... Refactored generate_request_header a bit- now it raises with detail instead of returning null on failure (caller can decide what to do with it). Used base EnvironmentException as a temporary "surrogate" error we can raise for the failure code cases since it has an errno (the handler translates those back into KerberosExchangeError). Also restructured a bit to eliminate repetition that gets worse with error raising. Not sure how we'd want to inject detail into the response in the non-preemptive case, so left that one as-is. Lukasa: Cool, that's some definite progress! I've made some more notes. =) nitzmahone: @Lukasa - feedback implemented, ready for another look (hopefully sans dumb late-night issues this time) Lukasa: Ok, I think this looks good to me. :sparkles: @rbcarson, do you want to take a look? nitzmahone: @Lukasa - I have one other small unrelated change that Ansible needs (passing a non-default principal to the auth plugin init), but it'd be a conflict with this until it's merged (or I can include in this PR). Assuming those get merged, what's the chances of getting releases cut for this and requests_ntlm? Lukasa: @nitzmahone I'm happy to do new releases. Feel free to open a new PR once we've got this one merged. =) rbcarson: @nitzmahone Is there an easy way to reproduce the problem solved by this PR? I have access to Windows Server, SharePoint, etc.
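The review thread converges on raising a detailed error from `generate_request_header` instead of returning None, with `EnvironmentError` used as an internal surrogate (its `errno` slot carries the failing GSS result code) that is then translated into `KerberosExchangeError`. A minimal sketch of that translation pattern follows; the stage name and result value are illustrative stand-ins, not the library's exact code (the real implementation is in the patch below):

```python
# Illustrative sketch of the errno-based error translation discussed in
# the review thread above; stage name and result are stand-ins.
class KerberosExchangeError(Exception):
    """Kerberos Exchange Failed Error"""

def run_stage(result, kerb_stage="authGSSClientInit()"):
    try:
        # Failure codes are first raised as EnvironmentError so the
        # result travels in errno next to the failing stage name.
        if result < 1:
            raise EnvironmentError(result, kerb_stage)
        return "Negotiate <token>"
    except EnvironmentError as error:
        # Only errors raised above (errno matches the GSS result) are
        # translated into the public KerberosExchangeError; anything
        # else is re-raised untouched.
        if error.errno != result:
            raise
        raise KerberosExchangeError(
            "{0} failed, result: {1}".format(kerb_stage, result))

assert run_stage(1).startswith("Negotiate")
try:
    run_stage(-1)
except KerberosExchangeError as exc:
    print(exc)  # authGSSClientInit() failed, result: -1
```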
diff --git a/README.rst b/README.rst index ddf24af..83183e2 100644 --- a/README.rst +++ b/README.rst @@ -62,6 +62,19 @@ authentication, you can do that as well: >>> r = requests.get("http://example.org", auth=kerberos_auth) ... +Preemptive Authentication +------------------------- + +``HTTPKerberosAuth`` can be forced to preemptively initiate the Kerberos GSS exchange and present a Kerberos ticket on the initial request (and all subsequent). By default, authentication only occurs after a ``401 Unauthorized`` response containing a Kerberos or Negotiate challenge is received from the origin server. This can cause mutual authentication failures for hosts that use a persistent connection (eg, Windows/WinRM), as no Kerberos challenges are sent after the initial auth handshake. This behavior can be altered by setting ``force_preemptive=True``: + +.. code-block:: pycon + + >>> import requests + >>> from requests_kerberos import HTTPKerberosAuth, REQUIRED + >>> kerberos_auth = HTTPKerberosAuth(mutual_authentication=REQUIRED, force_preemptive=True) + >>> r = requests.get("https://windows.example.org/wsman", auth=kerberos_auth) + ... + Logging ------- diff --git a/requests_kerberos/exceptions.py b/requests_kerberos/exceptions.py index f76a422..51e11ec 100644 --- a/requests_kerberos/exceptions.py +++ b/requests_kerberos/exceptions.py @@ -10,3 +10,6 @@ from requests.exceptions import RequestException class MutualAuthenticationError(RequestException): """Mutual Authentication Error""" + +class KerberosExchangeError(RequestException): + """Kerberos Exchange Failed Error""" diff --git a/requests_kerberos/kerberos_.py b/requests_kerberos/kerberos_.py index 166adc8..4aa44ed 100644 --- a/requests_kerberos/kerberos_.py +++ b/requests_kerberos/kerberos_.py @@ -11,11 +11,10 @@ from requests.compat import urlparse, StringIO from requests.structures import CaseInsensitiveDict from requests.cookies import cookiejar_from_dict -from .exceptions import MutualAuthenticationError +from .exceptions import MutualAuthenticationError, KerberosExchangeError log = logging.getLogger(__name__) - # Different types of mutual authentication: # with mutual_authentication set to REQUIRED, all responses will be # authenticated with the exception of errors. Errors will have their contents @@ -31,7 +30,6 @@ REQUIRED = 1 OPTIONAL = 2 DISABLED = 3 - class SanitizedResponse(Response): """The :class:`Response <Response>` object, which contains a server's response to an HTTP request. @@ -86,21 +84,22 @@ class HTTPKerberosAuth(AuthBase): object.""" def __init__( self, mutual_authentication=REQUIRED, - service="HTTP", delegate=False): + service="HTTP", delegate=False, force_preemptive=False): self.context = {} self.mutual_authentication = mutual_authentication self.delegate = delegate self.pos = None self.service = service + self.force_preemptive = force_preemptive - def generate_request_header(self, response): + def generate_request_header(self, response, host, is_preemptive=False): """ Generates the GSSAPI authentication token with kerberos. - If any GSSAPI step fails, return None. + If any GSSAPI step fails, raise KerberosExchangeError + with failure detail. """ - host = urlparse(response.url).hostname # Flags used by kerberos module. 
gssflags = kerberos.GSS_C_MUTUAL_FLAG | kerberos.GSS_C_SEQUENCE_FLAG @@ -108,48 +107,52 @@ class HTTPKerberosAuth(AuthBase): gssflags |= kerberos.GSS_C_DELEG_FLAG try: + kerb_stage = "authGSSClientInit()" result, self.context[host] = kerberos.authGSSClientInit( "{0}@{1}".format(self.service, host), gssflags=gssflags) - except kerberos.GSSError as error: - log.error("generate_request_header(): authGSSClientInit() failed:") - log.exception(error) - return None - if result < 1: - log.error("generate_request_header(): authGSSClientInit() failed: " - "{0}".format(result)) - return None + if result < 1: + raise EnvironmentError(result, kerb_stage) - try: + # if we have a previous response from the server, use it to continue + # the auth process, otherwise use an empty value + negotiate_resp_value = '' if is_preemptive else _negotiate_value(response) + + kerb_stage = "authGSSClientStep()" result = kerberos.authGSSClientStep(self.context[host], - _negotiate_value(response)) - except kerberos.GSSError as error: - log.exception( - "generate_request_header(): authGSSClientStep() failed:") - log.exception(error) - return None + negotiate_resp_value) - if result < 0: - log.error( - "generate_request_header(): authGSSClientStep() failed: " - "{0}".format(result)) - return None + if result < 0: + raise EnvironmentError(result, kerb_stage) - try: + kerb_stage = "authGSSClientResponse()" gss_response = kerberos.authGSSClientResponse(self.context[host]) + + return "Negotiate {0}".format(gss_response) + except kerberos.GSSError as error: log.exception( - "generate_request_header(): authGSSClientResponse() failed:") + "generate_request_header(): {0} failed:".format(kerb_stage)) log.exception(error) - return None + raise KerberosExchangeError("%s failed: %s" % (kerb_stage, str(error.args))) - return "Negotiate {0}".format(gss_response) + except EnvironmentError as error: + # ensure we raised this for translation to KerberosExchangeError + # by comparing errno to result, re-raise if not + if error.errno != result: + raise + message = "{0} failed, result: {1}".format(kerb_stage, result) + log.error("generate_request_header(): {0}".format(message)) + raise KerberosExchangeError(message) def authenticate_user(self, response, **kwargs): """Handles user authentication with gssapi/kerberos""" - auth_header = self.generate_request_header(response) - if auth_header is None: + host = urlparse(response.url).hostname + + try: + auth_header = self.generate_request_header(response, host) + except KerberosExchangeError: # GSS Failure, return existing response return response @@ -285,6 +288,17 @@ class HTTPKerberosAuth(AuthBase): response.request.deregister_hook('response', self.handle_response) def __call__(self, request): + if self.force_preemptive: + # add Authorization header before we receive a 401 + # by the 401 handler + host = urlparse(request.url).hostname + + auth_header = self.generate_request_header(None, host, is_preemptive=True) + + log.debug("HTTPKerberosAuth: Preemptive Authorization header: {0}".format(auth_header)) + + request.headers['Authorization'] = auth_header + request.register_hook('response', self.handle_response) try: self.pos = request.body.tell()
Mutual auth REQUIRED fails w/ persistent connection When communicating with a Windows wsman endpoint using requests-kerberos, we should be able to set mutual auth to REQUIRED. However, the server stops sending auth responses on persistent connections after a successful exchange, which causes requests-kerberos to fail for mutual auth on subsequent responses (because the WWW-Authenticate response header is missing). If authentication is repeated on every request, the server will emit the proper headers in the response, but that behavior isn't currently implemented by requests-kerberos. I'm not familiar enough with the various RFCs to be sure, but I think this behavior falls into a gray area that's not well documented. I'm guessing one of two things should happen here: 1) Mutual auth REQUIRED forces reauthentication on each request (sub-optimal for performance, probably more so in delegated cases). A fix for this could also probably easily address #65. 2) Add stateful auth code to requests-kerberos that recognizes a successful auth exchange and won't try to validate mutual auth for implied-prior-auth responses on persistent connections. It'd still need to watch for 401s and reauthenticate accordingly (eg, proxies w/o Proxy-Support: Session-Based-Authentication), and it might need to be connection-aware to avoid a connection-hijacking attack via a compromised proxy (assuming requests surfaces what we'd need to know). I can take a stab at implementing one of these behaviors, but I'm not sure which one is more correct. I'd lean toward 2) for performance, but implementing connection-aware behavior could be problematic. 1) should be more straightforward, but may have a performance penalty.
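For reference, the configuration described here (mutual auth REQUIRED against a WinRM endpoint on a keep-alive connection) and the opt-in remedy this instance's patch adds look roughly like the sketch below; the wsman URL is the placeholder from the README change above, and `requests.Session` is used only to make the persistent connection explicit:

```python
import requests
from requests_kerberos import HTTPKerberosAuth, REQUIRED

# Keep-alive connection: after the first handshake the server sends no
# further Negotiate challenges, which is what breaks strict mutual auth.
session = requests.Session()

# Behaviour added by the patch: present a Kerberos ticket preemptively
# on every request so each response can still be mutually authenticated.
auth = HTTPKerberosAuth(mutual_authentication=REQUIRED,
                        force_preemptive=True)

r = session.get("https://windows.example.org/wsman", auth=auth)
```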
requests/requests-kerberos
diff --git a/test_requests_kerberos.py b/test_requests_kerberos.py index 38833d0..b31f644 100644 --- a/test_requests_kerberos.py +++ b/test_requests_kerberos.py @@ -4,6 +4,7 @@ """Tests for requests_kerberos.""" from mock import Mock, patch +from requests.compat import urlparse import requests @@ -73,6 +74,33 @@ class KerberosTestCase(unittest.TestCase): requests_kerberos.kerberos_._negotiate_value(response) is None ) + def test_force_preemptive(self): + with patch.multiple(kerberos_module_name, + authGSSClientInit=clientInit_complete, + authGSSClientResponse=clientResponse, + authGSSClientStep=clientStep_continue): + auth = requests_kerberos.HTTPKerberosAuth(force_preemptive=True) + + request = requests.Request(url="http://www.example.org") + + auth.__call__(request) + + self.assertTrue('Authorization' in request.headers) + self.assertEqual(request.headers.get('Authorization'), 'Negotiate GSSRESPONSE') + + def test_no_force_preemptive(self): + with patch.multiple(kerberos_module_name, + authGSSClientInit=clientInit_complete, + authGSSClientResponse=clientResponse, + authGSSClientStep=clientStep_continue): + auth = requests_kerberos.HTTPKerberosAuth() + + request = requests.Request(url="http://www.example.org") + + auth.__call__(request) + + self.assertTrue('Authorization' not in request.headers) + def test_generate_request_header(self): with patch.multiple(kerberos_module_name, authGSSClientInit=clientInit_complete, @@ -81,9 +109,10 @@ class KerberosTestCase(unittest.TestCase): response = requests.Response() response.url = "http://www.example.org/" response.headers = {'www-authenticate': 'negotiate token'} + host = urlparse(response.url).hostname auth = requests_kerberos.HTTPKerberosAuth() self.assertEqual( - auth.generate_request_header(response), + auth.generate_request_header(response, host), "Negotiate GSSRESPONSE" ) clientInit_complete.assert_called_with( @@ -102,10 +131,10 @@ class KerberosTestCase(unittest.TestCase): response = requests.Response() response.url = "http://www.example.org/" response.headers = {'www-authenticate': 'negotiate token'} + host = urlparse(response.url).hostname auth = requests_kerberos.HTTPKerberosAuth() - self.assertEqual( - auth.generate_request_header(response), - None + self.assertRaises(requests_kerberos.exceptions.KerberosExchangeError, + auth.generate_request_header, response, host ) clientInit_error.assert_called_with( "[email protected]", @@ -123,10 +152,10 @@ class KerberosTestCase(unittest.TestCase): response = requests.Response() response.url = "http://www.example.org/" response.headers = {'www-authenticate': 'negotiate token'} + host = urlparse(response.url).hostname auth = requests_kerberos.HTTPKerberosAuth() - self.assertEqual( - auth.generate_request_header(response), - None + self.assertRaises(requests_kerberos.exceptions.KerberosExchangeError, + auth.generate_request_header, response, host ) clientInit_complete.assert_called_with( "[email protected]", @@ -506,15 +535,16 @@ class KerberosTestCase(unittest.TestCase): def test_generate_request_header_custom_service(self): with patch.multiple(kerberos_module_name, - authGSSClientInit=clientInit_error, + authGSSClientInit=clientInit_complete, authGSSClientResponse=clientResponse, authGSSClientStep=clientStep_continue): response = requests.Response() response.url = "http://www.example.org/" response.headers = {'www-authenticate': 'negotiate token'} + host = urlparse(response.url).hostname auth = requests_kerberos.HTTPKerberosAuth(service="barfoo") - auth.generate_request_header(response), - 
clientInit_error.assert_called_with(
+        auth.generate_request_header(response, host),
+        clientInit_complete.assert_called_with(
             "barfoo@www.example.org",
             gssflags=(
                 kerberos.GSS_C_MUTUAL_FLAG |
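The test patch drives the GSS exchange against mocked kerberos bindings. The fixtures it references (`clientInit_complete`, `clientStep_continue`, `clientResponse`) are defined earlier in the test module and are not shown in the diff; below is a plausible reconstruction of that setup, assuming the pykerberos module name `"kerberos"` and return shapes implied by the assertions above:

```python
from mock import Mock, patch
import requests
import requests_kerberos

# Assumed fixture shapes: authGSSClientInit -> (result, context),
# authGSSClientStep -> result code, authGSSClientResponse -> the token
# that ends up in the "Negotiate <token>" Authorization header.
clientInit_complete = Mock(return_value=(1, "CTX"))
clientStep_continue = Mock(return_value=0)
clientResponse = Mock(return_value="GSSRESPONSE")

with patch.multiple("kerberos",
                    authGSSClientInit=clientInit_complete,
                    authGSSClientResponse=clientResponse,
                    authGSSClientStep=clientStep_continue):
    auth = requests_kerberos.HTTPKerberosAuth(force_preemptive=True)
    request = requests.Request(url="http://www.example.org")
    auth(request)  # preemptive path adds the header before any 401
    assert request.headers["Authorization"] == "Negotiate GSSRESPONSE"
```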
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_issue_reference", "has_many_modified_files", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 2, "test_score": 2 }, "num_modified_files": 3 }
0.8
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "mock" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi==2025.1.31 charset-normalizer==3.4.1 exceptiongroup==1.2.2 idna==3.10 iniconfig==2.1.0 mock==5.2.0 packaging==24.2 pluggy==1.5.0 pykerberos==1.2.4 pytest==8.3.5 requests==2.32.3 -e git+https://github.com/requests/requests-kerberos.git@e2c671733a491be5f32fb8d1c938f8edec4eda44#egg=requests_kerberos tomli==2.2.1 urllib3==2.3.0
name: requests-kerberos channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - certifi==2025.1.31 - charset-normalizer==3.4.1 - exceptiongroup==1.2.2 - idna==3.10 - iniconfig==2.1.0 - mock==5.2.0 - packaging==24.2 - pluggy==1.5.0 - pykerberos==1.2.4 - pytest==8.3.5 - requests==2.32.3 - tomli==2.2.1 - urllib3==2.3.0 prefix: /opt/conda/envs/requests-kerberos
[ "test_requests_kerberos.py::KerberosTestCase::test_force_preemptive", "test_requests_kerberos.py::KerberosTestCase::test_generate_request_header", "test_requests_kerberos.py::KerberosTestCase::test_generate_request_header_custom_service", "test_requests_kerberos.py::KerberosTestCase::test_generate_request_header_init_error", "test_requests_kerberos.py::KerberosTestCase::test_generate_request_header_step_error" ]
[]
[ "test_requests_kerberos.py::KerberosTestCase::test_authenticate_server", "test_requests_kerberos.py::KerberosTestCase::test_authenticate_user", "test_requests_kerberos.py::KerberosTestCase::test_delegation", "test_requests_kerberos.py::KerberosTestCase::test_handle_401", "test_requests_kerberos.py::KerberosTestCase::test_handle_other", "test_requests_kerberos.py::KerberosTestCase::test_handle_response_200", "test_requests_kerberos.py::KerberosTestCase::test_handle_response_200_mutual_auth_optional_hard_failure", "test_requests_kerberos.py::KerberosTestCase::test_handle_response_200_mutual_auth_optional_soft_failure", "test_requests_kerberos.py::KerberosTestCase::test_handle_response_200_mutual_auth_required_failure", "test_requests_kerberos.py::KerberosTestCase::test_handle_response_200_mutual_auth_required_failure_2", "test_requests_kerberos.py::KerberosTestCase::test_handle_response_401", "test_requests_kerberos.py::KerberosTestCase::test_handle_response_401_rejected", "test_requests_kerberos.py::KerberosTestCase::test_handle_response_500_mutual_auth_optional_failure", "test_requests_kerberos.py::KerberosTestCase::test_handle_response_500_mutual_auth_required_failure", "test_requests_kerberos.py::KerberosTestCase::test_negotate_value_extraction", "test_requests_kerberos.py::KerberosTestCase::test_negotate_value_extraction_none", "test_requests_kerberos.py::KerberosTestCase::test_no_force_preemptive" ]
[]
ISC License
500