1010
1111from cachecontrol .controller import logger as cache_control_logger
1212from poetry .core .packages .package import Package
13- from poetry .core .packages .utils .link import Link
1413from poetry .core .version .exceptions import InvalidVersion
1514
1615from poetry .repositories .exceptions import PackageNotFound
2625
2726if TYPE_CHECKING :
2827 from packaging .utils import NormalizedName
29- from poetry .core .constraints .version import Version
3028 from poetry .core .constraints .version import VersionConstraint
3129
# Distribution file types considered when collecting release files/links;
# other package types (eggs, etc.) are ignored.
SUPPORTED_PACKAGE_TYPES = {"sdist", "bdist_wheel"}

3430
3531class PyPiRepository (HTTPRepository ):
3632 def __init__ (
3733 self ,
3834 url : str = "https://pypi.org/" ,
3935 disable_cache : bool = False ,
40- fallback : bool = True ,
4136 pool_size : int = requests .adapters .DEFAULT_POOLSIZE ,
4237 ) -> None :
4338 super ().__init__ (
@@ -48,7 +43,6 @@ def __init__(
4843 )
4944
5045 self ._base_url = url
51- self ._fallback = fallback
5246
5347 def search (self , query : str ) -> list [Package ]:
5448 results = []
@@ -110,79 +104,6 @@ def _get_package_info(self, name: NormalizedName) -> dict[str, Any]:
110104
111105 return info
112106
113- def find_links_for_package (self , package : Package ) -> list [Link ]:
114- json_data = self ._get (f"pypi/{ package .name } /{ package .version } /json" )
115- if json_data is None :
116- return []
117-
118- links = []
119- for url in json_data ["urls" ]:
120- if url ["packagetype" ] in SUPPORTED_PACKAGE_TYPES :
121- h = f"sha256={ url ['digests' ]['sha256' ]} "
122- links .append (Link (url ["url" ] + "#" + h , yanked = self ._get_yanked (url )))
123-
124- return links
125-
126- def _get_release_info (
127- self , name : NormalizedName , version : Version
128- ) -> dict [str , Any ]:
129- from poetry .inspection .info import PackageInfo
130-
131- self ._log (f"Getting info for { name } ({ version } ) from PyPI" , "debug" )
132-
133- json_data = self ._get (f"pypi/{ name } /{ version } /json" )
134- if json_data is None :
135- raise PackageNotFound (f"Package [{ name } ] not found." )
136-
137- info = json_data ["info" ]
138-
139- data = PackageInfo (
140- name = info ["name" ],
141- version = info ["version" ],
142- summary = info ["summary" ],
143- requires_dist = info ["requires_dist" ],
144- requires_python = info ["requires_python" ],
145- yanked = self ._get_yanked (info ),
146- cache_version = str (self .CACHE_VERSION ),
147- )
148-
149- try :
150- version_info = json_data ["urls" ]
151- except KeyError :
152- version_info = []
153-
154- files = info .get ("files" , [])
155- for file_info in version_info :
156- if file_info ["packagetype" ] in SUPPORTED_PACKAGE_TYPES :
157- files .append (
158- {
159- "file" : file_info ["filename" ],
160- "hash" : "sha256:" + file_info ["digests" ]["sha256" ],
161- }
162- )
163- data .files = files
164-
165- if self ._fallback and data .requires_dist is None :
166- self ._log (
167- "No dependencies found, downloading metadata and/or archives" ,
168- level = "debug" ,
169- )
170- # No dependencies set (along with other information)
171- # This might be due to actually no dependencies
172- # or badly set metadata when uploading.
173- # So, we need to make sure there is actually no
174- # dependencies by introspecting packages.
175- page = self .get_page (name )
176- links = list (page .links_for_version (name , version ))
177- info = self ._get_info_from_links (links )
178-
179- data .requires_dist = info .requires_dist
180-
181- if not data .requires_python :
182- data .requires_python = info .requires_python
183-
184- return data .asdict ()
185-
186107 def _get_page (self , name : NormalizedName ) -> SimpleJsonPage :
187108 source = self ._base_url + f"simple/{ name } /"
188109 info = self .get_package_info (name )
0 commit comments