@@ -83,12 +83,106 @@ def parse_requirements(
 
     The second element is extra_pip_args should be passed to `whl_library`.
     """
-    evaluate_markers = evaluate_markers or (lambda _ctx, _requirements: {})
+    b = parse_requirements_builder(
+        evaluate_markers = evaluate_markers,
+        get_index_urls = get_index_urls,
+        extract_url_srcs = extract_url_srcs,
+        logger = logger,
+    )
+    for f, plats in requirements_by_platform.items():
+        for p in plats:
+            b = b.with_requirement(f, p)
+    b = b.with_pip_args(extra_pip_args)
+    return b.build(ctx)
+
+def parse_requirements_builder(
+        *,
+        evaluate_markers = None,
+        get_index_urls = None,
+        extract_url_srcs = True,
+        logger = None):
+    """Create a builder for incremental configuration of the parsing.
+
+    Args:
+        get_index_urls: Callable[[ctx, list[str]], dict], a callable to get all
+            of the distribution URLs from a PyPI index. Accepts ctx and
+            distribution names to query.
+        evaluate_markers: A function to use to evaluate the requirements.
+            Accepts a dict where keys are requirement lines to evaluate against
+            the platforms stored as values in the input dict. Returns the same
+            dict, but with values being platforms that are compatible with the
+            requirements line.
+        extract_url_srcs: A boolean to enable extracting URLs from requirement
+            lines to enable using bazel downloader.
+        logger: repo_utils.logger or None, a simple struct to log diagnostic messages.
+
+    Returns:
+        A builder with methods:
+        * with_requirement - add a requirement to be included in building.
+        * with_pip_args - add pip args to be included in building.
+        * build - parse the requirements and return the appropriate parameters to create the whl_libraries.
+    """
+
+    # buildifier: disable=uninitialized
+    self = struct(
+        # buildable components
+        requirements_by_platform = {},
+        extra_pip_args = {},
+        # other params
+        evaluate_markers = evaluate_markers or (lambda _ctx, _requirements: {}),
+        get_index_urls = get_index_urls,
+        extract_url_srcs = extract_url_srcs,
+        logger = logger,
+        # go/keep-sorted start
+        build = lambda ctx: _builder_build(self, ctx),
+        with_requirement = lambda file, platform: _builder_with_requirement(self, file, platform),
+        with_pip_args = lambda args: _builder_with_pip_args(self, args),
+        # go/keep-sorted end
+    )
+    return self
+
+def _builder_with_requirement(self, file, platform):
+    """Add a requirement"""
+    self.requirements_by_platform.setdefault(file, []).append(platform)
+    return self
+
+def _builder_with_pip_args(self, args, platform = None):
+    """Add pip arguments by platform"""
+    self.extra_pip_args.setdefault(platform, []).extend(args)
+    return self
+
+def _builder_build(self, ctx):
+    """Get the requirements with platforms that the requirements apply to.
+
+    Args:
+        self: The builder instance.
+        ctx: A context that has a .read function that reads contents from a label.
+
+    Returns:
+        {type}`dict[str, list[struct]]` where the key is the distribution name and the struct
+        contains the following attributes:
+            * `distribution`: {type}`str` The non-normalized distribution name.
+            * `srcs`: {type}`struct` The parsed requirement line for easier Simple
+              API downloading (see `index_sources` return value).
+            * `target_platforms`: {type}`list[str]` Target platforms that this package is for.
+              The format is `cp3{minor}_{os}_{arch}`.
+            * `is_exposed`: {type}`bool` `True` if the package should be exposed via the hub
+              repository.
+            * `extra_pip_args`: {type}`list[str]` pip args to use in case we are
+              not using the bazel downloader to download the archives. This should
+              be passed to {obj}`whl_library`.
+            * `whls`: {type}`list[struct]` The list of whl entries that can be
+              downloaded using the bazel downloader.
+            * `sdist`: {type}`list[struct]` The sdist that can be downloaded using
+              the bazel downloader.
+
+    The `extra_pip_args` of each struct should be passed to `whl_library`.
+    """
     options = {}
     requirements = {}
-    for file, plats in requirements_by_platform.items():
-        if logger:
-            logger.debug(lambda: "Using {} for {}".format(file, plats))
+    for file, plats in self.requirements_by_platform.items():
+        if self.logger:
+            self.logger.debug(lambda: "Using {} for {}".format(file, plats))
         contents = ctx.read(file)
 
         # Parse the requirements file directly in starlark to get the information
@@ -121,10 +215,10 @@ def parse_requirements(
             for p in opt.split(" "):
                 tokenized_options.append(p)
 
-        pip_args = tokenized_options + extra_pip_args
+        pip_args = tokenized_options + self.extra_pip_args[None]
         for plat in plats:
             requirements[plat] = requirements_dict.values()
-            options[plat] = pip_args
+            options[plat] = pip_args + self.extra_pip_args.get(plat, [])
 
     requirements_by_platform = {}
     reqs_with_env_markers = {}
@@ -159,16 +253,16 @@ def parse_requirements(
     # to do, we could use Python to parse the requirement lines and infer the
     # URL of the files to download things from. This should be important for
     # VCS package references.
-    env_marker_target_platforms = evaluate_markers(ctx, reqs_with_env_markers)
-    if logger:
-        logger.debug(lambda: "Evaluated env markers from:\n{}\n\nTo:\n{}".format(
+    env_marker_target_platforms = self.evaluate_markers(ctx, reqs_with_env_markers)
+    if self.logger:
+        self.logger.debug(lambda: "Evaluated env markers from:\n{}\n\nTo:\n{}".format(
             reqs_with_env_markers,
             env_marker_target_platforms,
         ))
 
     index_urls = {}
-    if get_index_urls:
-        index_urls = get_index_urls(
+    if self.get_index_urls:
+        index_urls = self.get_index_urls(
             ctx,
             # Use list({}) as a way to have a set
             list({
@@ -197,20 +291,20 @@ def parse_requirements(
                 reqs = reqs,
                 index_urls = index_urls,
                 env_marker_target_platforms = env_marker_target_platforms,
-                extract_url_srcs = extract_url_srcs,
-                logger = logger,
+                extract_url_srcs = self.extract_url_srcs,
+                logger = self.logger,
             ),
         )
         ret.append(item)
-        if not item.is_exposed and logger:
-            logger.debug(lambda: "Package '{}' will not be exposed because it is only present on a subset of platforms: {} out of {}".format(
+        if not item.is_exposed and self.logger:
+            self.logger.debug(lambda: "Package '{}' will not be exposed because it is only present on a subset of platforms: {} out of {}".format(
                 name,
                 sorted(requirement_target_platforms),
                 sorted(requirements),
             ))
 
-    if logger:
-        logger.debug(lambda: "Will configure whl repos: {}".format([w.name for w in ret]))
+    if self.logger:
+        self.logger.debug(lambda: "Will configure whl repos: {}".format([w.name for w in ret]))
 
     return ret
 
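For reference, a minimal usage sketch of the builder API added above, assuming `parse_requirements_builder` is loaded from this file; the labels, platform strings, and pip arguments are illustrative placeholders rather than values from the change, and `module_ctx` stands for any context object exposing a `.read` function:

    # Hypothetical caller, mirroring what parse_requirements() now does internally.
    def _configure_whl_libraries(module_ctx):
        b = parse_requirements_builder(
            # evaluate_markers and get_index_urls are left at their defaults here.
            extract_url_srcs = True,
        )

        # Register each requirements file together with the platforms it covers.
        b = b.with_requirement("//:requirements_linux.txt", "cp312_linux_x86_64")
        b = b.with_requirement("//:requirements_osx.txt", "cp312_osx_aarch64")

        # Pip args added without a platform land under the None key and are
        # applied to every platform by _builder_build.
        b = b.with_pip_args(["--require-hashes"])

        # build() reads and parses the files via module_ctx.read and returns the
        # structs documented in _builder_build, including the extra_pip_args that
        # should be forwarded to whl_library.
        return b.build(module_ctx)

Two details of the sketch follow directly from the diff: `_builder_build` reads the platform-agnostic arguments with `self.extra_pip_args[None]`, so at least one `with_pip_args` call is needed before `build` (`parse_requirements` itself always makes this call with its `extra_pip_args` argument); and the per-platform arguments that `options[plat]` picks up via `self.extra_pip_args.get(plat, [])` can only be populated by calling `_builder_with_pip_args` with its `platform` parameter, since the struct's `with_pip_args` lambda does not forward it.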
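The `evaluate_markers` contract described in the builder docstring (a dict of requirement lines mapped to candidate platforms in, the compatible subset out) can be illustrated with a deliberately permissive stub; this is an assumption-level sketch, not code from the change:

    # Hypothetical marker evaluator: treats every requirement line as compatible
    # with all of the candidate platforms it was queried for.
    def _naive_evaluate_markers(_ctx, requirements_map):
        return {line: list(platforms) for line, platforms in requirements_map.items()}

    # It plugs into the builder (or parse_requirements) via the evaluate_markers
    # argument, e.g.:
    #     parse_requirements_builder(evaluate_markers = _naive_evaluate_markers)

The builder's default, `lambda _ctx, _requirements: {}`, reports no matches at all, so callers that rely on environment markers need to supply a real evaluator.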