@@ -15,9 +15,15 @@ load("//private/lib:coordinates.bzl", "to_external_form")
 
 _REQUIRED_KEYS = ["artifacts", "dependencies", "repositories"]
 
-def _is_valid_lock_file(lock_file_contents):
+def _is_valid_lock_file_v2(lock_file_contents):
+    return _is_valid_lock_file(lock_file_contents, "2")
+
+def _is_valid_lock_file_v3(lock_file_contents):
+    return _is_valid_lock_file(lock_file_contents, "3")
+
+def _is_valid_lock_file(lock_file_contents, desired_version):
     version = lock_file_contents.get("version")
-    if "2" != version:
+    if desired_version != version:
         return False
 
     all_keys = lock_file_contents.keys()
@@ -37,7 +43,7 @@ def _get_input_artifacts_hash(lock_file_contents):
 def _get_lock_file_hash(lock_file_contents):
     return lock_file_contents.get("__RESOLVED_ARTIFACTS_HASH")
 
-def _compute_lock_file_hash(lock_file_contents):
+def _compute_lock_file_hash_v2(lock_file_contents):
     to_hash = {}
     for key in sorted(_REQUIRED_KEYS):
         value = lock_file_contents.get(key)
@@ -47,6 +53,75 @@ def _compute_lock_file_hash(lock_file_contents):
         to_hash.update({key: json.decode(json.encode(value))})
     return hash(repr(to_hash))
 
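+# Computes a hash per artifact that also folds in the final hashes of the
+# artifact's dependencies, so a change anywhere in an artifact's dependency
+# chain changes the hash of everything that transitively depends on it.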
+def _compute_final_hash(all_infos):
+    final_hashes = dict()
+
+    # In case of circular dependencies, take a plain hash of the original
+    # info as a starting point.
+    backup_hashes = {k: hash(repr(v)) for k, v in all_infos.items()}
+
+    # Sets are Bazel 8 only, so we use a dict instead.
+    remaining = {k: 0 for k in all_infos.keys()}
+
+    # Starlark does not support recursion, so we emulate it with an explicit stack.
+    stack = []
+
+    # Starlark has no `while` loops; a large bounded `for` loop stands in for `while True`.
+    for _ in range(1000000000):
+        if len(remaining) == 0 and len(stack) == 0:
+            break
+
+        curr = None
+        if len(stack) == 0:
+            curr, _ = remaining.popitem()
+        else:
+            curr = stack.pop()
+
+        if curr in final_hashes:
+            continue
+
+        deps = all_infos[curr].get("dependencies", [])
+
+        # Make sure all dependencies are processed first.
+        unprocessed = [d for d in deps if d in remaining]
+        if len(unprocessed) > 0:
+            dep = unprocessed[0]
+            stack.append(curr)
+            stack.append(dep)
+            remaining.pop(dep, None)
+            continue
+
+        all_infos[curr]["dependency_hashes"] = {dep: final_hashes.get(dep, backup_hashes.get(dep, 0)) for dep in deps}
+        final_hashes[curr] = hash(repr(all_infos[curr]))
+
+    return final_hashes
+
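+# Builds the per-artifact info map fed to _compute_final_hash. Each shasum
+# entry becomes its own key: e.g. a hypothetical artifact "com.example:lib"
+# with shasums {"jar": ..., "sources": ...} yields the keys "com.example:lib"
+# and "com.example:lib:jar:sources".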
+def _compute_lock_file_hash_v3(lock_file_contents):
+    all_infos = dict()
+
+    for dep, dep_info in lock_file_contents["artifacts"].items():
+        shasums = dep_info["shasums"]
+        common_info = {k: v for k, v in dep_info.items() if k != "shasums"}
+
+        is_jar_type = dep.count(":") == 1
+
+        for type, sha in shasums.items():
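+            # For a plain "group:artifact" coordinate, a non-jar shasum type
+            # is keyed as ":jar:<type>"; the jar shasum itself keeps the bare
+            # coordinate. Coordinates that already carry extra segments only
+            # get ":<type>" appended.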
+            jar_suffix = ":jar" if is_jar_type else ""
+            suffix = jar_suffix + ":" + type if type != "jar" else ""
+
+            type_info = dict()
+            type_info["standard"] = common_info
+            type_info["sha"] = sha
+            all_infos[dep + suffix] = type_info
+
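+    # Record which repository serves each artifact and, below, its sorted
+    # direct dependencies, so both feed into the final hash.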
+    for repo, artifacts in lock_file_contents["repositories"].items():
+        for artifact in artifacts:
+            all_infos[artifact]["repository"] = repo
+
+    for dep, dep_info in lock_file_contents["dependencies"].items():
+        all_infos[dep]["dependencies"] = sorted(dep_info)
+
+    return _compute_final_hash(all_infos)
+
 def _to_m2_path(unpacked):
     path = "{group}/{artifact}/{version}/{artifact}-{version}".format(
         artifact = unpacked["artifact"],
@@ -188,8 +263,8 @@ def _render_lock_file(lock_file_contents, input_hash):
     contents = [
         "{",
         "    \"__AUTOGENERATED_FILE_DO_NOT_MODIFY_THIS_FILE_MANUALLY\": \"THERE_IS_NO_DATA_ONLY_ZUUL\",",
-        "    \"__INPUT_ARTIFACTS_HASH\": %s," % input_hash,
-        "    \"__RESOLVED_ARTIFACTS_HASH\": %s," % _compute_lock_file_hash(lock_file_contents),
266+ " \" __INPUT_ARTIFACTS_HASH\" : %s," % json . encode_indent ( input_hash , prefix = " " , indent = " " ) ,
267+ " \" __RESOLVED_ARTIFACTS_HASH\" : %s," % json . encode_indent ( _compute_lock_file_hash_v3 ( lock_file_contents ), prefix = " " , indent = " " ),
193268 ]
194269 if lock_file_contents .get ("conflict_resolution" ):
195270 contents .append (" \" conflict_resolution\" : %s," % json .encode_indent (lock_file_contents ["conflict_resolution" ], prefix = " " , indent = " " ))
@@ -212,17 +287,27 @@ def _render_lock_file(lock_file_contents, input_hash):
         contents.append("    \"services\": %s," % json.encode_indent(lock_file_contents["services"], prefix = "    ", indent = "    "))
     if lock_file_contents.get("skipped"):
         contents.append("    \"skipped\": %s," % json.encode_indent(lock_file_contents["skipped"], prefix = "    ", indent = "    "))
-    contents.append("    \"version\": \"2\"")
+    contents.append("    \"version\": \"3\"")
     contents.append("}")
     contents.append("")
 
     return "\n".join(contents)
 
 v2_lock_file = struct(
-    is_valid_lock_file = _is_valid_lock_file,
+    is_valid_lock_file = _is_valid_lock_file_v2,
+    get_input_artifacts_hash = _get_input_artifacts_hash,
+    get_lock_file_hash = _get_lock_file_hash,
+    compute_lock_file_hash = _compute_lock_file_hash_v2,
+    get_artifacts = _get_artifacts,
+    get_netrc_entries = _get_netrc_entries,
+    has_m2local = _has_m2local,
+)
+
+v3_lock_file = struct(
+    is_valid_lock_file = _is_valid_lock_file_v3,
     get_input_artifacts_hash = _get_input_artifacts_hash,
     get_lock_file_hash = _get_lock_file_hash,
-    compute_lock_file_hash = _compute_lock_file_hash,
+    compute_lock_file_hash = _compute_lock_file_hash_v3,
     get_artifacts = _get_artifacts,
     get_netrc_entries = _get_netrc_entries,
     render_lock_file = _render_lock_file,