"""Bazel macros used in OSS."""
def _py_proto_library_impl(ctx):
    """Implementation of py_proto_library rule.

    Runs protoc over the proto_library deps' workspace-local sources to
    generate `*_pb2.py` files, and returns a PyInfo provider so the target can
    be used directly as a py_library dependency. Deps that already provide
    PyInfo are passed through unchanged.
    """
    proto_deps = ctx.attr.deps

    # Separate proto and Python dependencies.
    all_sources = []
    py_infos = []

    for dep in proto_deps:
        if ProtoInfo in dep:
            # It's a proto_library - collect proto sources.
            all_sources.extend(dep[ProtoInfo].direct_sources)
        elif PyInfo in dep:
            # It's already a py_library - collect its PyInfo for passthrough.
            py_infos.append(dep[PyInfo])

    # Filter to only include sources from the workspace (not external packages).
    # We can only declare outputs in our own package.
    workspace_sources = []
    for src in all_sources:
        # Filter out external sources (they start with external/ or ..).
        if not src.short_path.startswith("external/") and not src.short_path.startswith("../"):
            workspace_sources.append(src)

    # Generate Python output files from proto sources.
    py_outputs = []
    for proto_src in workspace_sources:
        # Use just the basename to avoid path issues.
        # NOTE(review): protos with the same basename in different directories
        # would collide here — presumably not the case in this repo; verify.
        basename = proto_src.basename[:-6]  # Remove .proto
        py_file = ctx.actions.declare_file(basename + "_pb2.py")
        py_outputs.append(py_file)

    if py_outputs:
        # Build proto_path arguments for protoc.
        # We need to include paths for workspace root and external dependencies.
        proto_path_args = []

        # Add current directory to find workspace proto files.
        proto_path_args.append("--proto_path=.")

        # Collect proto_path entries from all transitive dependencies.
        # Use dictionary as a set (Starlark doesn't have set type).
        proto_paths = {".": True}

        # Also add directories of workspace sources so imports like "any.proto"
        # (in the same folder) resolve correctly.
        for ws in workspace_sources:
            ws_dir = "/".join(ws.short_path.split("/")[:-1])
            if ws_dir and ws_dir not in proto_paths:
                proto_paths[ws_dir] = True
                proto_path_args.append("--proto_path=" + ws_dir)

        for dep in proto_deps:
            if ProtoInfo in dep:
                # Add proto_source_root if available.
                if hasattr(dep[ProtoInfo], 'proto_source_root'):
                    root = dep[ProtoInfo].proto_source_root
                    if root and root not in proto_paths:
                        proto_paths[root] = True
                        proto_path_args.append("--proto_path=" + root)

                # Also derive from file paths for more coverage.
                for src in dep[ProtoInfo].transitive_sources.to_list():
                    # Use the directory containing the proto file's import root.
                    # For external/com_google_protobuf/src/google/protobuf/any.proto,
                    # we want external/com_google_protobuf/src
                    if src.path.startswith("external/com_google_protobuf/"):
                        proto_path = "external/com_google_protobuf/src"
                        if proto_path not in proto_paths:
                            proto_paths[proto_path] = True
                            proto_path_args.append("--proto_path=" + proto_path)
                    elif src.path.startswith("external/"):
                        # For other external repos like tensorflow_metadata:
                        # extract external/repo_name.
                        parts = src.path.split("/")
                        if len(parts) >= 2:
                            proto_path = "/".join(parts[:2])
                            if proto_path not in proto_paths:
                                proto_paths[proto_path] = True
                                proto_path_args.append("--proto_path=" + proto_path)

                    # Also add Bazel root paths (e.g. bazel-out roots for
                    # generated protos).
                    if src.root.path and src.root.path not in proto_paths:
                        proto_paths[src.root.path] = True
                        proto_path_args.append("--proto_path=" + src.root.path)

        # Build list of proto file paths - only include workspace sources.
        proto_file_args = []
        for src in workspace_sources:
            proto_file_args.append(src.short_path)

        # Run protoc to generate Python files.
        # Use ctx.bin_dir.path as the output directory root.
        output_root = ctx.bin_dir.path

        ctx.actions.run(
            # Include workspace sources plus all transitive dependencies for imports.
            inputs = depset(direct = workspace_sources, transitive = [
                dep[ProtoInfo].transitive_sources
                for dep in proto_deps
                if ProtoInfo in dep
            ]),
            outputs = py_outputs,
            executable = ctx.executable._protoc,
            arguments = [
                "--python_out=" + output_root,
            ] + proto_path_args + proto_file_args,
            mnemonic = "ProtocPython",
        )

    # Collect transitive sources from both generated files and Python deps.
    all_transitive_sources = [depset(py_outputs)]
    all_imports = [depset([ctx.bin_dir.path])] if py_outputs else []

    for py_info in py_infos:
        all_transitive_sources.append(py_info.transitive_sources)
        if hasattr(py_info, 'imports'):
            all_imports.append(py_info.imports)

    # Return PyInfo provider so this can be used as a py_library dependency.
    # Merge proto-generated files with passthrough Python dependencies.
    return [
        DefaultInfo(files = depset(py_outputs)),
        PyInfo(
            transitive_sources = depset(transitive = all_transitive_sources),
            imports = depset(transitive = all_imports),
            has_py2_only_sources = False,
            has_py3_only_sources = True,
        ),
    ]
145-
# Private rule backing the s2t_* macros below: accepts deps that provide
# either ProtoInfo (proto_library) or PyInfo (py_library) and always
# provides PyInfo itself.
_py_proto_library_rule = rule(
    implementation = _py_proto_library_impl,
    attrs = {
        "deps": attr.label_list(
            providers = [[ProtoInfo], [PyInfo]],  # Accept either ProtoInfo OR PyInfo
            doc = "Proto library or Python library dependencies",
        ),
        # Protoc compiler, built for the execution platform (cfg = "exec").
        "_protoc": attr.label(
            default = "@com_google_protobuf//:protoc",
            executable = True,
            cfg = "exec",
        ),
    },
    provides = [PyInfo],
)
161-
# Wrapper for cc_proto_library to maintain compatibility with old Protobuf 3.x API
def cc_proto_library(
        name,
        srcs = [],
        deps = [],
        cc_libs = [],
        protoc = None,
        default_runtime = None,
        use_grpc_plugin = None,
        testonly = 0,
        visibility = None,
        **kwargs):
    """Wrapper for cc_proto_library that works with Protobuf 4.x.

    Declares a proto_library named "<name>_proto" for the sources, then a
    native cc_proto_library "<name>" on top of it. The legacy Protobuf 3.x
    arguments (cc_libs, protoc, default_runtime, use_grpc_plugin, extra
    kwargs) are accepted for call-site compatibility but ignored.
    """
    _ignore = [cc_libs, protoc, default_runtime, use_grpc_plugin, kwargs]

    proto_target = name + "_proto"

    # Local deps follow the same "<dep>_proto" naming scheme; external
    # (@repo) labels are used verbatim.
    proto_deps = []
    for dep in deps:
        proto_deps.append(dep if dep.startswith("@") else dep + "_proto")

    # The proto_library carries the .proto sources.
    native.proto_library(
        name = proto_target,
        srcs = srcs,
        deps = proto_deps,
        testonly = testonly,
        visibility = visibility,
    )

    # Generate the C++ stubs from the proto_library declared above.
    native.cc_proto_library(
        name = name,
        deps = [":" + proto_target],
        testonly = testonly,
        visibility = visibility,
    )
17+ load ("@com_google_protobuf//:protobuf.bzl" , "cc_proto_library" , "py_proto_library" )
18+ load ("@rules_cc//cc:cc_binary.bzl" , "cc_binary" )
19+ load ("@rules_cc//cc:cc_library.bzl" , "cc_library" )
19320
def s2t_pytype_library(
        name,
        srcs = [],
        deps = [],
        srcs_version = "PY3ONLY",
        testonly = False):
    """Python library that automatically wraps proto_library deps with PyInfo.

    Each dependency (except protobuf_python, which is already a proper Python
    library) is wrapped with the custom _py_proto_library_rule: deps providing
    ProtoInfo gain PyInfo; deps providing neither ProtoInfo nor PyInfo fail
    with a clear provider error.
    """
    wrapped_deps = []
    for dep in deps:
        # protobuf_python is already a proper Python library - use it as-is.
        if dep == "@com_google_protobuf//:protobuf_python":
            wrapped_deps.append(dep)
            continue

        # Derive a target-name-safe suffix from the dependency's label.
        sanitized = dep
        for old, new in [(":", "_"), ("//", ""), ("/", "_"), ("@", ""), ("-", "_"), (".", "_")]:
            sanitized = sanitized.replace(old, new)
        wrapper_name = name + "_proto_wrapper_" + sanitized

        # Wrap the dependency so that proto_library targets provide PyInfo.
        _py_proto_library_rule(
            name = wrapper_name,
            deps = [dep],
            testonly = testonly,
        )
        wrapped_deps.append(":" + wrapper_name)

    native.py_library(
        name = name,
        srcs = srcs,
        deps = wrapped_deps,
        testonly = testonly,
    )
23528
23629def s2t_proto_library (
23730 name ,
@@ -259,22 +52,18 @@ def s2t_proto_library(
25952 testonly = testonly ,
26053 )
26154
262- # Create a native proto_library for Python generation
263- # This is needed by s2t_proto_library_py
264- proto_lib_deps = [d + "_proto" if not d .startswith ("@" ) else d for d in deps ]
265- native .proto_library (
266- name = name + "_proto" ,
267- srcs = srcs ,
268- deps = proto_lib_deps ,
269- visibility = visibility ,
270- testonly = testonly ,
271- )
55+ use_grpc_plugin = None
56+ if cc_grpc_version :
57+ use_grpc_plugin = True
27258
273- # Create cc_proto_library that depends on the proto_library we just created
274- # Don't use our cc_proto_library wrapper to avoid duplicate proto_library creation
275- native .cc_proto_library (
59+ # TODO(martinz): replace with proto_library, when that works.
60+ cc_proto_library (
27661 name = name ,
277- deps = [":" + name + "_proto" ],
62+ srcs = srcs ,
63+ deps = deps ,
64+ cc_libs = ["@com_google_protobuf//:protobuf" ],
65+ protoc = "@com_google_protobuf//:protoc" ,
66+ default_runtime = "@com_google_protobuf//:protobuf" ,
27867 testonly = testonly ,
27968 visibility = visibility ,
28069 )
@@ -287,7 +76,7 @@ DYNAMIC_DEPS = ["@local_config_tf//:libtensorflow_framework", "@local_config_tf/
28776
28877def s2t_dynamic_binary (name , deps ):
28978 """Creates a .so file intended for linking with tensorflow_framework.so."""
290- native . cc_binary (
79+ cc_binary (
29180 name = name ,
29281 copts = DYNAMIC_COPTS ,
29382 linkshared = 1 ,
@@ -300,7 +89,7 @@ def s2t_dynamic_library(
30089 deps = None ):
30190 """Creates a static library intended for linking with tensorflow_framework.so."""
30291 true_deps = [] if deps == None else deps
303- native . cc_library (
92+ cc_library (
30493 name = name ,
30594 srcs = srcs ,
30695 alwayslink = 1 ,
@@ -380,25 +169,15 @@ def s2t_proto_library_cc(
380169 )
381170
def s2t_proto_library_py(name, proto_library, srcs = [], deps = [], oss_deps = [], visibility = None, testonly = 0, api_version = None):
    """Opensource py_proto_library.

    Uses the custom _py_proto_library_rule to generate Python stubs from the
    proto_library created by s2t_proto_library (which names it
    "<proto_library>_proto") and to provide PyInfo.

    Args:
        name: name of the resulting Python proto target.
        proto_library: base name of the s2t_proto_library target to build
            from; required.
        srcs: ignored, kept for call-site compatibility.
        deps: ignored, kept for call-site compatibility.
        oss_deps: additional deps forwarded to the generated target.
        visibility: standard Bazel visibility.
        testonly: standard Bazel testonly flag.
        api_version: ignored, kept for call-site compatibility.
    """
    _ignore = [api_version, srcs, deps]

    if not proto_library:
        fail("proto_library parameter is required for s2t_proto_library_py")

    # s2t_proto_library creates a proto_library named {name}_proto,
    # so we need to reference it correctly.
    actual_proto_library = ":" + proto_library + "_proto"

    # Use our custom py_proto_library rule. Forward testonly (previously
    # dropped) for consistency with s2t_pytype_library, so testonly proto
    # deps can be consumed; rule targets accept the common testonly attr.
    _py_proto_library_rule(
        name = name,
        deps = [actual_proto_library] + oss_deps,
        visibility = visibility,
        testonly = testonly,
    )
0 commit comments