1+ import contextlib
12import hashlib
23import os
34from dataclasses import dataclass
5+ from tempfile import NamedTemporaryFile
46from typing import Any , Dict , List , Optional
7+ from urllib .parse import urlparse
58
9+ import click
10+ import requests
611from jumpstarter_driver_composite .client import CompositeClient
712from jumpstarter_driver_opendal .common import PathBuf
813from opendal import Operator
@@ -64,6 +69,55 @@ def get_target_iqn(self) -> str:
6469 """
6570 return self .call ("get_target_iqn" )
6671
72+ def _normalized_name_from_file (self , path : str ) -> str :
73+ base = os .path .basename (path )
74+ for ext in (".gz" , ".xz" , ".bz2" ):
75+ if base .endswith (ext ):
76+ base = base [: - len (ext )]
77+ break
78+ if base .endswith (".img" ):
79+ base = base [: - len (".img" )]
80+ return base or "image"
81+
def _get_src_and_operator(
    self, file: str, headers: tuple[str, ...]
) -> tuple[str, Optional[Operator], Optional[str]]:
    """Resolve *file* into ``(source path, opendal operator, temp path to clean up)``.

    Three cases:
      * HTTP(S) URL with custom headers: downloaded to a local temp file
        (opendal cannot attach arbitrary request headers); the temp path is
        returned both as the source and as the cleanup path.
      * HTTP(S) URL without headers: served directly through an opendal operator.
      * Local path: made absolute and served through an opendal operator.

    Raises:
        click.ClickException: when a ``--header`` value is not ``'Key: Value'``.
    """
    from jumpstarter_driver_opendal.client import operator_for_path

    if file.startswith(("http://", "https://")):
        if headers:
            header_map: Dict[str, str] = {}
            for h in headers:
                if ":" not in h:
                    raise click.ClickException(f"Invalid header format: {h!r}. Expected 'Key: Value'.")
                key, value = h.split(":", 1)
                key = key.strip()
                value = value.strip()
                if not key:
                    raise click.ClickException(f"Invalid header key in: {h!r}")
                header_map[key] = value

            parsed = urlparse(file)
            # Keep the original basename as the suffix so the compression
            # extension survives for later algorithm detection.
            tf = NamedTemporaryFile(
                prefix="jumpstarter-iscsi-",
                suffix=os.path.basename(parsed.path),
                delete=False,
            )
            temp_path = tf.name
            try:
                with tf:
                    with requests.get(file, stream=True, headers=header_map) as resp:
                        resp.raise_for_status()
                        for chunk in resp.iter_content(chunk_size=65536):
                            if chunk:
                                tf.write(chunk)
            except Exception:
                # Don't leak the partially written temp file on a failed download.
                with contextlib.suppress(OSError):
                    os.remove(temp_path)
                raise
            return temp_path, None, temp_path

        # No custom headers: opendal can fetch the URL itself.
        _, src_operator, _ = operator_for_path(file)
        return file, src_operator, None

    file = os.path.abspath(file)
    _, src_operator, _ = operator_for_path(file)
    return file, src_operator, None
120+
67121 def add_lun (self , name : str , file_path : str , size_mb : int = 0 , is_block : bool = False ) -> str :
68122 """
69123 Add a new LUN to the iSCSI target
@@ -112,11 +166,12 @@ def _calculate_file_hash(self, file_path: str, operator: Optional[Operator] = No
112166 hash_obj .update (chunk )
113167 return hash_obj .hexdigest ()
114168 else :
115- from jumpstarter_driver_opendal .client import operator_for_path
116-
117- path , op , _ = operator_for_path (file_path )
118169 hash_obj = hashlib .sha256 ()
119- with op .open (str (path ), "rb" ) as f :
170+ if isinstance (file_path , str ) and file_path .startswith (("http://" , "https://" )):
171+ src_path = urlparse (file_path ).path
172+ else :
173+ src_path = str (file_path )
174+ with operator .open (str (src_path ), "rb" ) as f :
120175 while chunk := f .read (8192 ):
121176 hash_obj .update (chunk )
122177 return hash_obj .hexdigest ()
@@ -125,6 +180,7 @@ def _files_are_identical(self, src: PathBuf, dst_path: str, operator: Optional[O
125180 """Check if source and destination files are identical"""
126181 try :
127182 if not self .storage .exists (dst_path ):
183+ self .logger .info (f"{ dst_path } does not exist" )
128184 return False
129185
130186 dst_stat = self .storage .stat (dst_path )
@@ -133,22 +189,58 @@ def _files_are_identical(self, src: PathBuf, dst_path: str, operator: Optional[O
133189 if operator is None :
134190 src_size = os .path .getsize (str (src ))
135191 else :
136- from jumpstarter_driver_opendal .client import operator_for_path
137-
138- path , op , _ = operator_for_path (src )
139- src_size = op .stat (str (path )).content_length
192+ if isinstance (src , str ) and src .startswith (("http://" , "https://" )):
193+ src_path = urlparse (src ).path
194+ else :
195+ src_path = str (src )
196+ src_size = operator .stat (str (src_path )).content_length
140197
141198 if src_size != dst_size :
199+ self .logger .info (f"Source size { src_size } != destination size { dst_size } " )
142200 return False
143201
202+ self .logger .info ("checking hashes" )
144203 src_hash = self ._calculate_file_hash (str (src ), operator )
204+ self .logger .info (f"Source hash: { src_hash } " )
145205 dst_hash = self .storage .hash (dst_path , "sha256" )
206+ self .logger .info (f"Destination hash: { dst_hash } " )
146207
147208 return src_hash == dst_hash
148209
149210 except Exception :
150211 return False
151212
213+ def _should_skip_upload (
214+ self , src_path : str , dst_path : str , operator : Optional [Operator ], force_upload : bool , algo : Optional [str ]
215+ ) -> bool :
216+ if force_upload or algo is not None or not self .storage .exists (dst_path ):
217+ return False
218+
219+ self .logger .info (f"Checking if { src_path } and { dst_path } are identical" )
220+ if self ._files_are_identical (src_path , dst_path , operator ):
221+ self .logger .info (f"File { dst_path } already exists and is identical to source. Skipping upload..." )
222+ return True
223+
224+ self .logger .info (f"File { dst_path } is not identical to source" )
225+ return False
226+
227+ def _upload_file (
228+ self , src_path : str , dst_name : str , dst_path : str , operator : Optional [Operator ], algo : Optional [str ]
229+ ):
230+ if algo is None :
231+ self .logger .info (f"Uploading { src_path } to { dst_path } ..." )
232+ self .storage .write_from_path (dst_path , src_path , operator )
233+ else :
234+ ext_to_algo = {".gz" : "gz" , ".xz" : "xz" , ".bz2" : "bz2" }
235+ ext = next (k for k , v in ext_to_algo .items () if v == algo )
236+ compressed_path = f"{ dst_name } .img{ ext } "
237+ self .logger .info (f"Uploading { src_path } to { compressed_path } ..." )
238+ self .storage .write_from_path (compressed_path , src_path , operator )
239+ self .logger .info (f"Decompressing on exporter: { compressed_path } -> { dst_name } .img ..." )
240+ self .call ("decompress" , compressed_path , f"{ dst_name } .img" , algo )
241+ with contextlib .suppress (Exception ):
242+ self .storage .delete (compressed_path )
243+
152244 def upload_image (
153245 self ,
154246 dst_name : str ,
@@ -176,18 +268,70 @@ def upload_image(
176268 size_mb = int (size_mb )
177269 dst_path = f"{ dst_name } .img"
178270
179- if not force_upload and self ._files_are_identical (src , dst_path , operator ):
180- print (f"File { dst_path } already exists and is identical to source. Skipping upload." )
181- else :
182- print (f"Uploading { src } to { dst_path } ..." )
183- self .storage .write_from_path (dst_path , src , operator )
271+ src_path = str (src )
272+ if operator is None and not src_path .startswith (("http://" , "https://" )):
273+ src_path = os .path .abspath (src_path )
274+
275+ ext_to_algo = {".gz" : "gz" , ".xz" : "xz" , ".bz2" : "bz2" }
276+ algo = next ((v for k , v in ext_to_algo .items () if src_path .endswith (k )), None )
277+
278+ if not self ._should_skip_upload (src_path , dst_path , operator , force_upload , algo ):
279+ self ._upload_file (src_path , dst_name , dst_path , operator , algo )
184280
185281 if size_mb <= 0 :
186- src_path = os .path .join (self .storage ._storage .root_dir , dst_path )
187- size_mb = os .path .getsize (src_path ) // (1024 * 1024 )
188- if size_mb <= 0 :
282+ try :
283+ dst_stat = self .storage .stat (dst_path )
284+ size_mb = max (1 , int (dst_stat .content_length ) // (1024 * 1024 ))
285+ except Exception :
189286 size_mb = 1
190287
191288 self .add_lun (dst_name , dst_path , size_mb )
192-
193289 return self .get_target_iqn ()
290+
def cli(self):
    """Extend the composite CLI with a ``serve`` command exposing an image as an iSCSI LUN."""
    base = super().cli()

    @base.command()
    @click.argument("file", type=str)
    @click.option("--name", "name", "-n", type=str, help="LUN name (defaults to basename without extension)")
    @click.option("--size-mb", type=int, default=0, show_default=True, help="Size in MB if creating a new image")
    @click.option(
        "--force-upload",
        is_flag=True,
        default=False,
        help="Force uploading even if the file appears identical on the exporter",
    )
    @click.option(
        "--header",
        "headers",
        multiple=True,
        help="Custom HTTP header in 'Key: Value' format. Repeatable.",
    )
    def serve(file: str, name: Optional[str], size_mb: int, force_upload: bool, headers: tuple[str, ...]):
        """Serve an image as an iSCSI LUN from a local path or HTTP(S) URL."""
        self.start()

        # Best effort: stale LUNs from a previous run must not block serving.
        with contextlib.suppress(Exception):
            self.call("clear_all_luns")

        if not name:
            if file.startswith(("http://", "https://")):
                candidate = urlparse(file).path
            else:
                candidate = file
            name = self._normalized_name_from_file(candidate)

        src_path, src_operator, temp_cleanup = self._get_src_and_operator(file, headers)
        try:
            iqn = self.upload_image(
                name, src_path, size_mb=size_mb, operator=src_operator, force_upload=force_upload
            )
        finally:
            # Downloaded temp files are removed even when the upload fails.
            if temp_cleanup is not None:
                with contextlib.suppress(Exception):
                    os.remove(temp_cleanup)

        click.echo(f"{self.get_host()}:{self.get_port()} {iqn}")

    return base
0 commit comments