@@ -50,7 +50,7 @@ def initialize(config: DataObjects.PipelineConfig):
5050 global _progress_tracker , _pipeline_config , _device_map , _pipeline_device_map
5151
5252 _progress_tracker = Utils .ModelDownloadProgress (total_models = get_model_count (config ))
53- _pipeline_config = Utils .get_pipeline_config (config .base_model_path , config .cache_directory , config .secure_token )
53+ _pipeline_config = Utils .get_pipeline_config (config .base_model_path , config .cache_directory , config .secure_token , config . is_offline_mode )
5454 _device_map = Utils .get_device_map (config , _execution_device )
5555 _pipeline_device_map = Utils .get_pipeline_device_map (config , _execution_device )
5656 return create_pipeline (config )
@@ -65,7 +65,7 @@ def download(config_args: Dict[str, Any]):
6565 _device_map = "meta"
6666 _config = DataObjects .PipelineConfig (** config_args )
6767 _progress_tracker = Utils .ModelDownloadProgress (total_models = get_model_count (_config ))
68- _pipeline_config = Utils .get_pipeline_config (_config .base_model_path , _config .cache_directory , _config .secure_token )
68+ _pipeline_config = Utils .get_pipeline_config (_config .base_model_path , _config .cache_directory , _config .secure_token , _config . is_offline_mode )
6969 create_pipeline (_config , True )
7070 return True
7171
@@ -326,16 +326,16 @@ def load_text_encoder(config: DataObjects.PipelineConfig, pipeline_kwargs: Dict[
326326 _progress_tracker .Initialize (0 , "text_encoder" )
327327 checkpoint = config .checkpoint_config .text_encoder
328328 if checkpoint :
329- print (f"[Load] Loading Checkpoint TextEncoder" )
329+ print (f"[Load] Loading Checkpoint TextEncoder, IsOffline: { config . is_offline_mode } " )
330330 is_gguf = Utils .isGGUF (checkpoint )
331331 text_encoder = T5EncoderModel .from_single_file (
332332 checkpoint ,
333333 config = _pipeline_config ["text_encoder" ],
334334 torch_dtype = config .data_type ,
335335 use_safetensors = True ,
336336 low_cpu_mem_usage = True ,
337+ local_files_only = config .is_offline_mode ,
337338 device_map = _device_map ,
338- local_files_only = False ,
339339 token = config .secure_token ,
340340 quantization_config = Quantization .auto_single_file_config (config , QuantTarget .TEXT_ENCODER , is_gguf ),
341341 )
@@ -344,7 +344,7 @@ def load_text_encoder(config: DataObjects.PipelineConfig, pipeline_kwargs: Dict[
344344 Utils .trim_memory (True )
345345 return text_encoder
346346
347- print (f"[Load] Loading Pretrained TextEncoder" )
347+ print (f"[Load] Loading Pretrained TextEncoder, IsOffline: { config . is_offline_mode } " )
348348 text_encoder = T5EncoderModel .from_pretrained (
349349 "TensorStack/TextEncoder" ,
350350 subfolder = "T5-XXL" ,
@@ -353,6 +353,7 @@ def load_text_encoder(config: DataObjects.PipelineConfig, pipeline_kwargs: Dict[
353353 quantization_config = Quantization .auto_pretrained_config (config , QuantTarget .TEXT_ENCODER ),
354354 use_safetensors = True ,
355355 low_cpu_mem_usage = True ,
356+ local_files_only = config .is_offline_mode ,
356357 device_map = _device_map ,
357358 ** pipeline_kwargs
358359 )
@@ -376,16 +377,16 @@ def load_transformer(config: DataObjects.PipelineConfig, pipeline_kwargs: Dict[s
376377 else config .checkpoint_config .single_file
377378 )
378379 if checkpoint :
379- print (f"[Load] Loading Checkpoint Transformer" )
380+ print (f"[Load] Loading Checkpoint Transformer, IsOffline: { config . is_offline_mode } " )
380381 is_gguf = Utils .isGGUF (checkpoint )
381382 transformer = CogVideoXTransformer3DModel .from_single_file (
382383 checkpoint ,
383384 config = _pipeline_config ["transformer" ],
384385 torch_dtype = config .data_type ,
385386 use_safetensors = True ,
386387 low_cpu_mem_usage = True ,
388+ local_files_only = config .is_offline_mode ,
387389 device_map = _device_map ,
388- local_files_only = False ,
389390 token = config .secure_token ,
390391 quantization_config = Quantization .auto_single_file_config (config , QuantTarget .TRANSFORMER , is_gguf ),
391392 )
@@ -394,14 +395,15 @@ def load_transformer(config: DataObjects.PipelineConfig, pipeline_kwargs: Dict[s
394395 Utils .trim_memory (True )
395396 return transformer
396397
397- print (f"[Load] Loading Pretrained Transformer" )
398+ print (f"[Load] Loading Pretrained Transformer, IsOffline: { config . is_offline_mode } " )
398399 transformer = CogVideoXTransformer3DModel .from_pretrained (
399400 config .base_model_path ,
400401 subfolder = "transformer" ,
401402 torch_dtype = config .data_type ,
402403 quantization_config = Quantization .auto_pretrained_config (config , QuantTarget .TRANSFORMER ),
403404 use_safetensors = True ,
404405 low_cpu_mem_usage = True ,
406+ local_files_only = config .is_offline_mode ,
405407 device_map = _device_map ,
406408 ** pipeline_kwargs
407409 )
@@ -425,27 +427,28 @@ def load_vae(config: DataObjects.PipelineConfig, pipeline_kwargs: Dict[str, str]
425427 else config .checkpoint_config .single_file
426428 )
427429 if checkpoint :
428- print (f"[Load] Loading Checkpoint Vae" )
430+ print (f"[Load] Loading Checkpoint Vae, IsOffline: { config . is_offline_mode } " )
429431 auto_encoder = AutoencoderKLCogVideoX .from_single_file (
430432 checkpoint ,
431433 config = _pipeline_config ["vae" ],
432434 torch_dtype = config .data_type ,
433435 use_safetensors = True ,
434436 low_cpu_mem_usage = True ,
437+ local_files_only = config .is_offline_mode ,
435438 device_map = _device_map ,
436- local_files_only = False ,
437439 token = config .secure_token ,
438440 )
439441 Utils .trim_memory (True )
440442 return auto_encoder
441443
442- print (f"[Load] Loading Pretrained Vae" )
444+ print (f"[Load] Loading Pretrained Vae, IsOffline: { config . is_offline_mode } " )
443445 auto_encoder = AutoencoderKLCogVideoX .from_pretrained (
444446 "TensorStack/AutoEncoder" ,
445447 subfolder = "CogVideoX" ,
446448 torch_dtype = config .data_type ,
447449 use_safetensors = True ,
448450 low_cpu_mem_usage = True ,
451+ local_files_only = config .is_offline_mode ,
449452 device_map = _device_map ,
450453 ** pipeline_kwargs
451454 )
@@ -468,14 +471,15 @@ def load_control_net(config: DataObjects.PipelineConfig, pipeline_kwargs: Dict[s
468471 _control_net_cache = None
469472 return None
470473
471- # print(f"[Load] Loading ControlNet")
474+ # print(f"[Load] Loading ControlNet, IsOffline: {config.is_offline_mode} ")
472475 # _control_net_name = config.control_net.name
473476 # _progress_tracker.Initialize(4, "control_net")
474477 # _control_net_cache = ControlNetModel.from_pretrained(
475478 # config.control_net.path,
476479 # torch_dtype=config.data_type,
477480 # use_safetensors=True,
478481 # low_cpu_mem_usage=True,
482+ # local_files_only=config.is_offline_mode,
479483 # device_map=_device_map,
480484 # )
481485 return None