Skip to content

Commit be7bf54

Browse files
committed
OfflineMode, Download progress Fix
1 parent 131b830 commit be7bf54

22 files changed

Lines changed: 315 additions & 261 deletions
TensorStack.Python/Config/ControlNetConfig.cs (filename inferred from the class name and namespace; confirm against the commit)

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,14 @@
1-
namespace TensorStack.Python.Config
1+
using System.Text.Json.Serialization;
2+
3+
namespace TensorStack.Python.Config
24
{
35
public class ControlNetConfig
46
{
57
public string Path { get; set; }
68
public string Name { get; set; }
79
public string Weights { get; set; }
10+
11+
[JsonPropertyName("is_offline_mode")]
12+
public bool IsOfflineMode { get; set; }
813
}
914
}
TensorStack.Python/Config/LoraConfig.cs (filename inferred from the class name and namespace; confirm against the commit)

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,14 @@
1-
namespace TensorStack.Python.Config
1+
using System.Text.Json.Serialization;
2+
3+
namespace TensorStack.Python.Config
24
{
35
public class LoraConfig
46
{
57
public string Path { get; set; }
68
public string Name { get; set; }
79
public string Weights { get; set; }
10+
11+
[JsonPropertyName("is_offline_mode")]
12+
public bool IsOfflineMode { get; set; }
813
}
914
}

TensorStack.Python/Config/PipelineConfig.cs

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -56,5 +56,8 @@ public sealed record PipelineConfig
5656

5757
[JsonPropertyName("checkpoint_config")]
5858
public CheckpointConfig CheckpointConfig { get; set; }
59+
60+
[JsonPropertyName("is_offline_mode")]
61+
public bool IsOfflineMode { get; set; }
5962
}
6063
}

TensorStack.Python/LogParser.cs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -40,8 +40,8 @@ private static LogEntry ParsePythonLog(string logEntry)
4040
if (messageSections.Length < 2)
4141
return default;
4242

43-
var message = messageSections[1].Trim([' ', '\n', '\r']);
44-
if (message.Length < 5)
43+
var message = messageSections[1].Trim([' ', '\n', '\r', '\t']);
44+
if (string.IsNullOrWhiteSpace(message) || message.Length < 5 || !message.StartsWith('['))
4545
return default;
4646

4747
return new LogEntry(DateTime.Parse(messageSections[0], CultureInfo.InvariantCulture), message);

TensorStack.Python/Pipelines/ChromaPipeline.py

Lines changed: 16 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,7 @@ def initialize(config: DataObjects.PipelineConfig):
4848
global _progress_tracker, _pipeline_config, _device_map, _pipeline_device_map
4949

5050
_progress_tracker = Utils.ModelDownloadProgress(total_models=get_model_count(config))
51-
_pipeline_config = Utils.get_pipeline_config(config.base_model_path, config.cache_directory, config.secure_token)
51+
_pipeline_config = Utils.get_pipeline_config(config.base_model_path, config.cache_directory, config.secure_token, config.is_offline_mode)
5252
_device_map = Utils.get_device_map(config, _execution_device)
5353
_pipeline_device_map = Utils.get_pipeline_device_map(config, _execution_device)
5454
return create_pipeline(config)
@@ -63,7 +63,7 @@ def download(config_args: Dict[str, Any]):
6363
_device_map = "meta"
6464
_config = DataObjects.PipelineConfig(**config_args)
6565
_progress_tracker = Utils.ModelDownloadProgress(total_models=get_model_count(_config))
66-
_pipeline_config = Utils.get_pipeline_config(_config.base_model_path, _config.cache_directory, _config.secure_token)
66+
_pipeline_config = Utils.get_pipeline_config(_config.base_model_path, _config.cache_directory, _config.secure_token, _config.is_offline_mode)
6767
create_pipeline(_config, True)
6868
return True
6969

@@ -322,16 +322,16 @@ def load_text_encoder(config: DataObjects.PipelineConfig, pipeline_kwargs: Dict[
322322
_progress_tracker.Initialize(0, "text_encoder")
323323
checkpoint = config.checkpoint_config.text_encoder
324324
if checkpoint:
325-
print(f"[Load] Loading Checkpoint TextEncoder")
325+
print(f"[Load] Loading Checkpoint TextEncoder, IsOffline: {config.is_offline_mode}")
326326
is_gguf = Utils.isGGUF(checkpoint)
327327
text_encoder = T5EncoderModel.from_single_file(
328328
checkpoint,
329329
config=_pipeline_config["text_encoder"],
330330
torch_dtype=config.data_type,
331331
use_safetensors=True,
332332
low_cpu_mem_usage=True,
333+
local_files_only=config.is_offline_mode,
333334
device_map=_device_map,
334-
local_files_only=False,
335335
token=config.secure_token,
336336
quantization_config=Quantization.auto_single_file_config(config, QuantTarget.TEXT_ENCODER, is_gguf),
337337
)
@@ -340,7 +340,7 @@ def load_text_encoder(config: DataObjects.PipelineConfig, pipeline_kwargs: Dict[
340340
Utils.trim_memory(True)
341341
return text_encoder
342342

343-
print(f"[Load] Loading Pretrained TextEncoder")
343+
print(f"[Load] Loading Pretrained TextEncoder, IsOffline: {config.is_offline_mode}")
344344
text_encoder = T5EncoderModel.from_pretrained(
345345
"TensorStack/TextEncoder",
346346
subfolder="T5-XXL",
@@ -349,6 +349,7 @@ def load_text_encoder(config: DataObjects.PipelineConfig, pipeline_kwargs: Dict[
349349
quantization_config=Quantization.auto_pretrained_config(config, QuantTarget.TEXT_ENCODER),
350350
use_safetensors=True,
351351
low_cpu_mem_usage=True,
352+
local_files_only=config.is_offline_mode,
352353
device_map=_device_map,
353354
**pipeline_kwargs
354355
)
@@ -372,16 +373,16 @@ def load_transformer(config: DataObjects.PipelineConfig, pipeline_kwargs: Dict[s
372373
else config.checkpoint_config.single_file
373374
)
374375
if checkpoint:
375-
print(f"[Load] Loading Checkpoint Transformer")
376+
print(f"[Load] Loading Checkpoint Transformer, IsOffline: {config.is_offline_mode}")
376377
is_gguf = Utils.isGGUF(checkpoint)
377378
transformer = ChromaTransformer2DModel.from_single_file(
378379
checkpoint,
379380
config=_pipeline_config["transformer"],
380381
torch_dtype=config.data_type,
381382
use_safetensors=True,
382383
low_cpu_mem_usage=True,
384+
local_files_only=config.is_offline_mode,
383385
device_map=_device_map,
384-
local_files_only=False,
385386
token=config.secure_token,
386387
quantization_config=Quantization.auto_single_file_config(config, QuantTarget.TRANSFORMER, is_gguf)
387388
)
@@ -390,14 +391,15 @@ def load_transformer(config: DataObjects.PipelineConfig, pipeline_kwargs: Dict[s
390391
Utils.trim_memory(True)
391392
return transformer
392393

393-
print(f"[Load] Loading Pretrained Transformer")
394+
print(f"[Load] Loading Pretrained Transformer, IsOffline: {config.is_offline_mode}")
394395
transformer = ChromaTransformer2DModel.from_pretrained(
395396
config.base_model_path,
396397
subfolder="transformer",
397398
torch_dtype=config.data_type,
398399
quantization_config=Quantization.auto_pretrained_config(config, QuantTarget.TRANSFORMER),
399400
use_safetensors=True,
400401
low_cpu_mem_usage=True,
402+
local_files_only=config.is_offline_mode,
401403
device_map=_device_map,
402404
**pipeline_kwargs
403405
)
@@ -421,27 +423,28 @@ def load_vae(config: DataObjects.PipelineConfig, pipeline_kwargs: Dict[str, str]
421423
else config.checkpoint_config.single_file
422424
)
423425
if checkpoint:
424-
print(f"[Load] Loading Checkpoint Vae")
426+
print(f"[Load] Loading Checkpoint Vae, IsOffline: {config.is_offline_mode}")
425427
auto_encoder = AutoencoderKL.from_single_file(
426428
checkpoint,
427429
config=_pipeline_config["vae"],
428430
torch_dtype=config.data_type,
429431
use_safetensors=True,
430432
low_cpu_mem_usage=True,
433+
local_files_only=config.is_offline_mode,
431434
device_map=_device_map,
432-
local_files_only=False,
433435
token=config.secure_token,
434436
)
435437
Utils.trim_memory(True)
436438
return auto_encoder
437439

438-
print(f"[Load] Loading Pretrained Vae")
440+
print(f"[Load] Loading Pretrained Vae, IsOffline: {config.is_offline_mode}")
439441
auto_encoder = AutoencoderKL.from_pretrained(
440442
"TensorStack/AutoEncoder",
441443
subfolder="Flux1",
442444
torch_dtype=config.data_type,
443445
use_safetensors=True,
444446
low_cpu_mem_usage=True,
447+
local_files_only=config.is_offline_mode,
445448
device_map=_device_map,
446449
**pipeline_kwargs
447450
)
@@ -464,14 +467,15 @@ def load_control_net(config: DataObjects.PipelineConfig, pipeline_kwargs: Dict[s
464467
_control_net_cache = None
465468
return None
466469

467-
# print(f"[Load] Loading Pretrained ControlNet")
470+
# print(f"[Load] Loading Pretrained ControlNet, IsOffline: {config.control_net.is_offline_mode}")
468471
# _control_net_name = config.control_net.name
469472
# _progress_tracker.Initialize(4, "control_net")
470473
# _control_net_cache = ControlNetModel.from_pretrained(
471474
# config.control_net.path,
472475
# torch_dtype=config.data_type,
473476
# use_safetensors=True,
474477
# low_cpu_mem_usage=True,
478+
# local_files_only=config.control_net.is_offline_mode,
475479
# device_map=_device_map,
476480
# )
477481
return None

TensorStack.Python/Pipelines/CogVideoXPipeline.py

Lines changed: 16 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,7 @@ def initialize(config: DataObjects.PipelineConfig):
5050
global _progress_tracker, _pipeline_config, _device_map, _pipeline_device_map
5151

5252
_progress_tracker = Utils.ModelDownloadProgress(total_models=get_model_count(config))
53-
_pipeline_config = Utils.get_pipeline_config(config.base_model_path, config.cache_directory, config.secure_token)
53+
_pipeline_config = Utils.get_pipeline_config(config.base_model_path, config.cache_directory, config.secure_token, config.is_offline_mode)
5454
_device_map = Utils.get_device_map(config, _execution_device)
5555
_pipeline_device_map = Utils.get_pipeline_device_map(config, _execution_device)
5656
return create_pipeline(config)
@@ -65,7 +65,7 @@ def download(config_args: Dict[str, Any]):
6565
_device_map = "meta"
6666
_config = DataObjects.PipelineConfig(**config_args)
6767
_progress_tracker = Utils.ModelDownloadProgress(total_models=get_model_count(_config))
68-
_pipeline_config = Utils.get_pipeline_config(_config.base_model_path, _config.cache_directory, _config.secure_token)
68+
_pipeline_config = Utils.get_pipeline_config(_config.base_model_path, _config.cache_directory, _config.secure_token, _config.is_offline_mode)
6969
create_pipeline(_config, True)
7070
return True
7171

@@ -326,16 +326,16 @@ def load_text_encoder(config: DataObjects.PipelineConfig, pipeline_kwargs: Dict[
326326
_progress_tracker.Initialize(0, "text_encoder")
327327
checkpoint = config.checkpoint_config.text_encoder
328328
if checkpoint:
329-
print(f"[Load] Loading Checkpoint TextEncoder")
329+
print(f"[Load] Loading Checkpoint TextEncoder, IsOffline: {config.is_offline_mode}")
330330
is_gguf = Utils.isGGUF(checkpoint)
331331
text_encoder = T5EncoderModel.from_single_file(
332332
checkpoint,
333333
config=_pipeline_config["text_encoder"],
334334
torch_dtype=config.data_type,
335335
use_safetensors=True,
336336
low_cpu_mem_usage=True,
337+
local_files_only=config.is_offline_mode,
337338
device_map=_device_map,
338-
local_files_only=False,
339339
token=config.secure_token,
340340
quantization_config=Quantization.auto_single_file_config(config, QuantTarget.TEXT_ENCODER, is_gguf),
341341
)
@@ -344,7 +344,7 @@ def load_text_encoder(config: DataObjects.PipelineConfig, pipeline_kwargs: Dict[
344344
Utils.trim_memory(True)
345345
return text_encoder
346346

347-
print(f"[Load] Loading Pretrained TextEncoder")
347+
print(f"[Load] Loading Pretrained TextEncoder, IsOffline: {config.is_offline_mode}")
348348
text_encoder = T5EncoderModel.from_pretrained(
349349
"TensorStack/TextEncoder",
350350
subfolder="T5-XXL",
@@ -353,6 +353,7 @@ def load_text_encoder(config: DataObjects.PipelineConfig, pipeline_kwargs: Dict[
353353
quantization_config=Quantization.auto_pretrained_config(config, QuantTarget.TEXT_ENCODER),
354354
use_safetensors=True,
355355
low_cpu_mem_usage=True,
356+
local_files_only=config.is_offline_mode,
356357
device_map=_device_map,
357358
**pipeline_kwargs
358359
)
@@ -376,16 +377,16 @@ def load_transformer(config: DataObjects.PipelineConfig, pipeline_kwargs: Dict[s
376377
else config.checkpoint_config.single_file
377378
)
378379
if checkpoint:
379-
print(f"[Load] Loading Checkpoint Transformer")
380+
print(f"[Load] Loading Checkpoint Transformer, IsOffline: {config.is_offline_mode}")
380381
is_gguf = Utils.isGGUF(checkpoint)
381382
transformer = CogVideoXTransformer3DModel.from_single_file(
382383
checkpoint,
383384
config=_pipeline_config["transformer"],
384385
torch_dtype=config.data_type,
385386
use_safetensors=True,
386387
low_cpu_mem_usage=True,
388+
local_files_only=config.is_offline_mode,
387389
device_map=_device_map,
388-
local_files_only=False,
389390
token=config.secure_token,
390391
quantization_config=Quantization.auto_single_file_config(config, QuantTarget.TRANSFORMER, is_gguf),
391392
)
@@ -394,14 +395,15 @@ def load_transformer(config: DataObjects.PipelineConfig, pipeline_kwargs: Dict[s
394395
Utils.trim_memory(True)
395396
return transformer
396397

397-
print(f"[Load] Loading Pretrained Transformer")
398+
print(f"[Load] Loading Pretrained Transformer, IsOffline: {config.is_offline_mode}")
398399
transformer = CogVideoXTransformer3DModel.from_pretrained(
399400
config.base_model_path,
400401
subfolder="transformer",
401402
torch_dtype=config.data_type,
402403
quantization_config=Quantization.auto_pretrained_config(config, QuantTarget.TRANSFORMER),
403404
use_safetensors=True,
404405
low_cpu_mem_usage=True,
406+
local_files_only=config.is_offline_mode,
405407
device_map=_device_map,
406408
**pipeline_kwargs
407409
)
@@ -425,27 +427,28 @@ def load_vae(config: DataObjects.PipelineConfig, pipeline_kwargs: Dict[str, str]
425427
else config.checkpoint_config.single_file
426428
)
427429
if checkpoint:
428-
print(f"[Load] Loading Checkpoint Vae")
430+
print(f"[Load] Loading Checkpoint Vae, IsOffline: {config.is_offline_mode}")
429431
auto_encoder = AutoencoderKLCogVideoX.from_single_file(
430432
checkpoint,
431433
config=_pipeline_config["vae"],
432434
torch_dtype=config.data_type,
433435
use_safetensors=True,
434436
low_cpu_mem_usage=True,
437+
local_files_only=config.is_offline_mode,
435438
device_map=_device_map,
436-
local_files_only=False,
437439
token=config.secure_token,
438440
)
439441
Utils.trim_memory(True)
440442
return auto_encoder
441443

442-
print(f"[Load] Loading Pretrained Vae")
444+
print(f"[Load] Loading Pretrained Vae, IsOffline: {config.is_offline_mode}")
443445
auto_encoder = AutoencoderKLCogVideoX.from_pretrained(
444446
"TensorStack/AutoEncoder",
445447
subfolder="CogVideoX",
446448
torch_dtype=config.data_type,
447449
use_safetensors=True,
448450
low_cpu_mem_usage=True,
451+
local_files_only=config.is_offline_mode,
449452
device_map=_device_map,
450453
**pipeline_kwargs
451454
)
@@ -468,14 +471,15 @@ def load_control_net(config: DataObjects.PipelineConfig, pipeline_kwargs: Dict[s
468471
_control_net_cache = None
469472
return None
470473

471-
# print(f"[Load] Loading ControlNet")
474+
# print(f"[Load] Loading ControlNet, IsOffline: {config.control_net.is_offline_mode}")
472475
# _control_net_name = config.control_net.name
473476
# _progress_tracker.Initialize(4, "control_net")
474477
# _control_net_cache = ControlNetModel.from_pretrained(
475478
# config.control_net.path,
476479
# torch_dtype=config.data_type,
477480
# use_safetensors=True,
478481
# low_cpu_mem_usage=True,
482+
# local_files_only=config.control_net.is_offline_mode,
479483
# device_map=_device_map,
480484
# )
481485
return None

0 commit comments

Comments (0)