Mirror of https://git.datalinker.icu/kijai/ComfyUI-CogVideoXWrapper.git
(synced 2025-12-09 21:04:23 +08:00)

Compare commits: 3a56f2d110 ... a6556a7bd8 (5 commits)
Commits in this range:

- a6556a7bd8
- fdb8abd279
- 881bbbf6c9
- 389fb0323f
- 222b2e3b37
.github/workflows/publish.yml (vendored, 8 lines changed)
```diff
@@ -8,17 +8,19 @@ on:
     paths:
       - "pyproject.toml"
 
+permissions:
+  issues: write
+
 jobs:
   publish-node:
     name: Publish Custom Node to registry
     runs-on: ubuntu-latest
-    # if this is a forked repository. Skipping the workflow.
-    if: github.event.repository.fork == false
+    if: ${{ github.repository_owner == 'kijai' }}
     steps:
       - name: Check out code
         uses: actions/checkout@v4
       - name: Publish Custom Node
-        uses: Comfy-Org/publish-node-action@main
+        uses: Comfy-Org/publish-node-action@v1
         with:
           ## Add your own personal access token to your Github Repository secrets and reference it here.
           personal_access_token: ${{ secrets.REGISTRY_ACCESS_TOKEN }}
```
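In this hunk the workflow gains an explicit `permissions` block (`issues: write`), the fork guard is replaced by an owner check (`github.repository_owner == 'kijai'`) so that only the upstream repository runs the publish job, and `Comfy-Org/publish-node-action` is pinned to the `v1` tag rather than tracking the moving `main` branch.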
```diff
@@ -67,8 +67,9 @@ class CogVideoXPatchEmbed(nn.Module):
             post_time_compression_frames,
             self.spatial_interpolation_scale,
             self.temporal_interpolation_scale,
+            output_type="pt",
         )
-        pos_embedding = torch.from_numpy(pos_embedding).flatten(0, 1)
+        pos_embedding = pos_embedding.flatten(0, 1)
         joint_pos_embedding = torch.zeros(
             1, self.max_text_seq_length + num_patches, self.embed_dim, requires_grad=False
         )
```
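The two changed lines go together: with `output_type="pt"`, the position-embedding helper (presumably diffusers' `get_3d_sincos_pos_embed`, which is not named in this hunk) returns a `torch.Tensor` directly, so the `torch.from_numpy(...)` wrapper is dropped. A minimal sketch, assuming diffusers >= 0.33 and that helper; the numeric values are illustrative, not taken from the wrapper:

```python
# Minimal sketch, assuming diffusers >= 0.33, where get_3d_sincos_pos_embed
# accepts output_type and returns a torch.Tensor for output_type="pt".
# Sizes and scales below are illustrative only.
from diffusers.models.embeddings import get_3d_sincos_pos_embed

pos_embedding = get_3d_sincos_pos_embed(
    embed_dim=64,                       # split 3:1 across spatial/temporal parts
    spatial_size=(45, 30),              # post-patch spatial grid (illustrative)
    temporal_size=13,                   # post-time-compression frames (illustrative)
    spatial_interpolation_scale=1.875,
    temporal_interpolation_scale=1.0,
    output_type="pt",                   # tensor out; no torch.from_numpy needed
)
pos_embedding = pos_embedding.flatten(0, 1)   # -> (T * H * W, embed_dim)
```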
```diff
@@ -173,6 +174,8 @@ def get_3d_rotary_pos_embed(
         grid_t = np.arange(temporal_size, dtype=np.float32)
         grid_t = np.linspace(0, temporal_size, temporal_size, endpoint=False, dtype=np.float32)
     elif grid_type == "slice":
+        if max_size is None:
+            raise ValueError("`max_size` must be provided when `grid_type` is 'slice'")
         max_h, max_w = max_size
         grid_size_h, grid_size_w = grid_size
         grid_h = np.arange(max_h, dtype=np.float32)
```
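The added check makes the `"slice"` branch fail fast with a clear message when `max_size` is missing, instead of a bare `TypeError` from unpacking `None`. A standalone sketch of the same guard pattern (a hypothetical helper, not the wrapper's actual function):

```python
from typing import Optional, Tuple
import numpy as np

def build_slice_grids(grid_type: str,
                      max_size: Optional[Tuple[int, int]] = None):
    """Hypothetical reduction of the 'slice' branch shown above."""
    if grid_type != "slice":
        raise NotImplementedError(grid_type)
    # Fail fast with an explicit error instead of a TypeError from unpacking None.
    if max_size is None:
        raise ValueError("`max_size` must be provided when `grid_type` is 'slice'")
    max_h, max_w = max_size
    return (np.arange(max_h, dtype=np.float32),
            np.arange(max_w, dtype=np.float32))
```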
```diff
@@ -1,5 +1,5 @@
 huggingface_hub
-diffusers>=0.31.0
+diffusers>=0.33.1
 accelerate>=0.33.0
 einops
 peft
```
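The diffusers floor moves from 0.31.0 to 0.33.1, which lines up with the `output_type="pt"` usage above. A quick way to confirm an existing environment satisfies the new pin (standard library only; the 0.33.1 value comes from the diff, and diffusers must already be installed for the lookup to succeed):

```python
from importlib.metadata import version

installed = version("diffusers")
print(installed)  # expect >= 0.33.1 per the updated requirements
```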