# // Copyright (c) 2025 Bytedance Ltd. and/or its affiliates
# //
# // Licensed under the Apache License, Version 2.0 (the "License");
# // you may not use this file except in compliance with the License.
# // You may obtain a copy of the License at
# //
# // http://www.apache.org/licenses/LICENSE-2.0
# //
# // Unless required by applicable law or agreed to in writing, software
# // distributed under the License is distributed on an "AS IS" BASIS,
# // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# // See the License for the specific language governing permissions and
# // limitations under the License.
from dataclasses import dataclass
from typing import Any, Callable, Dict, List, Tuple

import torch
from torch import nn


@dataclass
class MMArg:
    """Pairs a video-branch value with its text-branch counterpart."""

    vid: Any
    txt: Any


def get_args(key: str, args: List[Any]) -> List[Any]:
    """Select the `vid` or `txt` field from any MMArg positional arguments."""
    return [getattr(v, key) if isinstance(v, MMArg) else v for v in args]


def get_kwargs(key: str, kwargs: Dict[str, Any]) -> Dict[str, Any]:
    """Select the `vid` or `txt` field from any MMArg keyword arguments."""
    return {k: getattr(v, key) if isinstance(v, MMArg) else v for k, v in kwargs.items()}
class MMModule(nn.Module):
    """Applies a module per modality: a `vid` branch, a `txt` branch, or one shared branch.

    Arguments wrapped in MMArg carry separate values for the two branches; all other
    arguments are forwarded to both.
    """

    def __init__(
        self,
        module: Callable[..., nn.Module],
        *args,
        shared_weights: bool = False,
        vid_only: bool = False,
        **kwargs,
    ):
        super().__init__()
        self.shared_weights = shared_weights
        self.vid_only = vid_only
        if self.shared_weights:
            # A single shared module only makes sense if both branches would be
            # constructed with identical arguments.
            assert get_args("vid", args) == get_args("txt", args)
            assert get_kwargs("vid", kwargs) == get_kwargs("txt", kwargs)
            self.all = module(*get_args("vid", args), **get_kwargs("vid", kwargs))
        else:
            self.vid = module(*get_args("vid", args), **get_kwargs("vid", kwargs))
            self.txt = (
                module(*get_args("txt", args), **get_kwargs("txt", kwargs))
                if not vid_only
                else None
            )

    def forward(
        self,
        vid: torch.FloatTensor,
        txt: torch.FloatTensor,
        *args,
        **kwargs,
    ) -> Tuple[
        torch.FloatTensor,
        torch.FloatTensor,
    ]:
        # Run the video stream through its (possibly shared) sub-module.
        vid_module = self.vid if not self.shared_weights else self.all
        vid = vid_module(vid, *get_args("vid", args), **get_kwargs("vid", kwargs))
        # Run the text stream unless this module was built as video-only.
        if not self.vid_only:
            txt_module = self.txt if not self.shared_weights else self.all
            txt = txt_module(txt, *get_args("txt", args), **get_kwargs("txt", kwargs))
        return vid, txt
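

# Minimal usage sketch (not part of the original file): wrapping `nn.Linear` with
# `MMModule` so the video and text streams get independent projections whose input
# widths differ. The dimensions below are illustrative assumptions, not values from
# the source.
if __name__ == "__main__":
    proj = MMModule(nn.Linear, MMArg(vid=32, txt=16), 64)
    vid = torch.randn(2, 10, 32)  # (batch, video tokens, vid width)
    txt = torch.randn(2, 5, 16)   # (batch, text tokens, txt width)
    vid_out, txt_out = proj(vid, txt)
    # Both streams are projected to the common output width of 64.
    print(vid_out.shape, txt_out.shape)  # torch.Size([2, 10, 64]) torch.Size([2, 5, 64])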