
Tracking the Memory Allocations of a PyTorch Module During Training

  • Code
  • Output

Goal: track how much memory a PyTorch Module allocates during training, and attribute each allocation to a specific module.
Approach:
1. Use module pre-hooks to mark module boundaries, so allocations can be attributed to the module that is currently executing.
2. Intercept every aten operator via __torch_dispatch__ and add up the memory of the tensors newly created inside that operator (a minimal sketch of this idea follows below; the full implementation is in the Code section).
3. Deduplicate tensors by tensor.data_ptr(): each distinct pointer represents one independent allocation, so it is counted only once (see the short demonstration after the Code section).
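
Before the full implementation, here is a minimal sketch of the dispatch-mode idea, assuming PyTorch 2.x. The class name MemTracker and the printed format are illustrative only, not part of the implementation below:

import torch
from torch.utils._python_dispatch import TorchDispatchMode

class MemTracker(TorchDispatchMode):
    def __init__(self):
        super().__init__()
        self.seen = set()  # data_ptr() values already counted

    def __torch_dispatch__(self, func, types, args=(), kwargs=None):
        ret = func(*args, **(kwargs or {}))
        # Count each distinct allocation produced by this aten op once
        outs = ret if isinstance(ret, (list, tuple)) else [ret]
        for t in outs:
            if isinstance(t, torch.Tensor) and t.data_ptr() not in self.seen:
                self.seen.add(t.data_ptr())
                print(f"{func.__name__}: {t.numel() * t.element_size()} bytes")
        return ret

with MemTracker():
    x = torch.randn(2, 3)
    y = x @ x.T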

Code


import numpy as np
import torch
from torch.nn import Module, Linear
import torch.nn as nn
from torch.optim import Adam
from torch.utils._python_dispatch import TorchDispatchMode
from dataclasses import dataclass
from typing import Any
@dataclass
class _ProfilerState:
    cls: Any
    object: Any = None


current_module = None  # the module whose allocations are currently being attributed
tensor_cache = set()   # data_ptr() of every tensor counted so far (deduplication)


def get_current_mem():
    global current_module
    print(f'[INFO]{current_module["name"]}:{np.sum(current_module["size"])}')
    current_module = None


class InputDescriptor:
    def __init__(self) -> None:
        self.total_input_size = 0

    def _save_var(self, v):
        class_name = v.__class__.__name__
        if class_name in ["Tensor", "Parameter"]:
            global tensor_cache
            if v.device.type != "cuda":
                return
            tensorid = v.data_ptr()
            if tensorid not in tensor_cache:
                tensor_cache.add(tensorid)
                sz = v.numel() * v.element_size()
                print(v.shape, v.dtype)
                self.total_input_size += sz
            if class_name == "Parameter" and v.grad is not None:
                tensorid = v.grad.data_ptr()
                if tensorid not in tensor_cache:
                    tensor_cache.add(tensorid)
                    sz = v.grad.numel() * v.grad.element_size()
                    print("grad", v.grad.shape, v.grad.dtype)
                    self.total_input_size += sz
        elif class_name in ["list", "tuple"]:
            for t in v:
                self._save_var(t)

    def save_vars(self, ret, *args, **kwargs):
        for arg in args:
            self._save_var(arg)
        for v in kwargs.values():
            self._save_var(v)
        self._save_var(ret)
        global current_module
        if current_module is None:
            current_module = {"name": "Other", "size": []}
        current_module["size"].append(self.total_input_size)


# Caches used to generate stable, human-readable module names
object_cache = {}
class_name_count = {}


def get_unique_name(class_name, obj_id):
    # Generate a unique name such as "Linear-2" for each module instance
    if class_name not in class_name_count:
        class_name_count[class_name] = 0
    uid = f"{class_name}_{obj_id}"
    if uid not in object_cache:
        class_name_count[class_name] += 1
        object_cache[uid] = {"idx": class_name_count[class_name]}
    return f'{class_name}-{object_cache[uid]["idx"]}'


def initialize_module_attributes(module):
    # Attach bookkeeping attributes to the module on first use
    if not hasattr(module, 'uuid'):
        module.uuid = get_unique_name(module.__class__.__name__, id(module))
    if not hasattr(module, 'backward_mem'):
        module.backward_mem = []
    if not hasattr(module, 'forward_mem'):
        module.forward_mem = []


def pre_backward_hook(module, grad_output):
    # Runs before the module's backward pass: flush the previous scope,
    # then start attributing allocations to this module's backward pass
    initialize_module_attributes(module)
    global current_module
    if current_module is not None and np.sum(current_module["size"]) > 0:
        print(f'[INFO]{current_module["name"]}:{np.sum(current_module["size"])}')
    module.backward_mem.clear()
    current_module = {"name": f"backward-{module.uuid}", "size": module.backward_mem}


def post_backward_hook(module, grad_input, grad_output):
    # Runs after the module's backward pass
    initialize_module_attributes(module)


def pre_forward_hook(module, input):
    # Runs before the module's forward pass: flush the previous scope,
    # then start attributing allocations to this module's forward pass
    initialize_module_attributes(module)
    global current_module
    if current_module is not None and np.sum(current_module["size"]) > 0:
        print(f'[INFO]{current_module["name"]}:{np.sum(current_module["size"])}')
    module.forward_mem.clear()
    current_module = {"name": f"forward-{module.uuid}", "size": module.forward_mem}


def post_forward_hook(module, input, output):
    # Runs after the module's forward pass
    initialize_module_attributes(module)


def register_forward_hooks(module):
    # Register forward hooks
    module.register_forward_pre_hook(pre_forward_hook)
    module.register_forward_hook(post_forward_hook)


def register_backward_hooks(module):
    # Register backward hooks
    module.register_full_backward_pre_hook(pre_backward_hook)
    module.register_full_backward_hook(post_backward_hook)


class HookModel(object):
    def __init__(self, model):
        output_dict = {}
        self.get_submodule_recursive(model, "", output_dict)
        for name, module in output_dict.items():
            if name.endswith("Sequential"):  # skip containers, hook leaf modules only
                continue
            register_forward_hooks(module)
            register_backward_hooks(module)

    def get_submodule_recursive(self, module, prefix, output_dict):
        prefix = prefix + "/" + type(module).__name__
        output_dict[prefix] = module
        for name, submodule in module.named_children():
            self.get_submodule_recursive(submodule, f"{prefix}[{name}]", output_dict)


class TorchDumpDispatchMode(TorchDispatchMode):
    def __init__(self, parent):
        super().__init__()
        self.parent = parent

    def __torch_dispatch__(self, func, types, args=(), kwargs=None):
        if kwargs is None:
            kwargs = {}
        ret = func(*args, **kwargs)
        desc = InputDescriptor()
        desc.save_vars(ret, *args, **kwargs)
        if desc.total_input_size > 0:
            print(f"{func.__name__}:{desc.total_input_size}")
        return ret


class TorchDebugDumper:
    _CURRENT_Dumper = None

    def __init__(self):
        self.p = _ProfilerState(TorchDumpDispatchMode)

    def __enter__(self):
        assert TorchDebugDumper._CURRENT_Dumper is None
        TorchDebugDumper._CURRENT_Dumper = self
        if self.p.object is None:
            o = self.p.cls(self)
            o.__enter__()
            self.p.object = o
        else:
            self.p.object.step()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        TorchDebugDumper._CURRENT_Dumper = None
        if self.p.object is not None:
            self.p.object.__exit__(exc_type, exc_val, exc_tb)
            del self.p.object


class FeedForward(Module):
    def __init__(self, hidden_size, ffn_size):
        super().__init__()
        self.fc = nn.Sequential(
            Linear(in_features=hidden_size, out_features=ffn_size, bias=False),
            nn.ReLU(),
            Linear(in_features=ffn_size, out_features=ffn_size * 2, bias=False),
            nn.Dropout(0.5),
            Linear(in_features=ffn_size * 2, out_features=hidden_size, bias=False),
        )
        self.norm = nn.LayerNorm(normalized_shape=hidden_size, elementwise_affine=False)

    def forward(self, x):
        return x + self.fc(self.norm(x))


def main():
    model = FeedForward(100, 128)
    model = model.float().cuda()
    model.train()
    HookModel(model)  # register hooks on every non-container submodule
    global current_module
    with TorchDebugDumper():
        opt = Adam(model.parameters(), lr=0.001)
        x = torch.randn(1, 100).float().cuda()
        output = model(x)
        get_current_mem()
        loss = -torch.log(output.sum())
        opt.zero_grad()
        loss.backward()
        get_current_mem()
        current_module = None
        opt.step()
    get_current_mem()
    num_model_params = sum(p.numel() for p in model.parameters())
    print(f"[INFO]Number of model parameters: {num_model_params}")
main()
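
A quick way to see why data_ptr() works as the deduplication key in step 3: views share the storage of their base tensor, while clones receive a fresh allocation. A small standalone check:

import torch

base = torch.randn(4, 4)
view = base.view(16)   # a view shares the base tensor's storage
copy = base.clone()    # a clone is a new allocation
print(base.data_ptr() == view.data_ptr())  # True  -> counted once
print(base.data_ptr() == copy.data_ptr())  # False -> counted separately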

Output

torch.Size([1, 100]) torch.float32
_to_copy.default:400
[INFO]Other:400
torch.Size([1, 100]) torch.float32
torch.Size([1, 1]) torch.float32
torch.Size([1, 1]) torch.float32
native_layer_norm.default:408
[INFO]forward-LayerNorm-1:408
torch.Size([128, 100]) torch.float32
t.default:51200
[INFO]forward-Linear-1:51200
torch.Size([256, 128]) torch.float32
t.default:131072
torch.Size([1, 256]) torch.float32
mm.default:1024
[INFO]forward-Linear-2:132096
torch.Size([1, 256]) torch.float32
native_dropout.default:1024
[INFO]forward-Dropout-1:1024
torch.Size([100, 256]) torch.float32
t.default:102400
torch.Size([1, 100]) torch.float32
add.Tensor:400
[INFO]forward-Linear-3:102800
torch.Size([]) torch.float32
log.default:4
torch.Size([]) torch.float32
neg.default:4
torch.Size([]) torch.float32
neg.default:4
torch.Size([]) torch.float32
div.Tensor:4
[INFO]Other:16
torch.Size([100, 256]) torch.float32
mm.default:102400
torch.Size([1, 256]) torch.float32
mm.default:1024
[INFO]backward-Linear-3:103424
torch.Size([128, 100]) torch.float32
mm.default:51200
[INFO]backward-Linear-1:51200
torch.Size([128, 100]) torch.float32
zeros_like.default:51200
torch.Size([128, 100]) torch.float32
zeros_like.default:51200
torch.Size([256, 128]) torch.float32
zeros_like.default:131072
torch.Size([256, 128]) torch.float32
zeros_like.default:131072
torch.Size([100, 256]) torch.float32
zeros_like.default:102400
torch.Size([100, 256]) torch.float32
zeros_like.default:102400
torch.Size([128, 100]) torch.float32
torch.Size([256, 128]) torch.float32
torch.Size([100, 256]) torch.float32
_foreach_sqrt.default:284672
[INFO]Other:854016
[INFO]Number of model parameters: 71168
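
The logged figures are consistent with the model dimensions (all tensors are float32, i.e. 4 bytes per element); a quick arithmetic check:

w1, w2, w3 = 128 * 100, 256 * 128, 100 * 256  # the three Linear weights
print(w1 + w2 + w3)            # 71168 parameters, matching the final line
print(w1 * 4, w2 * 4, w3 * 4)  # 51200 131072 102400 bytes, matching the forward passes
# In opt.step(), Adam allocates exp_avg and exp_avg_sq (the two zeros_like
# calls per weight in the log) plus a _foreach_sqrt temporary, i.e. three
# parameter-sized copies in total:
print(3 * (w1 + w2 + w3) * 4)  # 854016 bytes, matching the last [INFO]Other line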
