import datetime

import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.utils.data import Dataset, DataLoader
from torchvision import transforms, utils, datasets
# TF2-style summary API used below for TensorBoard scalar logging.
from tensorflow import summary

# Notebook-only magics (run in a Jupyter/Colab cell, not valid in a .py file).
# Depending on the environment, use one of the two forms:
# %load_ext tensorboard
# %load_ext tensorboard.notebook   # older notebook/TensorBoard versions

class Network(nn.Module):
    """LeNet-style CNN for 28x28 single-channel MNIST digits.

    Two conv+max-pool stages followed by two fully connected layers;
    outputs per-class log-probabilities for 10 digit classes.
    """

    def __init__(self):
        super(Network, self).__init__()
        # Conv stages: 1->20 and 20->50 channels, 5x5 kernels, stride 1.
        self.conv1 = nn.Conv2d(1, 20, 5, 1)
        self.conv2 = nn.Conv2d(20, 50, 5, 1)
        # A 28x28 input is reduced to 4x4x50 after the two conv+pool stages.
        self.fc1 = nn.Linear(4 * 4 * 50, 500)
        self.fc2 = nn.Linear(500, 10)

    def forward(self, x):
        """Return log-probabilities of shape (batch, 10) for input images x."""
        x = F.relu(self.conv1(x))
        x = F.max_pool2d(x, 2, 2)
        x = F.relu(self.conv2(x))
        x = F.max_pool2d(x, 2, 2)
        x = x.view(-1, 4 * 4 * 50)  # flatten for the fully connected layers
        x = F.relu(self.fc1(x))
        x = self.fc2(x)
        # log_softmax pairs with F.nll_loss used by the trainer.
        return F.log_softmax(x, dim=1)
class Config:
    """Lightweight attribute bag: Config(a=1).a == 1."""

    def __init__(self, **kwargs):
        for key, value in kwargs.items():
            setattr(self, key, value)


model_config = Config(
    cuda=True if torch.cuda.is_available() else False,
    device=torch.device("cuda" if torch.cuda.is_available() else "cpu"),
    seed=2,
    lr=0.01,
    epochs=4,
    save_model=False,
    batch_size=32,
    log_interval=100,
)


class Trainer:
    """Owns the MNIST data loaders, model and optimizer, and runs
    one-epoch train/test passes while logging scalars to TensorBoard.

    NOTE(review): train()/test() reference the module-level globals
    `train_summary_writer` / `test_summary_writer` and `summary`
    (tf.summary), which must be created before either method is called.
    """

    def __init__(self, config):
        self.cuda = config.cuda
        self.device = config.device
        self.seed = config.seed
        self.lr = config.lr
        self.epochs = config.epochs
        self.save_model = config.save_model
        self.batch_size = config.batch_size
        self.log_interval = config.log_interval
        # Global step counter shared by train and test logging.
        self.globaliter = 0
        # self.tb = TensorBoardColab()

        torch.manual_seed(self.seed)
        kwargs = {'num_workers': 1, 'pin_memory': True} if self.cuda else {}
        self.train_loader = torch.utils.data.DataLoader(
            datasets.MNIST('../data', train=True, download=True,
                           transform=transforms.Compose([
                               transforms.ToTensor(),
                               # MNIST channel mean/std for normalization.
                               transforms.Normalize((0.1307,), (0.3081,))
                           ])),
            batch_size=self.batch_size, shuffle=True, **kwargs)
        self.test_loader = torch.utils.data.DataLoader(
            datasets.MNIST('../data', train=False,
                           transform=transforms.Compose([
                               transforms.ToTensor(),
                               transforms.Normalize((0.1307,), (0.3081,))
                           ])),
            batch_size=self.batch_size, shuffle=True, **kwargs)
        self.model = Network().to(self.device)
        self.optimizer = optim.Adam(self.model.parameters(), lr=self.lr)

    def train(self, epoch):
        """Run one training epoch, printing/logging loss every log_interval batches."""
        self.model.train()
        for batch_idx, (data, target) in enumerate(self.train_loader):
            self.globaliter += 1
            data, target = data.to(self.device), target.to(self.device)
            self.optimizer.zero_grad()
            predictions = self.model(data)
            loss = F.nll_loss(predictions, target)
            loss.backward()
            self.optimizer.step()
            if batch_idx % self.log_interval == 0:
                print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
                    epoch, batch_idx * len(data), len(self.train_loader.dataset),
                    100. * batch_idx / len(self.train_loader), loss.item()))
                # Log to TensorBoard at the same cadence as the console print.
                with train_summary_writer.as_default():
                    summary.scalar('loss', loss.item(), step=self.globaliter)

    def test(self, epoch):
        """Evaluate on the test set; print and log average loss and accuracy."""
        self.model.eval()
        test_loss = 0
        correct = 0
        with torch.no_grad():
            for data, target in self.test_loader:
                data, target = data.to(self.device), target.to(self.device)
                predictions = self.model(data)
                # Sum (not mean) per-batch so the final division by the
                # dataset size yields the true average loss.
                test_loss += F.nll_loss(predictions, target, reduction='sum').item()
                prediction = predictions.argmax(dim=1, keepdim=True)
                correct += prediction.eq(target.view_as(prediction)).sum().item()
        test_loss /= len(self.test_loader.dataset)
        accuracy = 100. * correct / len(self.test_loader.dataset)
        print('\nTest set: Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)\n'.format(
            test_loss, correct, len(self.test_loader.dataset), accuracy))
        with test_summary_writer.as_default():
            summary.scalar('loss', test_loss, step=self.globaliter)
            summary.scalar('accuracy', accuracy, step=self.globaliter)
def main():
    """Train for the configured number of epochs, evaluating after each,
    and optionally save the final model weights."""
    trainer = Trainer(model_config)
    for epoch in range(1, trainer.epochs + 1):
        trainer.train(epoch)
        trainer.test(epoch)
    if trainer.save_model:
        torch.save(trainer.model.state_dict(), "mnist_cnn.pt")


# One file writer per run, keyed by wall-clock timestamp so repeated runs
# land in distinct TensorBoard log directories.
current_time = str(datetime.datetime.now().timestamp())
train_log_dir = 'logs/tensorboard/train/' + current_time
test_log_dir = 'logs/tensorboard/test/' + current_time
train_summary_writer = summary.create_file_writer(train_log_dir)
test_summary_writer = summary.create_file_writer(test_log_dir)

# Notebook-only magic (run in a Jupyter/Colab cell, not valid in a .py file):
# %tensorboard --logdir logs/tensorboard

if __name__ == "__main__":
    main()
Train Epoch: 1 [0/60000 (0%)]    Loss: 2.320306
Train Epoch: 1 [3200/60000 (5%)] Loss: 0.881239
Train Epoch: 1 [6400/60000 (11%)] Loss: 0.014427
Train Epoch: 1 [9600/60000 (16%)] Loss: 0.046511
Train Epoch: 1 [12800/60000 (21%)] Loss: 0.194090
Train Epoch: 1 [16000/60000 (27%)] Loss: 0.178779
Train Epoch: 1 [19200/60000 (32%)] Loss: 0.437568
Train Epoch: 1 [22400/60000 (37%)] Loss: 0.058614
Train Epoch: 1 [25600/60000 (43%)] Loss: 0.051354
Train Epoch: 1 [28800/60000 (48%)] Loss: 0.339627
Train Epoch: 1 [32000/60000 (53%)] Loss: 0.057814
Train Epoch: 1 [35200/60000 (59%)] Loss: 0.216959
Train Epoch: 1 [38400/60000 (64%)] Loss: 0.111091
Train Epoch: 1 [41600/60000 (69%)] Loss: 0.268371
Train Epoch: 1 [44800/60000 (75%)] Loss: 0.129569
Train Epoch: 1 [48000/60000 (80%)] Loss: 0.392319
Train Epoch: 1 [51200/60000 (85%)] Loss: 0.374106
Train Epoch: 1 [54400/60000 (91%)] Loss: 0.145877
Train Epoch: 1 [57600/60000 (96%)] Loss: 0.136342

Test set: Average loss: 0.1660, Accuracy: 9497/10000 (95%)

Train Epoch: 2 [0/60000 (0%)] Loss: 0.215095
Train Epoch: 2 [3200/60000 (5%)] Loss: 0.064202
Train Epoch: 2 [6400/60000 (11%)] Loss: 0.059504
Train Epoch: 2 [9600/60000 (16%)] Loss: 0.116854
Train Epoch: 2 [12800/60000 (21%)] Loss: 0.259310
Train Epoch: 2 [16000/60000 (27%)] Loss: 0.280154
Train Epoch: 2 [19200/60000 (32%)] Loss: 0.260245
Train Epoch: 2 [22400/60000 (37%)] Loss: 0.039311
Train Epoch: 2 [25600/60000 (43%)] Loss: 0.049329
Train Epoch: 2 [28800/60000 (48%)] Loss: 0.437081
Train Epoch: 2 [32000/60000 (53%)] Loss: 0.094939
Train Epoch: 2 [35200/60000 (59%)] Loss: 0.311777
Train Epoch: 2 [38400/60000 (64%)] Loss: 0.076921
Train Epoch: 2 [41600/60000 (69%)] Loss: 0.800094
Train Epoch: 2 [44800/60000 (75%)] Loss: 0.074938
Train Epoch: 2 [48000/60000 (80%)] Loss: 0.240811
Train Epoch: 2 [51200/60000 (85%)] Loss: 0.303044
Train Epoch: 2 [54400/60000 (91%)] Loss: 0.372847
Train Epoch: 2 [57600/60000 (96%)] Loss: 0.290946

Test set: Average loss: 0.1341, Accuracy: 9634/10000 (96%)

Train Epoch: 3 [0/60000 (0%)] Loss: 0.092767
Train Epoch: 3 [3200/60000 (5%)] Loss: 0.038457
Train Epoch: 3 [6400/60000 (11%)] Loss: 0.005179
Train Epoch: 3 [9600/60000 (16%)] Loss: 0.168411
Train Epoch: 3 [12800/60000 (21%)] Loss: 0.171331
Train Epoch: 3 [16000/60000 (27%)] Loss: 0.267252
Train Epoch: 3 [19200/60000 (32%)] Loss: 0.072991
Train Epoch: 3 [22400/60000 (37%)] Loss: 0.034315
Train Epoch: 3 [25600/60000 (43%)] Loss: 0.143128
Train Epoch: 3 [28800/60000 (48%)] Loss: 0.324783
Train Epoch: 3 [32000/60000 (53%)] Loss: 0.049743
Train Epoch: 3 [35200/60000 (59%)] Loss: 0.090172
Train Epoch: 3 [38400/60000 (64%)] Loss: 0.002107
Train Epoch: 3 [41600/60000 (69%)] Loss: 0.025945
Train Epoch: 3 [44800/60000 (75%)] Loss: 0.054859
Train Epoch: 3 [48000/60000 (80%)] Loss: 0.009291
Train Epoch: 3 [51200/60000 (85%)] Loss: 0.010495
Train Epoch: 3 [54400/60000 (91%)] Loss: 0.132548
Train Epoch: 3 [57600/60000 (96%)] Loss: 0.005778

Test set: Average loss: 0.1570, Accuracy: 9553/10000 (96%)

Train Epoch: 4 [0/60000 (0%)] Loss: 0.103177
Train Epoch: 4 [3200/60000 (5%)] Loss: 0.087844
Train Epoch: 4 [6400/60000 (11%)] Loss: 0.066604
Train Epoch: 4 [9600/60000 (16%)] Loss: 0.052869
Train Epoch: 4 [12800/60000 (21%)] Loss: 0.091576
Train Epoch: 4 [16000/60000 (27%)] Loss: 0.094903
Train Epoch: 4 [19200/60000 (32%)] Loss: 0.247008
Train Epoch: 4 [22400/60000 (37%)] Loss: 0.037751
Train Epoch: 4 [25600/60000 (43%)] Loss: 0.067071
Train Epoch: 4 [28800/60000 (48%)] Loss: 0.191988
Train Epoch: 4 [32000/60000 (53%)] Loss: 0.403029
Train Epoch: 4 [35200/60000 (59%)] Loss: 0.547171
Train Epoch: 4 [38400/60000 (64%)] Loss: 0.187923
Train Epoch: 4 [41600/60000 (69%)] Loss: 0.231193
Train Epoch: 4 [44800/60000 (75%)] Loss: 0.010785
Train Epoch: 4 [48000/60000 (80%)] Loss: 0.077892
Train Epoch: 4 [51200/60000 (85%)] Loss: 0.093144
Train Epoch: 4 [54400/60000 (91%)] Loss: 0.004715
Train Epoch: 4 [57600/60000 (96%)] Loss: 0.083726

Test set: Average loss: 0.1932, Accuracy: 9584/10000 (96%)

核心就是标红的地方。

【colab pytorch】使用tensorboard可视化的更多相关文章

  1. Pytorch的网络结构可视化(tensorboardX)(详细)

    版权声明:本文为博主原创文章,遵循 CC 4.0 BY-SA 版权协议,转载请附上原文出处链接和本声明.本文链接:https://blog.csdn.net/xiaoxifei/article/det ...

  2. 【猫狗数据集】利用tensorboard可视化训练和测试过程

    数据集下载地址: 链接:https://pan.baidu.com/s/1l1AnBgkAAEhh0vI5_loWKw提取码:2xq4 创建数据集:https://www.cnblogs.com/xi ...

  3. 使用 TensorBoard 可视化模型、数据和训练

    使用 TensorBoard 可视化模型.数据和训练 在 60 Minutes Blitz 中,我们展示了如何加载数据,并把数据送到我们继承 nn.Module 类的模型,在训练数据上训练模型,并在测 ...

  4. 利用Tensorboard可视化模型、数据和训练过程

    在60分钟闪电战中,我们像你展示了如何加载数据,通过为我们定义的nn.Module的子类的model提供数据,在训练集上训练模型,在测试集上测试模型.为了了解发生了什么,我们在模型训练时打印了一些统计 ...

  5. Tensorflow学习笔记3:TensorBoard可视化学习

    TensorBoard简介 Tensorflow发布包中提供了TensorBoard,用于展示Tensorflow任务在计算过程中的Graph.定量指标图以及附加数据.大致的效果如下所示, Tenso ...

  6. 学习TensorFlow,TensorBoard可视化网络结构和参数

    在学习深度网络框架的过程中,我们发现一个问题,就是如何输出各层网络参数,用于更好地理解,调试和优化网络?针对这个问题,TensorFlow开发了一个特别有用的可视化工具包:TensorBoard,既可 ...

  7. tensorboard可视化节点却没有显示图像的解决方法---注意路径问题加中文文件名

    问题:完成graph中的算子,并执行tf.Session后,用tensorboard可视化节点时,没有显示图像 1. tensorboard 1.10 我是将log文件存储在E盘下面的,所以直接在E盘 ...

  8. 在Keras中使用tensorboard可视化acc等曲线

    1.使用tensorboard可视化ACC,loss等曲线 keras.callbacks.TensorBoard(log_dir='./Graph', histogram_freq= 0 , wri ...

  9. 超简单tensorflow入门优化程序&&tensorboard可视化

    程序1 任务描述: x = 3.0, y = 100.0, 运算公式 x×W+b = y,求 W和b的最优解. 使用tensorflow编程实现: #-*- coding: utf-8 -*-) im ...

  10. 使用TensorBoard可视化工具

    title: 使用TensorBoard可视化工具 date: 2018-04-01 13:04:00 categories: deep learning tags: TensorFlow Tenso ...

随机推荐

  1. Arcpy处理修改shapefile FeatureClass 线要素坐标

    需求:在开发的webgis系统中需要将道路矢量数据与谷歌地图瓦片叠加,谷歌地图瓦片在国家测绘局的要求是进行了偏移处理的,人称“火星坐标系GCJ_02”,道路数据是WGS-84坐标系下的经纬度坐标,现在 ...

  2. Leetcode13_罗马数字转整数

    题目 罗马数字包含以下七种字符: I, V, X, L,C,D 和 M. 字符 数值I 1V 5X 10L 50C 100D 500M 1000例如, 罗马数字 2 写做 II ,即为两个并列的 1. ...

  3. ZOJ-1167-Trees on the Level

    题解: 我的解法是用一个类似字典树结构的结构体来表示节点.看到另一种解法是用数组来映射二叉树的,开到14000就过了,但是我觉得是数据水了,因为题中说最多 256个节点,如果256个节点连成链型,除根 ...

  4. django Field选项中null和blank的区别

    blank只是在填写表单的时候可以为空,而在数据库上存储的是一个空字符串:null是在数据库上表现NULL,而不是一个空字符串: 需要注意的是,日期型(DateField.TimeField.Date ...

  5. C 语言高效编程与代码优化

    译文链接:http://www.codeceo.com/article/c-high-performance-coding.html英文原文:Writing Efficient C and C Cod ...

  6. Oracle之函数中使用游标

    create or replace function getcustprodinstaddr(in_CustId in number,in_area_code in number) return va ...

  7. C# 输出&输入&类型强制转换

    输入字符串 String s; s=Console.ReadLine(); 输出字符串 Console.WritrLine(s); 输出分两种 ①占位符输出:Console.WriteLine(&qu ...

  8. Python Web 基础向(四) 浅谈数据层

    数据层一般会给人带来一些困扰,在于其定位不准确.聚合Model的工作也可以放在逻辑层做,但会导致逻辑层变重,经常出现大段晦涩代码.因此我的建议是保留Model聚合层,尽管会导致工作量的略微增加,但却可 ...

  9. python——pymysql的安装

    pymysql是python程序连接mysql数据库的的第三方库,通过运行import pymysql 查看系统中是否有该模块,没有的话需要自行安装. 安装教程如下: 1.下载pymysql安装包,下 ...

  10. windows 右键新建html文档

    1.win+R 输入 regedit 启动注册表 2.HKEY_CLASSES_ROOT->.html 3.右键新建-项 名为:ShellNew 4.在右侧空白区右键新建字符串值FileName ...