记录

转载自 http://blog.sina.com.cn/s/blog_a99f842a0102xfb3.html

------------ Options -------------

#from options.train_options import TrainOptions#opt = TrainOptions().parse()

batchSize: 1

beta1: 0.5

checkpoints_dir: ./checkpoints

continue_train: False

dataroot: ./datasets/girls

dataset_mode: unaligned

display_freq: 100

display_id: 1

display_port: 8097

display_single_pane_ncols: 0

display_winsize: 256

epoch_count: 1

fineSize: 256

gpu_ids: [0]

identity: 0.5

init_type: normal

input_nc: 3

isTrain: True

lambda_A: 10.0

lambda_B: 10.0

loadSize: 286

lr: 0.0002

lr_decay_iters: 50

lr_policy: lambda

max_dataset_size: inf

model: cycle_gan

nThreads: 2

n_layers_D: 3

name: girls_cyclegan

ndf: 64

ngf: 64

niter: 100

niter_decay: 100

no_dropout: True

no_flip: False

no_html: False

no_lsgan: False

norm: instance

output_nc: 3

phase: train

pool_size: 50

print_freq: 100

resize_or_crop: resize_and_crop

save_epoch_freq: 5

save_latest_freq: 5000

serial_batches: False

update_html_freq: 1000

which_direction: AtoB

which_epoch: latest

which_model_netD: basic

which_model_netG: resnet_9blocks

-------------- End ----------------

Custom Dataset Data Loader #print(data_loader.name())

dataset [UnalignedDataset] was created #self.dataset = CreateDataset(opt)

/anaconda/envs/python3.5/lib/python3.5/site-packages/torchvision-0.2.0-py3.5.egg/torchvision/transforms/transforms.py:156: UserWarning: The use of the transforms.Scale transform is deprecated, please use transforms.Resize instead.

#training images = 2062   #print('#training images = %d' % dataset_size)

cycle_gan   #model = create_model(opt)  #print(opt.model)

initialization method [normal]    #model.initialize(opt)

initialization method [normal]

initialization method [normal]

initialization method [normal]

---------- Networks initialized -------------

#networks.print_network(self.netG_A)

ResnetGenerator(

(model): Sequential(

(0): ReflectionPad2d((3, 3, 3, 3))

(1): Conv2d(3, 64, kernel_size=(7, 7), stride=(1, 1))

(2): InstanceNorm2d(64, eps=1e-05, momentum=0.1, affine=False)

(3): ReLU(inplace)

(4): Conv2d(64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))

(5): InstanceNorm2d(128, eps=1e-05, momentum=0.1, affine=False)

(6): ReLU(inplace)

(7): Conv2d(128, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))

(8): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

(9): ReLU(inplace)

(10): ResnetBlock(

(conv_block): Sequential(

(0): ReflectionPad2d((1, 1, 1, 1))

(1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

(3): ReLU(inplace)

(4): ReflectionPad2d((1, 1, 1, 1))

(5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

)

)

(11): ResnetBlock(

(conv_block): Sequential(

(0): ReflectionPad2d((1, 1, 1, 1))

(1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

(3): ReLU(inplace)

(4): ReflectionPad2d((1, 1, 1, 1))

(5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

)

)

(12): ResnetBlock(

(conv_block): Sequential(

(0): ReflectionPad2d((1, 1, 1, 1))

(1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

(3): ReLU(inplace)

(4): ReflectionPad2d((1, 1, 1, 1))

(5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

)

)

(13): ResnetBlock(

(conv_block): Sequential(

(0): ReflectionPad2d((1, 1, 1, 1))

(1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

(3): ReLU(inplace)

(4): ReflectionPad2d((1, 1, 1, 1))

(5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

)

)

(14): ResnetBlock(

(conv_block): Sequential(

(0): ReflectionPad2d((1, 1, 1, 1))

(1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

(3): ReLU(inplace)

(4): ReflectionPad2d((1, 1, 1, 1))

(5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

)

)

(15): ResnetBlock(

(conv_block): Sequential(

(0): ReflectionPad2d((1, 1, 1, 1))

(1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

(3): ReLU(inplace)

(4): ReflectionPad2d((1, 1, 1, 1))

(5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

)

)

(16): ResnetBlock(

(conv_block): Sequential(

(0): ReflectionPad2d((1, 1, 1, 1))

(1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

(3): ReLU(inplace)

(4): ReflectionPad2d((1, 1, 1, 1))

(5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

)

)

(17): ResnetBlock(

(conv_block): Sequential(

(0): ReflectionPad2d((1, 1, 1, 1))

(1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

(3): ReLU(inplace)

(4): ReflectionPad2d((1, 1, 1, 1))

(5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

)

)

(18): ResnetBlock(

(conv_block): Sequential(

(0): ReflectionPad2d((1, 1, 1, 1))

(1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

(3): ReLU(inplace)

(4): ReflectionPad2d((1, 1, 1, 1))

(5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

)

)

(19): ConvTranspose2d(256, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), output_padding=(1, 1))

(20): InstanceNorm2d(128, eps=1e-05, momentum=0.1, affine=False)

(21): ReLU(inplace)

(22): ConvTranspose2d(128, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), output_padding=(1, 1))

(23): InstanceNorm2d(64, eps=1e-05, momentum=0.1, affine=False)

(24): ReLU(inplace)

(25): ReflectionPad2d((3, 3, 3, 3))

(26): Conv2d(64, 3, kernel_size=(7, 7), stride=(1, 1))

(27): Tanh()

)

)

Total number of parameters: 11378179

ResnetGenerator(

(model): Sequential(

(0): ReflectionPad2d((3, 3, 3, 3))

(1): Conv2d(3, 64, kernel_size=(7, 7), stride=(1, 1))

(2): InstanceNorm2d(64, eps=1e-05, momentum=0.1, affine=False)

(3): ReLU(inplace)

(4): Conv2d(64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))

(5): InstanceNorm2d(128, eps=1e-05, momentum=0.1, affine=False)

(6): ReLU(inplace)

(7): Conv2d(128, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))

(8): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

(9): ReLU(inplace)

(10): ResnetBlock(

(conv_block): Sequential(

(0): ReflectionPad2d((1, 1, 1, 1))

(1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

(3): ReLU(inplace)

(4): ReflectionPad2d((1, 1, 1, 1))

(5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

)

)

(11): ResnetBlock(

(conv_block): Sequential(

(0): ReflectionPad2d((1, 1, 1, 1))

(1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

(3): ReLU(inplace)

(4): ReflectionPad2d((1, 1, 1, 1))

(5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

)

)

(12): ResnetBlock(

(conv_block): Sequential(

(0): ReflectionPad2d((1, 1, 1, 1))

(1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

(3): ReLU(inplace)

(4): ReflectionPad2d((1, 1, 1, 1))

(5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

)

)

(13): ResnetBlock(

(conv_block): Sequential(

(0): ReflectionPad2d((1, 1, 1, 1))

(1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

(3): ReLU(inplace)

(4): ReflectionPad2d((1, 1, 1, 1))

(5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

)

)

(14): ResnetBlock(

(conv_block): Sequential(

(0): ReflectionPad2d((1, 1, 1, 1))

(1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

(3): ReLU(inplace)

(4): ReflectionPad2d((1, 1, 1, 1))

(5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

)

)

(15): ResnetBlock(

(conv_block): Sequential(

(0): ReflectionPad2d((1, 1, 1, 1))

(1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

(3): ReLU(inplace)

(4): ReflectionPad2d((1, 1, 1, 1))

(5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

)

)

(16): ResnetBlock(

(conv_block): Sequential(

(0): ReflectionPad2d((1, 1, 1, 1))

(1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

(3): ReLU(inplace)

(4): ReflectionPad2d((1, 1, 1, 1))

(5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

)

)

(17): ResnetBlock(

(conv_block): Sequential(

(0): ReflectionPad2d((1, 1, 1, 1))

(1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

(3): ReLU(inplace)

(4): ReflectionPad2d((1, 1, 1, 1))

(5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

)

)

(18): ResnetBlock(

(conv_block): Sequential(

(0): ReflectionPad2d((1, 1, 1, 1))

(1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

(3): ReLU(inplace)

(4): ReflectionPad2d((1, 1, 1, 1))

(5): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))

(6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

)

)

(19): ConvTranspose2d(256, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), output_padding=(1, 1))

(20): InstanceNorm2d(128, eps=1e-05, momentum=0.1, affine=False)

(21): ReLU(inplace)

(22): ConvTranspose2d(128, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), output_padding=(1, 1))

(23): InstanceNorm2d(64, eps=1e-05, momentum=0.1, affine=False)

(24): ReLU(inplace)

(25): ReflectionPad2d((3, 3, 3, 3))

(26): Conv2d(64, 3, kernel_size=(7, 7), stride=(1, 1))

(27): Tanh()

)

)

Total number of parameters: 11378179

NLayerDiscriminator(

(model): Sequential(

(0): Conv2d(3, 64, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))

(1): LeakyReLU(0.2, inplace)

(2): Conv2d(64, 128, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))

(3): InstanceNorm2d(128, eps=1e-05, momentum=0.1, affine=False)

(4): LeakyReLU(0.2, inplace)

(5): Conv2d(128, 256, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))

(6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

(7): LeakyReLU(0.2, inplace)

(8): Conv2d(256, 512, kernel_size=(4, 4), stride=(1, 1), padding=(1, 1))

(9): InstanceNorm2d(512, eps=1e-05, momentum=0.1, affine=False)

(10): LeakyReLU(0.2, inplace)

(11): Conv2d(512, 1, kernel_size=(4, 4), stride=(1, 1), padding=(1, 1))

)

)

Total number of parameters: 2764737

NLayerDiscriminator(

(model): Sequential(

(0): Conv2d(3, 64, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))

(1): LeakyReLU(0.2, inplace)

(2): Conv2d(64, 128, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))

(3): InstanceNorm2d(128, eps=1e-05, momentum=0.1, affine=False)

(4): LeakyReLU(0.2, inplace)

(5): Conv2d(128, 256, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))

(6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False)

(7): LeakyReLU(0.2, inplace)

(8): Conv2d(256, 512, kernel_size=(4, 4), stride=(1, 1), padding=(1, 1))

(9): InstanceNorm2d(512, eps=1e-05, momentum=0.1, affine=False)

(10): LeakyReLU(0.2, inplace)

(11): Conv2d(512, 1, kernel_size=(4, 4), stride=(1, 1), padding=(1, 1))

)

)

Total number of parameters: 2764737

-----------------------------------------------

model [CycleGANModel] was created  #print("model [%s] was created" % (model.name()))

create web directory ./checkpoints/girls_cyclegan/web...  #print('create web directory %s...' % self.web_dir)

最后编辑于
©著作权归作者所有,转载或内容合作请联系作者
  • 序言:七十年代末,一起剥皮案震惊了整个滨河市,随后出现的几起案子,更是在滨河造成了极大的恐慌,老刑警刘岩,带你破解...
    沈念sama阅读 213,616评论 6 492
  • 序言:滨河连续发生了三起死亡事件,死亡现场离奇诡异,居然都是意外死亡,警方通过查阅死者的电脑和手机,发现死者居然都...
    沈念sama阅读 91,020评论 3 387
  • 文/潘晓璐 我一进店门,熙熙楼的掌柜王于贵愁眉苦脸地迎上来,“玉大人,你说我怎么就摊上这事。” “怎么了?”我有些...
    开封第一讲书人阅读 159,078评论 0 349
  • 文/不坏的土叔 我叫张陵,是天一观的道长。 经常有香客问我,道长,这世上最难降的妖魔是什么? 我笑而不...
    开封第一讲书人阅读 57,040评论 1 285
  • 正文 为了忘掉前任,我火速办了婚礼,结果婚礼上,老公的妹妹穿的比我还像新娘。我一直安慰自己,他们只是感情好,可当我...
    茶点故事阅读 66,154评论 6 385
  • 文/花漫 我一把揭开白布。 她就那样静静地躺着,像睡着了一般。 火红的嫁衣衬着肌肤如雪。 梳的纹丝不乱的头发上,一...
    开封第一讲书人阅读 50,265评论 1 292
  • 那天,我揣着相机与录音,去河边找鬼。 笑死,一个胖子当着我的面吹牛,可吹牛的内容都是我干的。 我是一名探鬼主播,决...
    沈念sama阅读 39,298评论 3 412
  • 文/苍兰香墨 我猛地睁开眼,长吁一口气:“原来是场噩梦啊……” “哼!你这毒妇竟也来了?” 一声冷哼从身侧响起,我...
    开封第一讲书人阅读 38,072评论 0 268
  • 序言:老挝万荣一对情侣失踪,失踪者是张志新(化名)和其女友刘颖,没想到半个月后,有当地人在树林里发现了一具尸体,经...
    沈念sama阅读 44,491评论 1 306
  • 正文 独居荒郊野岭守林人离奇死亡,尸身上长有42处带血的脓包…… 初始之章·张勋 以下内容为张勋视角 年9月15日...
    茶点故事阅读 36,795评论 2 328
  • 正文 我和宋清朗相恋三年,在试婚纱的时候发现自己被绿了。 大学时的朋友给我发了我未婚夫和他白月光在一起吃饭的照片。...
    茶点故事阅读 38,970评论 1 341
  • 序言:一个原本活蹦乱跳的男人离奇死亡,死状恐怖,灵堂内的尸体忽然破棺而出,到底是诈尸还是另有隐情,我是刑警宁泽,带...
    沈念sama阅读 34,654评论 4 337
  • 正文 年R本政府宣布,位于F岛的核电站,受9级特大地震影响,放射性物质发生泄漏。R本人自食恶果不足惜,却给世界环境...
    茶点故事阅读 40,272评论 3 318
  • 文/蒙蒙 一、第九天 我趴在偏房一处隐蔽的房顶上张望。 院中可真热闹,春花似锦、人声如沸。这庄子的主人今日做“春日...
    开封第一讲书人阅读 30,985评论 0 21
  • 文/苍兰香墨 我抬头看了看天上的太阳。三九已至,却和暖如春,着一层夹袄步出监牢的瞬间,已是汗流浃背。 一阵脚步声响...
    开封第一讲书人阅读 32,223评论 1 267
  • 我被黑心中介骗来泰国打工, 没想到刚下飞机就差点儿被人妖公主榨干…… 1. 我叫王不留,地道东北人。 一个月前我还...
    沈念sama阅读 46,815评论 2 365
  • 正文 我出身青楼,却偏偏与公主长得像,于是被迫代替她去往敌国和亲。 传闻我的和亲对象是个残疾皇子,可洞房花烛夜当晚...
    茶点故事阅读 43,852评论 2 351

推荐阅读更多精彩内容

  • 相爱的两个人能量场应该是互补的,甚至是相通的,共同的爱好,兴趣,共同的梦想才能够一生生活的有声有色,结婚三年,一路...
    小胖子手工工作室阅读 821评论 0 0
  • 如果你愿意重塑自己:今天,我己经成功完成了十四天的净食之旅,在这过程中,我要感谢自己的坚持及毅力,因为在我净食的每...
    吉祥天女阅读 328评论 0 3
  • 这个秋天 花香弥漫 热情浪漫 情深义重 我心君懂 在秋天堕落 颓废中多少也带有醉人的味道 阳光在惊艳里坍塌 凝望 ...
    梦双眸阅读 496评论 5 15