The fastai library simplifies training fast and accurate neural nets using modern best practices. See the fastai website to get started. The library is based on research into deep learning best practices undertaken at fast.ai, and includes "out of the box" support for vision, text, tabular, and collab (collaborative filtering) models.
Grab the pets dataset and specify the folders:
URLs_PETS()
= 'oxford-iiit-pet'
path = paste(path, 'images', sep = '/')
path_hr = paste(path, 'crappy', sep = '/') path_lr
Prepare the input data by crappifying images:
# run this only for the first time, then skip
= get_image_files(path_hr)
items parallel(crappifier(path_lr, path_hr), items)
= 10
bs = 64
size = resnet34()
arch
= function(bs, size) {
get_dls = DataBlock(blocks = list(ImageBlock, ImageBlock),
dblock get_items = get_image_files,
get_y = function(x) {paste(path_hr, as.character(x$name), sep = '/')},
splitter = RandomSplitter(),
item_tfms = Resize(size),
batch_tfms = list(
aug_transforms(max_zoom = 2.),
Normalize_from_stats( imagenet_stats() )
))= dblock %>% dataloaders(path_lr, bs = bs, path = path)
dls $c = 3L
dls
dls
}
= get_dls(bs, size) dls_gen
See batch:
%>% show_batch(max_n = 4, dpi = 150) dls_gen
Define the loss function and create a unet_learner:
= 1e-3
wd
= c(-3.,3.)
y_range
= MSELossFlat()
loss_gen
= function() {
create_gen_learner unet_learner(dls_gen, arch, loss_func = loss_gen,
config = unet_config(blur=TRUE, norm_type = "Weight",
self_attention = TRUE, y_range = y_range))
}
= create_gen_learner()
learn_gen
%>% fit_one_cycle(2, pct_start = 0.8, wd = wd) learn_gen
epoch train_loss valid_loss time
0 0.025911 0.035153 00:42
1 0.019524 0.019408 00:39
Plot results:
%>% show_results(max_n = 6, dpi = 200) learn_gen