LSTM to Generate Synthetic ECG Data

Sandya Subramanian

HST (Health Sciences and Technology), 2nd year Ph.D.

This was mostly just for fun. It's the analog of what we did in one of the assignments, where we generated fake Shakespeare text: I wanted to see if I could generate "fake" ECG data the same way.

Setup

In [1]:
using CSV, Plots
using Knet, AutoGrad
using Knet: sigm_dot, tanh_dot
plotlyjs()

Out[1]:
Plots.PlotlyJSBackend()
In [2]:
Knet.gpu(0)
Out[2]:
0

Data Sanity Checks

In [3]:
fs_motion = 500;
sample_motion = CSV.read("test22_00j.csv", header = ["Time", "ECG1", "ECG2", "ECG3", "ECG4"], datarow=1, nullable=false)
fs_arrhyth = 360;
sample_arrhyth = CSV.read("207.csv",header = ["Time", "ECG1", "ECG2"], datarow=1, nullable=false)
plot(sample_motion[:,1],sample_motion[:,4],title = "Sample ECG during Jumping with Motion Artifact",xlabel="Time (sec)")
Out[3]:
In [4]:
plot(sample_arrhyth[360*515:360*535,1],sample_arrhyth[360*515:360*535,3],title = "Sample ECG during Arrhythmia",xlabel="Time (sec)")
Out[4]:

LSTM

The setup is essentially the same as in the assignment, with a few changes to account for the different data format: instead of characters, the "vocabulary" is the set of unique amplitude values appearing in the signal, as sketched below.
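
Before the full implementation, here is a minimal standalone sketch (a toy example of mine, not part of the notebook) of that tokenization step: each unique signal value gets an integer index, and an inverse map converts sampled indices back to amplitudes during generation.

toy_signal = [0.00, 0.05, 0.10, 0.05, 0.00, -0.05]

# Forward map: signal value -> integer token (same idea as createVocabulary below)
tok2int = Dict{Float64,Int64}()
for v in unique(toy_signal)
    tok2int[v] = length(tok2int) + 1
end

# Inverse map: token index -> signal value, used when generating
int2tok = zeros(length(tok2int))
for (v,i) in tok2int
    int2tok[i] = v
end

tokens = [tok2int[v] for v in toy_signal]   # gives [1, 2, 3, 2, 1, 4]
# int2tok[tokens] recovers the original signal values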

In [5]:
#Task 1: Create dictionary
function createVocabulary(signal)
    vocab = Dict{Float64,Int64}()
    # List of unique values in signal
    chars = unique(signal)
    for i = 1:length(chars)
        vocab[chars[i]] = i
    end
    return vocab
end

#LSTM Network Function
function lstm(weight,bias,hidden,cell,input)
    gates   = hcat(input,hidden) * weight .+ bias # weight is (I+H) x 4H, so gates is (batch, 4H)
    hsize   = size(hidden,2) #size of hidden layer
    forget  = sigm_dot(gates[:,1:hsize])
    ingate  = sigm_dot(gates[:,1+hsize:2hsize])
    outgate = sigm_dot(gates[:,1+2hsize:3hsize])
    change  = tanh_dot(gates[:,1+3hsize:end])
    cell    = cell .* forget + ingate .* change
    hidden  = outgate .* tanh_dot(cell)
    return (hidden,cell)
end

#Task 2: Create Initial Weights
function initweights(lenhidden, lenvocab, embed, arraytype)
    init(d...) = xavier(d...)
    bias(d...) = zeros(d...)
    tmp_model = Vector{Array{Float64}}(2*length(lenhidden)+3) #2*num layers + 3
    X = embed #size of x input
    for k = 1:length(lenhidden)
        tmp_model[2k-1] = init(X+lenhidden[k],4*lenhidden[k]) #Each element of the Vector is a matrix itself
        tmp_model[2k] = bias(1,4*lenhidden[k])
        #Combine all the weights
        #Biases separately
        X = lenhidden[k] #Replace previous layer size
    end
    tmp_model[end-2] = init(lenvocab,embed) # Embedding matrix: vocab size x embedding size
    tmp_model[end-1] = init(lenhidden[end],lenvocab) # Output projection: last hidden size x vocab size
    tmp_model[end] = bias(1,lenvocab) # Output bias: one entry per vocabulary value
    
    if arraytype == "KnetArray"
        model = Vector{KnetArray{Float64}}(2*length(lenhidden)+3)
        for k = 1:length(tmp_model)
            model[k] = KnetArray(tmp_model[k]);
            println(typeof(model[k]))
        end
    else
        model = copy(tmp_model);
    end
    return model
end

#Task 3: Create Initial State
function initstate(model, batch)
    #Check what type to use
    if typeof(model[1]) == Array{Float64,2}
        arraytype = "Array"
    else 
        arraytype = "KnetArray"
    end
    nlayers = div(length(model)-3,2)
    state = Vector{Any}(2*nlayers)
    for k = 1:nlayers
        #Get length of layer
        H = div(size(model[2k],2),4)
        state[2k-1] = state[2k] = zeros(batch,H)
        # hidden and cell state for each layer, each a (batch x H) array of zeros
    end
    
    if arraytype == "KnetArray"
        for k = 1:length(state)
            state[k] = KnetArray(state[k]);
        end
    end
    return state
end

#Task 4: Create Predict function
function predict(model, state, input; pdrop=0)
    nlayers = div(length(model)-3,2)
    newstate = similar(state)
    for k = 1:nlayers
        #Run through by selecting the right elements corresponding to that layer
        #state[2k-1] -> hidden
        #state[2k] -> cell
        #model[2k-1] -> weights
        #model[2k] -> bias
        hidden, cell = lstm(model[2k-1],model[2k],state[2k-1],state[2k],input)
        newstate[2k-1] = hidden
        newstate[2k] = cell
        input = hidden
    end
    return input,newstate
end

#Generate and Sample Functions
function generate(model, tok2int, nchar)
    #tok2int is the dictionary
    int2tok = Vector{Float64}(length(tok2int))
    for (k,v) in tok2int; int2tok[v] = k; end # Inverse map: token index -> signal value
    input = tok2int[0] # Start from the token for signal value 0 (assumed to occur in the data)
    state = initstate(model, 1)
    synth_signal = zeros(0)
    for t in 1:nchar
        embed = model[end-2][[input],:] # Embedding row for the current token
        ypred,state = predict(model,state,embed)
        ypred = ypred * model[end-1] .+ model[end] # Project to scores over the vocabulary
        input = sample(exp.(logp(ypred))) # Convert to a probability distribution and sample
        synth_signal = append!(synth_signal,int2tok[input])
    end
    return synth_signal
end

function sample(p)
    p = convert(Array,p)
    r = rand()
    for c = 1:length(p)
        r -= p[c]
        r < 0 && return c
    end
end


function minibatches(signal, tok2int, batch_size)
    signal = collect(signal)
    if length(signal) > batch_size
        nbatch = div(length(signal), batch_size)
    else
        nbatch = 1
        batch_size = length(signal)
    end
    data = [zeros(Int,batch_size) for i=1:nbatch ]
    for n = 1:nbatch
        for b = 1:batch_size
            sig = signal[(b-1)*nbatch + n]
            data[n][b] = tok2int[sig]
        end
    end
    return data
end

#Task 5: Create Loss Function
function loss(model, state, sequence, range=1:length(sequence)-1; newstate=nothing, pdrop=0)
    preds = []
    for t in range
        input = model[end-2][sequence[t],:] # Look up embedding rows for this batch of tokens
        pred,state = predict(model,state,input; pdrop=pdrop) # pdrop is the dropout probability
        push!(preds,pred)
    end
    if newstate != nothing
        copy!(newstate, map(AutoGrad.getval,state))
    end
    pred0 = vcat(preds...)
    pred1 = dropout(pred0,pdrop)
    pred2 = pred1 * model[end-1]
    pred3 = pred2 .+ model[end]
    logp1 = logp(pred3,2) #Convert to dist
    nrows,ncols = size(pred3)
    golds = vcat(sequence[range+1]...) #Actual answers - gold standard
    index = similar(golds)
    @inbounds for i=1:length(golds)
        index[i] = i + (golds[i]-1)*nrows
        #index[i] = golds[i] + nrows*(i-1)
    end
    logp2 = logp1[index]
    logp3 = sum(logp2)
    return -(logp3) / length(golds)
end

function avgloss(model, sequence, S)
    T = length(sequence)
    B = length(sequence[1])
    state = initstate(model, B)
    total = count = 0.0
    for i in 1:S:T-1 #S is length of smaller sequences
        j = min(i+S-1,T-1)
        n = j-i+1
        total += n * Float64(loss(model, state, sequence, i:j; newstate=state))
        count += n
    end
    return total / count
end

#Task 6: Create Train Function
function train(model, sequence, optim, S, lossgradient; pdrop=0)
    T = length(sequence)
    B = length(sequence[1])
    state = initstate(model, B)
    for i in 1:S:T-1
        j = min(i+S-1,T-1)
        grads = lossgradient(model, state, sequence, i:j; newstate=state, pdrop=pdrop)
        update!(model, grads, optim)
    end
    return model
end
Out[5]:
train (generic function with 1 method)
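
One detail worth noting about minibatches above: the signal is cut into batch_size contiguous chunks (one per parallel stream), and batch n holds the n-th sample of every chunk, which is why the index is signal[(b-1)*nbatch + n]. A quick toy check of that layout (example values are mine):

toy_signal = collect(1:12)       # pretend these are 12 already-tokenized samples
batch_size = 3
nbatch     = div(length(toy_signal), batch_size)   # 4 batches

# Stream b is the b-th contiguous chunk of length nbatch:
#   stream 1 = samples 1:4, stream 2 = 5:8, stream 3 = 9:12
# Batch n collects the n-th sample of every stream:
batches = [[toy_signal[(b-1)*nbatch + n] for b in 1:batch_size] for n in 1:nbatch]
# batches[1] == [1, 5, 9], batches[2] == [2, 6, 10], and so on
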
In [6]:
function setup(numepochs,mode,arraytype)
  #Options
  if mode == "Normal"
    datafiles = ["test01_00s.csv","test02_45s.csv","test03_90s.csv","test04_00s.csv","test05_45s.csv","test06_90s.csv","test07_00s.csv","test08_45s.csv","test09_90s.csv","test10_00w.csv","test11_45w.csv","test12_90w.csv","test13_00w.csv","test14_45w.csv","test15_90w.csv","test16_00w.csv","test17_45w.csv","test18_90w.csv","test19_00j.csv","test20_45j.csv","test21_90j.csv","test22_00j.csv","test23_45j.csv","test24_90j.csv","test25_00j.csv","test26_45j.csv","test27_90j.csv"];  # If provided, use first file for training, second for dev, others for test.
  elseif mode == "Short"
    datafiles = ["207.csv","203.csv","212.csv"]
  else
    datafiles = ["207.csv","203.csv","212.csv","209.csv","201.csv","202.csv","205.csv","208.csv","210.csv","213.csv"];  
  end
  lenhidden    = [128];          # Sizes of one or more LSTM layers.

  options1 = Dict{String,Int64}()
  options1["togenerate"]   = Int64(4000)            # If non-zero generate given number of characters.
  options1["epochs"]       = Int64(numepochs)      # Number of epochs for training.
  options1["embed"]        = Int64(168)            # Size of the embedding vector.
  options1["batchsize"]    = Int64(512)            # Number of sequences to train on in parallel
  options1["seqlength"]    = Int64(20)             # Maximum number of steps to unroll the network for bptt. Initial epochs will use the epoch number as bptt length for faster convergence.
  options1["seed"]         = -1            # Random number seed. -1 or 0 is no fixed seed

  options2 = Dict{String,Float64}()
  options2["lr"]           = 1e-1           # Initial learning rate
  options2["gclip"]        = 3.0           # Value to clip the gradient norm at.
  options2["dpout"]        = 0.0            # Dropout probability.

  #options1["seed"] > 0 && srand(options1["seed"])

  # read text and report lengths
  signal = [];
  for i = 1:length(datafiles)
    sig = CSV.read(datafiles[i], nullable = false);
    signal = append!(signal, sig[:,3]);
  end
  !isempty(signal) && info("Values read: $(length(signal))")

  vocab = createVocabulary(signal);
  info("$(length(vocab)) unique values.") # The output should be 75 unique chars for input.txt

  #Now, let's generate a random signal from the untrained model
  model = initweights(lenhidden, length(vocab), options1["embed"],arraytype);
  firststate = initstate(model,1);

  randsignal = generate(model, vocab, options1["togenerate"])
  return randsignal, options1, options2, datafiles, lenhidden, signal, vocab, model
end

function getbatches(options1,signal,vocab)
  #Split the signal into minibatches.
  data =  minibatches(signal, vocab, options1["batchsize"]);
  return data
end

function setuptrain(options2, data, model)
  # Knet magic
  lossgradient = grad(loss,1);

  # Print the loss of randomly initialized model.
  losses = avgloss(model,data,100);
  println((:epoch,0,:loss,losses))

  #Below is the training part of RNN (with Adam)
  optim = map(x->Adam(lr=options2["lr"], gclip=options2["gclip"]), model);
  return lossgradient, optim
end

function trainloop(options1, options2, vocab, data, lossgradient, optim, model)
  # MAIN LOOP
  for epoch=1:options1["epochs"]
      @time train(model, data, optim, min(epoch,options1["seqlength"]), lossgradient; pdrop=options2["dpout"])
      # Calculate and print the losses after each epoch
      losses = avgloss(model,data,100);
      println((:epoch,epoch,:loss,losses))
  end

  #Once the loss is decreasing, generate a synthetic signal with the trained model.
  state = initstate(model,1);
  synth_signal = generate(model, vocab, options1["togenerate"])
  return synth_signal
end
Out[6]:
trainloop (generic function with 1 method)

Starting with Sinus Rhythm (Normal)

I first ran it on small snippets of normal ECG only. Each recording is only a few thousand data points (about 108,000 in total), which is very little training data, but that's all the normal dataset had.

In [7]:
randsignal, options1, options2, datafiles, lenhidden, signal, vocab, model = setup(40,"Normal","KnetArray");
plot(signal)
INFO: Values read: 107973
INFO: 854 unique values.
Knet.KnetArray{Float64,2}
Knet.KnetArray{Float64,2}
Knet.KnetArray{Float64,2}
Knet.KnetArray{Float64,2}
Knet.KnetArray{Float64,2}
Out[7]:

Here's the random signal before the LSTM was trained.

In [8]:
plot(randsignal,title = "Random Signal")
Out[8]:
In [9]:
data = getbatches(options1, signal, vocab);
size(data)
Out[9]:
(210,)
In [10]:
lossgradient, optim = setuptrain(options2, data, model);
(:epoch, 0, :loss, 6.750024895575731)
In [11]:
synth_signal = trainloop(options1, options2, vocab, data, lossgradient, optim, model);
  8.080580 seconds (1.68 M allocations: 78.938 MiB, 38.54% gc time)
(:epoch, 1, :loss, 2.7306747365065753)
  4.503444 seconds (873.25 k allocations: 25.392 MiB, 24.98% gc time)
(:epoch, 2, :loss, 2.494463208134128)
 11.677167 seconds (745.15 k allocations: 22.432 MiB, 76.83% gc time)
(:epoch, 3, :loss, 2.387746763378587)
  4.038743 seconds (765.15 k allocations: 22.007 MiB, 24.82% gc time)
(:epoch, 4, :loss, 2.354082985971185)
  3.960186 seconds (761.79 k allocations: 21.355 MiB, 25.40% gc time)
(:epoch, 5, :loss, 2.309400065233938)
  3.404398 seconds (752.66 k allocations: 21.108 MiB, 18.43% gc time)
(:epoch, 6, :loss, 2.277541259844965)
  3.417413 seconds (756.34 k allocations: 21.079 MiB, 18.15% gc time)
(:epoch, 7, :loss, 2.251168855519881)
  3.376852 seconds (759.20 k allocations: 21.054 MiB, 18.43% gc time)
(:epoch, 8, :loss, 2.2361805684059703)
  5.780621 seconds (740.21 k allocations: 20.094 MiB, 56.00% gc time)
(:epoch, 9, :loss, 2.2140249170195756)
  3.874395 seconds (777.40 k allocations: 21.292 MiB, 26.01% gc time)
(:epoch, 10, :loss, 2.201339926391189)
  3.392350 seconds (760.76 k allocations: 20.942 MiB, 18.93% gc time)
(:epoch, 11, :loss, 2.182266440110345)
  3.371702 seconds (766.51 k allocations: 21.534 MiB, 18.66% gc time)
(:epoch, 12, :loss, 2.1706520532869504)
  3.495207 seconds (768.17 k allocations: 21.103 MiB, 21.35% gc time)
(:epoch, 13, :loss, 2.1635139168173922)
  3.848262 seconds (793.08 k allocations: 22.051 MiB, 26.54% gc time)
(:epoch, 14, :loss, 2.1603654136409136)
  3.431057 seconds (766.63 k allocations: 20.937 MiB, 18.48% gc time)
(:epoch, 15, :loss, 2.1437450481696665)
  3.458687 seconds (766.29 k allocations: 20.886 MiB, 21.66% gc time)
(:epoch, 16, :loss, 2.1431847199199767)
  3.928266 seconds (778.34 k allocations: 21.080 MiB, 28.60% gc time)
(:epoch, 17, :loss, 2.1386937472435603)
  3.332582 seconds (767.89 k allocations: 20.901 MiB, 18.70% gc time)
(:epoch, 18, :loss, 2.1270462796029097)
  2.921152 seconds (750.11 k allocations: 20.541 MiB, 8.66% gc time)
(:epoch, 19, :loss, 2.1183336776888018)
  3.503821 seconds (768.00 k allocations: 20.864 MiB, 21.57% gc time)
(:epoch, 20, :loss, 2.1141392750459707)
  3.765651 seconds (769.24 k allocations: 20.393 MiB, 26.96% gc time)
(:epoch, 21, :loss, 2.111140665130706)
  3.787640 seconds (769.19 k allocations: 20.392 MiB, 26.92% gc time)
(:epoch, 22, :loss, 2.107770317643729)
  3.759041 seconds (769.23 k allocations: 20.393 MiB, 26.66% gc time)
(:epoch, 23, :loss, 2.1017167269178394)
  3.752442 seconds (769.23 k allocations: 20.393 MiB, 26.70% gc time)
(:epoch, 24, :loss, 2.099467990934383)
  3.779443 seconds (769.19 k allocations: 20.392 MiB, 27.06% gc time)
(:epoch, 25, :loss, 2.0947930455936667)
  3.978614 seconds (769.23 k allocations: 20.393 MiB, 27.59% gc time)
(:epoch, 26, :loss, 2.092952301397931)
  3.796721 seconds (769.23 k allocations: 20.393 MiB, 27.03% gc time)
(:epoch, 27, :loss, 2.092391783900897)
  3.768006 seconds (769.19 k allocations: 20.392 MiB, 26.88% gc time)
(:epoch, 28, :loss, 2.089740885871313)
  3.776699 seconds (769.23 k allocations: 20.393 MiB, 27.06% gc time)
(:epoch, 29, :loss, 2.0863362315206264)
  3.765632 seconds (769.23 k allocations: 20.393 MiB, 26.82% gc time)
(:epoch, 30, :loss, 2.0821271649533215)
  3.767496 seconds (769.19 k allocations: 20.392 MiB, 26.91% gc time)
(:epoch, 31, :loss, 2.083041412783952)
  3.773932 seconds (769.23 k allocations: 20.393 MiB, 26.94% gc time)
(:epoch, 32, :loss, 2.0814251372555552)
  3.777947 seconds (769.23 k allocations: 20.393 MiB, 26.81% gc time)
(:epoch, 33, :loss, 2.077604984087978)
  3.794641 seconds (769.19 k allocations: 20.392 MiB, 27.09% gc time)
(:epoch, 34, :loss, 2.07426934439883)
  3.783485 seconds (769.23 k allocations: 20.393 MiB, 26.88% gc time)
(:epoch, 35, :loss, 2.0713727162269815)
  3.763595 seconds (769.23 k allocations: 20.393 MiB, 26.78% gc time)
(:epoch, 36, :loss, 2.070033787198752)
  4.024290 seconds (769.19 k allocations: 20.392 MiB, 28.27% gc time)
(:epoch, 37, :loss, 2.067881541855103)
  3.798506 seconds (769.23 k allocations: 20.393 MiB, 26.77% gc time)
(:epoch, 38, :loss, 2.066438465529288)
  3.780749 seconds (769.23 k allocations: 20.393 MiB, 27.05% gc time)
(:epoch, 39, :loss, 2.065210478195719)
  3.777492 seconds (769.19 k allocations: 20.392 MiB, 26.93% gc time)
(:epoch, 40, :loss, 2.064790571026254)

After 40 epochs of training, here's the final signal. Not bad!

In [12]:
plot(synth_signal,title="Synthetic Signal")
Out[12]:

Abnormal Data (Arrhythmia) - first only 3 subjects

I ran it again on just three arrhythmia subjects, training for only a few epochs.

In [13]:
randsignal, options1, options2, datafiles, lenhidden, signal, vocab, model = setup(5,"Short","KnetArray");
plot(randsignal,title = "Random Signal")
Knet.KnetArray{Float64,2}
Knet.KnetArray{Float64,2}
Knet.KnetArray{Float64,2}
Knet.KnetArray{Float64,2}
Knet.KnetArray{Float64,2}
INFO: Values read: 1949997
INFO: 1164 unique values.
Out[13]:
In [14]:
data = getbatches(options1, signal, vocab);
size(data)
Out[14]:
(3808,)
In [15]:
lossgradient, optim = setuptrain(options2, data, model);
(:epoch, 0, :loss, 7.059617989510601)
In [16]:
synth_signal = trainloop(options1, options2, vocab, data, lossgradient, optim, model);
 72.725276 seconds (10.71 M allocations: 364.230 MiB, 20.38% gc time)
(:epoch, 1, :loss, 3.839420859861107)
 70.212813 seconds (12.10 M allocations: 353.690 MiB, 16.81% gc time)
(:epoch, 2, :loss, 3.579847942801794)
 64.522319 seconds (12.53 M allocations: 350.868 MiB, 16.59% gc time)
(:epoch, 3, :loss, 3.4969228215264057)
 66.621984 seconds (12.78 M allocations: 349.418 MiB, 17.35% gc time)
(:epoch, 4, :loss, 3.45582025727332)
 64.352762 seconds (12.91 M allocations: 347.927 MiB, 16.86% gc time)
(:epoch, 5, :loss, 3.38196140754816)

Definitely better than before - with only 5 epochs!

In [17]:
plot(synth_signal,title="Synthetic Signal")
Out[17]:

Arrhythmia Data - with 10 subjects and way more training epochs

Finally, I ran it with 10 subjects for 50 epochs, which took several hours even with a GPU.

In [18]:
randsignal, options1, options2, datafiles, lenhidden, signal, vocab, model = setup(50,"Arrhyth","KnetArray");
plot(randsignal,title = "Random Signal")
INFO: Values read: 6499990
Knet.KnetArray{Float64,2}
Knet.KnetArray{Float64,2}
Knet.KnetArray{Float64,2}
Knet.KnetArray{Float64,2}
Knet.KnetArray{Float64,2}
INFO: 1294 unique values.
Out[18]:
In [19]:
data = getbatches(options1, signal, vocab);
size(data)
Out[19]:
(12695,)
In [20]:
lossgradient, optim = setuptrain(options2, data, model);
(:epoch, 0, :loss, 7.165496024322549)
In [21]:
synth_signal = trainloop(options1, options2, vocab, data, lossgradient, optim, model);
365.044659 seconds (35.71 M allocations: 1.184 GiB, 45.18% gc time)
(:epoch, 1, :loss, 3.76779713357085)
337.137348 seconds (40.25 M allocations: 1.148 GiB, 39.73% gc time)
(:epoch, 2, :loss, 3.5745764255596537)
321.156340 seconds (41.78 M allocations: 1.141 GiB, 41.63% gc time)
(:epoch, 3, :loss, 3.4231918704123583)
324.275434 seconds (42.55 M allocations: 1.135 GiB, 41.23% gc time)
(:epoch, 4, :loss, 3.426337668637658)
318.314786 seconds (43.01 M allocations: 1.130 GiB, 40.57% gc time)
(:epoch, 5, :loss, 3.3521919821556945)
319.371869 seconds (43.31 M allocations: 1.132 GiB, 42.38% gc time)
(:epoch, 6, :loss, 3.368692726015728)
323.371053 seconds (43.53 M allocations: 1.129 GiB, 42.73% gc time)
(:epoch, 7, :loss, 3.325455988301986)
319.518989 seconds (43.69 M allocations: 1.128 GiB, 42.51% gc time)
(:epoch, 8, :loss, 3.3165191861065484)
307.513973 seconds (43.82 M allocations: 1.126 GiB, 38.88% gc time)
(:epoch, 9, :loss, 3.2922924031889753)
359.577053 seconds (43.93 M allocations: 1.125 GiB, 48.04% gc time)
(:epoch, 10, :loss, 3.318430457732104)
337.118242 seconds (44.01 M allocations: 1.124 GiB, 44.08% gc time)
(:epoch, 11, :loss, 3.2954082191611818)
299.863831 seconds (44.09 M allocations: 1.124 GiB, 43.73% gc time)
(:epoch, 12, :loss, 3.30556235241969)
292.989603 seconds (44.15 M allocations: 1.127 GiB, 41.13% gc time)
(:epoch, 13, :loss, 3.279936954891283)
297.956185 seconds (44.21 M allocations: 1.127 GiB, 38.44% gc time)
(:epoch, 14, :loss, 3.2619261924885796)
359.847776 seconds (44.25 M allocations: 1.126 GiB, 56.89% gc time)
(:epoch, 15, :loss, 3.2428819015555277)
346.793600 seconds (44.29 M allocations: 1.125 GiB, 55.30% gc time)
(:epoch, 16, :loss, 3.2362721517950366)
360.125422 seconds (44.33 M allocations: 1.125 GiB, 50.50% gc time)
(:epoch, 17, :loss, 3.225550707026472)
352.184350 seconds (44.37 M allocations: 1.125 GiB, 48.77% gc time)
(:epoch, 18, :loss, 3.2260827040983124)
344.969998 seconds (44.40 M allocations: 1.125 GiB, 47.13% gc time)
(:epoch, 19, :loss, 3.2266719534565413)
306.452202 seconds (44.43 M allocations: 1.124 GiB, 50.48% gc time)
(:epoch, 20, :loss, 3.2113960680478377)
307.734484 seconds (44.43 M allocations: 1.124 GiB, 50.67% gc time)
(:epoch, 21, :loss, 3.211493291382252)
306.912369 seconds (44.43 M allocations: 1.124 GiB, 50.51% gc time)
(:epoch, 22, :loss, 3.213408829349041)
306.897576 seconds (44.43 M allocations: 1.124 GiB, 50.50% gc time)
(:epoch, 23, :loss, 3.2195346051798306)
306.940262 seconds (44.43 M allocations: 1.124 GiB, 50.52% gc time)
(:epoch, 24, :loss, 3.2062081518916212)
307.387444 seconds (44.43 M allocations: 1.124 GiB, 50.58% gc time)
(:epoch, 25, :loss, 3.2080284229041256)
306.722677 seconds (44.43 M allocations: 1.124 GiB, 50.52% gc time)
(:epoch, 26, :loss, 3.212495324154836)
306.854266 seconds (44.43 M allocations: 1.124 GiB, 50.53% gc time)
(:epoch, 27, :loss, 3.2090372296084664)
306.981840 seconds (44.43 M allocations: 1.124 GiB, 50.56% gc time)
(:epoch, 28, :loss, 3.208049762679108)
307.000860 seconds (44.43 M allocations: 1.124 GiB, 50.55% gc time)
(:epoch, 29, :loss, 3.2015412670505192)
306.799541 seconds (44.43 M allocations: 1.124 GiB, 50.54% gc time)
(:epoch, 30, :loss, 3.2032848619844265)
306.885195 seconds (44.43 M allocations: 1.124 GiB, 50.55% gc time)
(:epoch, 31, :loss, 3.2053831898696754)
306.500991 seconds (44.43 M allocations: 1.124 GiB, 50.50% gc time)
(:epoch, 32, :loss, 3.199080271784316)
305.790977 seconds (44.43 M allocations: 1.124 GiB, 50.35% gc time)
(:epoch, 33, :loss, 3.226080575755441)
305.918327 seconds (44.43 M allocations: 1.124 GiB, 50.43% gc time)
(:epoch, 34, :loss, 3.222162807940824)
305.879692 seconds (44.43 M allocations: 1.124 GiB, 50.37% gc time)
(:epoch, 35, :loss, 3.204400526567293)
306.037594 seconds (44.43 M allocations: 1.124 GiB, 50.42% gc time)
(:epoch, 36, :loss, 3.2231035589872024)
305.513500 seconds (44.43 M allocations: 1.124 GiB, 50.31% gc time)
(:epoch, 37, :loss, 3.2311575097076144)
305.591834 seconds (44.43 M allocations: 1.124 GiB, 50.29% gc time)
(:epoch, 38, :loss, 3.2236128662206105)
305.707154 seconds (44.43 M allocations: 1.124 GiB, 50.31% gc time)
(:epoch, 39, :loss, 3.2117906486007333)
305.495251 seconds (44.43 M allocations: 1.124 GiB, 50.39% gc time)
(:epoch, 40, :loss, 3.2138156770967314)
305.441449 seconds (44.43 M allocations: 1.124 GiB, 50.32% gc time)
(:epoch, 41, :loss, 3.2146751121390285)
305.502181 seconds (44.43 M allocations: 1.124 GiB, 50.28% gc time)
(:epoch, 42, :loss, 3.214179237038549)
305.800856 seconds (44.43 M allocations: 1.124 GiB, 50.38% gc time)
(:epoch, 43, :loss, 3.214879129756629)
306.633028 seconds (44.43 M allocations: 1.124 GiB, 50.50% gc time)
(:epoch, 44, :loss, 3.2200462617699057)
306.058588 seconds (44.43 M allocations: 1.124 GiB, 50.46% gc time)
(:epoch, 45, :loss, 3.2178111915138574)
305.379233 seconds (44.43 M allocations: 1.124 GiB, 50.33% gc time)
(:epoch, 46, :loss, 3.2176201051398094)
305.276625 seconds (44.43 M allocations: 1.124 GiB, 50.33% gc time)
(:epoch, 47, :loss, 3.226205742303123)
305.725583 seconds (44.43 M allocations: 1.124 GiB, 50.32% gc time)
(:epoch, 48, :loss, 3.226928788346386)
305.391357 seconds (44.43 M allocations: 1.124 GiB, 50.38% gc time)
(:epoch, 49, :loss, 3.229751505101685)
306.111256 seconds (44.43 M allocations: 1.124 GiB, 50.48% gc time)
(:epoch, 50, :loss, 3.22944642359955)

This isn't going to fool any cardiologist, but it might do for the untrained eye.

In [22]:
plot(synth_signal,title="Synthetic Signal")
Out[22]: