Hi all again,
The model is still giving me some headaches.
I solved the problem of the multinomial above by sampling from a sequence of nested binomials, as in
It works in JAGS (I'm not sure if it converges) but not in nimble.
With nimble I'm getting the message below all the time, but I've checked again and again and I don't see the problem:
Error: Error, wrong number of indices provided for getNodeFunctionIndexedInfo(ARG1_INDEXEDNODEINFO__,3)[1].
This occurred for: getNodeFunctionIndexedInfo(ARG1_INDEXEDNODEINFO__,3)[1]
This was part of the call: model_N_dot_binom_dot_n_dot_G[getNodeFunctionIndexedInfo(ARG1_INDEXEDNODEINFO__,1), getNodeFunctionIndexedInfo(ARG1_INDEXEDNODEINFO__,2), getNodeFunctionIndexedInfo(ARG1_INDEXEDNODEINFO__,3)[1]]
>
The problem seems to be in the red part of the code below.
I'm attaching the whole code but probably there are other problems too.
## Loading Packages
library(nimble)
library(tidyverse) # allows creating a list from just the variables' names (tibble::lst)
## As suggested in the nimble-users thread below, prepend the Rtools compiler
## directories to the PATH to work around compiler-path problems on Windows.
## BUG FIX: this URL was a bare (uncommented) line, which is a parse error and
## prevented the whole script from being sourced.
## https://groups.google.com/g/nimble-users/c/X0wFG7rGN1w/m/hXCfw2NKDwAJ
######################-------------------------------------
Path0 <- Sys.getenv("PATH")
# NOTE(review): hard-coded Rtools 4.0 location -- adjust if Rtools lives elsewhere.
Pathrtools <- paste0("C:\\rtools40\\usr\\bin;C:\\rtools40\\mingw64\\bin;", Path0)
Sys.setenv(PATH = Pathrtools)
##########################################################################################
################################################################################
############################ Data Set ##########################################
## Observed catches: 6 years x 9 length classes (numbers of fish per class).
## The 54 values are filled column-wise into a 9 x 6 matrix and transposed,
## so each row of DB is one year and each column one length class.
DB <- t(matrix(c(20, 1124, 26374, 40372, 25663, 6624, 2243, 917, 184,
                 104, 2356, 8658, 32196, 34666, 9972, 1661, 2462, 1152,
                 20, 1710, 47518, 23135, 24635, 15722, 4094, 1799, 799,
                 20, 794, 29072, 54396, 26090, 12371, 2463, 670, 309,
                 20, 1484, 12787, 41529, 30705, 11820, 3322, 868, 256,
                 20, 41, 8450, 14356, 11749, 6837, 2706, 602, 100),
               nrow = 9, ncol = 6, byrow = FALSE))
################################################################################
################################################################################
######## Model.6 - m6 ###########################
################################################################################
# Create the folder where this model's output files are written.
# NOTE(review): `MyDataFolder` is not defined anywhere in this script --
# confirm it is set by an earlier script, otherwise this line fails.
WorkDir <- paste0(MyDataFolder, "\\models\\m.6")
dir.create(WorkDir, showWarnings = FALSE)
## Length-structured state-space population model (growth, mortality, sex
## change, maturity, recruitment) with a lognormal observation model on
## observed catches. Multinomial draws are approximated by nested binomials.
Code.Model.6 <- nimbleCode(
{
#################################################
## Initial Population (State 0)
#################################################
## Prior for the initial total population size in numbers.
## Values come from Lorance (2011): the Bay of Biscay held ~150,000 t a few decades ago.
N0 ~ dlnorm(N0.loc, taulog=tau.N0)
# taulog: precision on the log scale
#tau.N0 <- 4/3 # with this value of tau the distribution is clearly diffuse...
# Suggested in 2013_Mantyniemi_SSM Appendix A
N0.round <- floor(N0)
#tau.N0 ~ dgamma(4,3)
## CV (coefficient of variation) parameterisation of the lognormal
#CV.N0 <- 0.5
CV.N0 ~ dbeta(4,10)
sigma.N0 <- sqrt(log(CV.N0^2 + 1))
sigma.N0.2 <- sigma.N0^2
tau.N0 <- 1/(sigma.N0.2)
N0.loc <- log(N0.ini/sqrt(CV.N0^2+1)) # location parameter of N0's distribution
#log.N0 <- log(N0) # use the log to work with smaller values
N0.ini.unif ~ dunif(5, 17)
N0.ini <- exp(N0.ini.unif)
## Distribution of the number of fish per INITIAL length class (before the first data year)
### Approximating the multinomial by nested binomials
n.0[1] ~ dbin(g.0[1], N0.round)
prob.binom.n.0[1] <- g.0[1]
for (i in 2:N.length.class) {
prob.binom.n.0[i] <- g.0[i]/(1-sum(g.0[1:(i-1)]))
}
for (i in 2:(N.length.class-1)) {
n.0[i] ~ dbin(prob.binom.n.0[i], N.binom.n.0[i])
N.binom.n.0[i] <- N0.round - sum(n.0[1:(i-1)])
}
# The last class takes the remainder, so the n.0's sum to N0.round
n.0[N.length.class] <- N.binom.n.0[N.length.class]
N.binom.n.0[N.length.class] <- N0.round - sum(n.0[1:(N.length.class-1)])
# Learning about the parameters of a Dirichlet distribution
# (WinBUGS Manual trick: Dirichlet parameters cannot be stochastic in BUGS)
for (m in 1:N.length.class) {
delta.g.0[m] ~ dgamma(alphas.g.0[m], 1)
g.0[m] <- delta.g.0[m] / sum(delta.g.0[1:N.length.class])
}
## Priors for alphas.g.0[i]
alphas.g.0[1] ~ dgamma(40,20)
alphas.g.0[2] ~ dgamma(160,40)
alphas.g.0[3] ~ dgamma(5000,40)
alphas.g.0[4] ~ dgamma(8000,40)
alphas.g.0[5] ~ dgamma(720,6) # a larger variance is wanted for this one
alphas.g.0[6] ~ dgamma(5000,40)
alphas.g.0[7] ~ dgamma(5000,50)
alphas.g.0[8] ~ dgamma(120,35)
alphas.g.0[9] ~ dgamma(49,7)
####################################################
##### Population Total (survivors + recruits)
####################################################
for (i in 1:N.length.class){
#N[1, i] <- N.S[1,i] + N.Rec[1,i]
N[1, i] <- floor(n.0[i])
}
for (t in 2:T) {
for (i in 1:N.length.class){
N[t, i] <- floor(N.S[t,i] + N.Rec[t,i])
}
}
###################################################################################
###################################################################################
### Length of a fish at instant t for each class (Von Bertalanffy)
## Length at time t of individuals which at time t-1 were in length class i.
for ( j in 1:N.length.class ){
L[j] ~ dnorm(mu.L[j], tau.L)
mu.L[j] <- ( L.inf - l[j] ) * (1-exp(-k)) + l[j]
}
mu.l0 <- ( L.inf - l0 ) * (1-exp(-k)) + l0
## Variables transformation
sigma.L <- sqrt(1/(tau.L+0.001)) # the +0.001 guards against division by a near-zero precision
## PRIORS
tau.L ~ dgamma(shape=25, rate=5) # 1/Length variance
#tau.L ~ dgamma(shape=8, rate=4) # 1/Length variance
k ~ dunif(0.05,0.15) # growth rate; expected to be around 0.15
L.inf ~ dunif(57.5,62.5) # asymptotic length; expected to be around 60
#########################################################
##### Growing Model
#########################################################
## Growing individuals from class i to one of each other classes j,
## given the number of individuals in the length class i
## in the end of year t-1, N[t-1,i], and given the probabilities of moving/growing, g[t, i, j]
for (i in 1:N.length.class){
for (j in 1:N.length.class){
n.G[1, i, j] <- 77 # dummy values for year 1 (not used by the dynamics)
}
}
for (t in 2:T) {
for (i in 1:N.length.class){
### Approximating the multinomial using several nested binomials
n.G[t, i, 1] ~ dbin(g[t, i, 1], N[t-1, i])
N.binom.n.G[t, i, 1] <- 777 # placeholder; index 1 is never used as a binomial size
for (j in 2:(N.length.class)) {
prob.binom.n.G[t, i, j] <- g[t, i, j]/(1-sum(g[t, i, 1:(j-1)]) + 0.0001)
}
## BUG FIX: this loop index was named `k`, which clashes with the
## growth-rate node `k` (k ~ dunif(0.05,0.15) above). In nimble a loop
## index must not share a name with a model node; this clash is the likely
## cause of the "getNodeFunctionIndexedInfo" error on N.binom.n.G.
## Renamed the index to `jj` (behavior otherwise unchanged).
for (jj in 2:(N.length.class-1)) {
n.G[t, i, jj] ~ dbin(prob.binom.n.G[t, i, jj], N.binom.n.G[t, i, jj])
N.binom.n.G[t, i, jj] <- N[t-1, i] - sum(n.G[t, i, 1:(jj-1)])
}
n.G[t, i, N.length.class] <- N.binom.n.G[t, i, N.length.class]
N.binom.n.G[t, i, N.length.class] <- N[t-1, i] - sum(n.G[t, i, 1:(N.length.class-1)])
}
}
## Probabilities of moving/growing to each class based on its length;
## g probabilities;
for (i in 1:N.length.class){
for (j in 1:N.length.class){
g[1, i, j] <- 0.77 # dummy values for year 1
}
}
for (t in 2:T) {
for (i in 1:N.length.class){
for (j in 1:N.length.class){
g[t, i, j] <- max(( phi((I[j+1] - mu.L[i])/sigma.L) - phi( (I[j] - mu.L[i])/sigma.L)) /
( phi( (I[N.length.class+1] - mu.L[i])/sigma.L) - phi( (I[1] - mu.L[i])/sigma.L)), 0.00001)
}
}
}
## Number of individuals moving/growing to class j = 1,..., m
## from each of the other classes.
## Gives the state of the population in year t after growth.
## N.G is the matrix of the population's distribution
for (i in 1:N.length.class){
N.G[1, i] <- 77 # dummy values for year 1
}
for (t in 2:T) {
for (j in 1:N.length.class){
N.G[t, j] <- floor(sum(n.G[t, 1:N.length.class, j]))
}
}
####################################################################
##### Survival and Mortality Models ####
####################################################################
#### After growth fishes will be subject to the survival/natural mortality process and fishing/Caught
## Surviving fishes at instant t (constant throughout the classes and time)
## Constant mortality = -Z
# for (t in 2:T) {
# for (i in 1:N.length.class){
# N.S[t, i] ~ dbin(prob=pi.S[t, i], size=N.G[t, i])
# pi.S[t, i] <- exp(-Z)
# }
# }
## number of individuals pertaining to the different outcomes (S, D or C) is modeled using a multinomial distribution
# S: Survival; D: Natural Death; C: Caught;
for (i in 1:N.length.class){
for (j in 1:3){
N.O[1, i, j] <- 77 # dummy values for year 1
}
N.S[1, i] <- N.O[1, i, 1]
N.D[1, i] <- N.O[1, i, 2]
N.C[1, i] <- N.O[1, i, 3]
pi.S.norm[1, i] <- 0.33
pi.D.norm[1, i] <- 0.33
pi.C.norm[1, i] <- 0.34
}
for (t in 2:T) {
for (i in 1:N.length.class){
### Approximating the multinomial using several nested binomials
N.O[t, i, 1] ~ dbin(g.O[t, i, 1], N.G[t, i]) # N.O is a vector c(N.S, N.D, N.C)
for (j in 2:3) {
prob.binom.N.O[t, i, j] <- g.O[t, i, j]/(1-sum(g.O[t, i, 1:(j-1)]) + 0.0001)
}
N.O[t, i, 2] ~ dbin(prob.binom.N.O[t, i, 2], N.binom.N.O[t, i, 2])
N.binom.N.O[t, i, 2] <- N.G[t, i] - N.O[t, i, 1]
N.O[t, i, 3] <- N.binom.N.O[t, i, 3]
N.binom.N.O[t, i, 3] <- N.G[t, i] - (N.O[t, i, 1] + N.O[t, i, 2])
##g.O[t, i, 1:3] <- c(pi.S.norm[t, i], pi.D.norm[t, i], pi.C.norm[t, i])
g.O[t, i, 1] <- pi.S.norm[t, i] # vector of probabilities
g.O[t, i, 2] <- pi.D.norm[t, i] # vector of probabilities
g.O[t, i, 3] <- pi.C.norm[t, i] # vector of probabilities
# Normalise the pi's so that they sum to 1; otherwise they cannot be used in the multinomial
pi.S.norm[t, i] <- exp(-Total.Mort[t, i]) # probability of survival
pi.D.norm[t, i] <- Nat.Mort[t, i]/Total.Mort[t, i] * (1-exp(-Total.Mort[t, i])) # probability of natural death
pi.C.norm[t, i] <- Fish.Mort[t, i]/Total.Mort[t, i] * (1-exp(-Total.Mort[t, i])) # probability of getting fished/caught
# Creating the matrix of survivals (S), Natural Death (D) and Fishing Death (C)
N.S[t, i] <- floor(N.O[t, i, 1])
N.D[t, i] <- floor(N.O[t, i, 2])
N.C[t, i] <- floor(N.O[t, i, 3])
}
}
## Natural Mortality depending on length l[i];
for (i in 1:N.length.class){
Nat.Mort[1, i] <- 0.77 # dummy values for year 1
}
for (t in 2:T) {
for (i in 1:N.length.class){
#Nat.Mort[t,i] <- k*(L[i]/L.inf)^(-1.5) # Natural mortality (Charnov equation); only if the next lines are not used
Nat.Mort[t,i] ~ dlnorm(Nat.Mort.loc[t,i], taulog=tau.Nat.Mort)
Nat.Mort.loc[t,i] <- log(mu.Nat.Mort[t,i]/sqrt(CV.Nat.Mort^2+1)) # location parameter of Nat.Mort's distribution
mu.Nat.Mort[t,i] <- k*(L[i]/L.inf)^(-1.5) # expected value of Nat.Mort[t,i]
}
}
CV.Nat.Mort ~ dbeta(2,10)
sigma.Nat.Mort <- sqrt(log(CV.Nat.Mort^2 + 1))
tau.Nat.Mort <- 1/(sigma.Nat.Mort^2)
## Variables transformation
#sigma.Nat.Surv <- sqrt(1/tau.Nat.Surv)
## PRIORS
#tau.Nat.Surv ~ dgamma(1, 10) # not needed when parameterising via CV
#####
## Fishing Mortality depending on length L[i] at instant t for each length category
# maximum instantaneous fishing mortality F_t^max
F.max[1] <- 0.77 # dummy value for year 1
mu.F.max[1] <- 0.77
for (t in 2:T) {
F.max[t] ~ dlnorm(loc.param.F.max[t], sdlog = sigma.F.max ) # parameterised as in Polansky & Newman (2020)
loc.param.F.max[t] <- log.mu.F.max[t] - sigma.F.max.2/2
log.mu.F.max[t] <- log( mu.F.max[t] ) # so that E(F.max[t]) = mu.F.max[t]
mu.F.max[t] ~ dlnorm(med.fishing, sdlog = 0.22)
}
## Variables transformation
sigma.F.max.2 <- log(CV.F.max^2 + 1) # log-variance
sigma.F.max <- sqrt(sigma.F.max.2) # log-std
tau.F.max <- 1/sigma.F.max.2
## PRIORS
#CV.F.max ~ dbeta(2,6) # ????? # fixed at 0.25 for now
CV.F.max <- 0.25
## Fishing selectivity, f(l_i);
for (i in 1:N.length.class){
logit.pi.f[i] ~ dnorm(mu.f[i], tau.f)
mu.f[i] <- alpha.f + beta.f * L[i] # beta.f is fixed
#mu.f[i] <- alpha.f + beta.f * mu.L[i] # beta.f is fixed
f[i] <- ilogit(logit.pi.f[i])
}
alpha.f <- - li.50.select * beta.f # li.50.select = 30cm;
beta.f ~ dlnorm(log(0.30), sdlog=0.22) #
## Variables transformation
sigma.f <- sqrt(1/tau.f)
## PRIORS
tau.f ~ dgamma(2, 15)
## Instantaneous fishing mortality F_{t,i}
for (i in 1:N.length.class){
Fish.Mort[1, i] <- 77 # dummy values for year 1
}
for (t in 2:T) {
for (i in 1:N.length.class){
Fish.Mort[t, i] <- f[i] * F.max[t]
}
}
### Total Mortality
for (i in 1:N.length.class){
Total.Mort[1, i] <- 77 # dummy values for year 1
}
for (t in 2:T) {
for (i in 1:N.length.class){
Total.Mort[t, i] <- Fish.Mort[t, i] + Nat.Mort[t,i]
}
}
#########################################################
### Sex_Change/sex_definition process (Number of Females by class in year t)
#########################################################
# Number of females in the year 0
for (i in 1:N.length.class){
N.F[1, i] ~ dbin(prob=pi.F[1, i], size=N[1, i]) # we assume that the process occurs in the end of time t after recruitment
#prior for the probabilities
logit.pi.F[1, i] ~ dnorm(mu.F[1,i], tau.F)
mu.F[1,i] <- alpha.F + beta.F * (0.8984*L[i]-0.4634) # alpha.F and beta.F are fixed
#mu.F[1,i] <- alpha.F + beta.F * mu.L[i] # alpha.F and beta.F are fixed
pi.F[1, i] <- ilogit(logit.pi.F[1, i])
}
# Number of females in the following years
for (t in 2:T) {
for (i in 1:N.length.class){
N.F[t, i] ~ dbin(prob=pi.F[t, i], size=N.S[t, i]) # we assume that the process occurs in the end of time t after recruitment
# N.F: number of females
#prior for the probabilities
logit.pi.F[t, i] ~ dnorm(mu.F[t,i], tau.F)
mu.F[t,i] <- alpha.F + beta.F * (0.8984*L[i]-0.4634) # alpha.F and beta.F are fixed
#mu.F[t,i] <- alpha.F + beta.F * mu.L[i] # alpha.F and beta.F are fixed
pi.F[t, i] <- ilogit(logit.pi.F[t, i])
}
}
## Variables transformation
sigma.F <- sqrt(1/tau.F)
## PRIORS
tau.F ~ dgamma(25, 5)
### Percentage of Mature Females per class
for (i in 1:N.length.class){
N.M[1, i] <- 77 # dummy values for year 1
logit.pi.M[1, i] <- 77
}
for (t in 2:T) {
for (i in 1:N.length.class){
N.M[t, i] ~ dbin(prob=pi.M[t, i], size=N.F[t, i]) # % of mature females per class, defined from the % of females per class
#prior for the probabilities
logit.pi.M[t, i] ~ dnorm(mu.M[t,i], tau.M)
#mu.M[t, i] <- alpha.M + beta.M * L[i] # alpha.M and beta.M are fixed
#mu.M[t, i] <- alpha.M + beta.M * mu.L[i] # alpha.M and beta.M are fixed
mu.M[t, i] <- alpha.M + beta.M * (0.8984*L[i]-0.4634) # Krug 1989 eq. 1: converts L to fork length FL # alpha.M and beta.M are fixed
pi.M[t, i] <- ilogit(logit.pi.M[t, i])
}
}
## Variables transformation
sigma.M <- sqrt(1/tau.M)
## PRIORS
tau.M ~ dgamma(25,5)
#tau.M ~ dunif(20, 60)
#tau.M ~ dgamma(60, 2), 20, 45) # Trying a truncated distribution
#tau.M ~ dgamma(60,2)
### Total number of Eggs in year t as a function of the population size in t
## Number of eggs per mature female of each class, r_{t,i}, as a function of length, l[i]
for (i in 1:N.length.class){
r[1, i] ~ dunif(76.9, 77.1) # dummy values for year 1
}
for (t in 2:T) {
for (i in 1:N.length.class){
r[t, i] ~ dlnorm(loc.param.r[t, i], sdlog = sigma.r ) #
loc.param.r[t, i] <- log.mu.r[t, i] - sigma.r^2/2
log.mu.r[t, i] <- log( mu.r[t, i] ) # so that E(r[t,i]) = mu.r[t,i]
mu.r[t, i] <- (0.8984*L[i]-0.4634)^b/exp(a)
}
}
## Variables transformation
sigma.r <- sqrt(log(CV.r^2 + 1))
sigma.r.2 <- sigma.r^2
## PRIORS
for (i in 1:N.length.class) {
E[1, i] <- 77 # dummy values for year 1
}
for (t in 2:T) {
for (i in 1:N.length.class){
E[t, i] <- floor(N.M[t, i]*r[t, i]) # number of eggs per mature female in length-class i
#E[t, i] <- N.M[t, i]*mu.r[t, i]
E.viable[t, i] <- floor(E[t, i]*mu.p.egg.surv) # assume the same survival of 0.000035 in the 1st year, i.e.
# this value is the prob. of surviving to the end of the 1st year of life
# for fish born that year, for all classes and times
}
N.R[t] <- floor(sum( E.viable[t, 1:N.length.class] )) # N.R <- Total number of recruits in year t
}
### Proportions of recruits entering each size class i, N.Rec[t, i],
## given the number of total recruits for each year, N.R[t]
## and given the probabilities of recruiting, g.rec[t, i]
for (i in 1:N.length.class){
N.Rec[1, i] ~ dunif(76.9, 77.1) # dummy values for year 1
}
for (t in 2:T) {
### Approximating the multinomial using several nested binomials
N.Rec[t, 1] ~ dbin(g.Rec[t, 1], N.R[t])
for (i in 2:(N.length.class)) {
prob.binom.N.Rec[t, i] <- g.Rec[t, i]/(1-sum(g.Rec[t, 1:(i-1)]) + 0.0001)
}
for (j in 2:(N.length.class-1)) {
N.Rec[t, j] ~ dbin(prob.binom.N.Rec[t, j], N.binom.N.Rec[t, j])
N.binom.N.Rec[t, j] <- N.R[t] - sum(N.Rec[t, 1:(j-1)])
}
N.Rec[t, N.length.class] <- N.binom.N.Rec[t, N.length.class]
N.binom.N.Rec[t, N.length.class] <- N.R[t] - sum(N.Rec[t, 1:(N.length.class-1)])
}
## Probabilities of being recruited to each class based on its expected length during year 1;
## g.rec probabilities;
for (i in 1:N.length.class){
g.Rec[1, i] ~ dunif(0.769, 0.771) # dummy values for year 1
}
for (t in 2:T) {
for (i in 1:N.length.class){
g.Rec[t, i] <- ( phi((I[i+1] - mu.l0)/sigma.L) - phi( (I[i] - mu.l0)/sigma.L) ) /
( phi( (I[N.length.class+1] - mu.l0)/sigma.L) - phi( (I[1] - mu.l0)/sigma.L) )
}
}
#################################################
##### Observation Model (Data go here)
#################################################
for (t in 2:T) {
for (i in 1:N.length.class){
N.obs.catch[t, i] ~ dlnorm(loc.obs.catch[t, i], taulog=tau.obs.catch) # N.obs.catch is assumed LogNormal
loc.obs.catch[t,i] <- log(N.C[t, i] + 1) - sigma.obs.catch^2/2 # number of fish caught at t;
# With this parameterisation the true number
# caught by fishing, N.O[i, 3, t], is the median of the observed catches.
} # the +1 is a trick so that the log is not -Inf; adding 1 fish changes nothing
# in the total of a population numbering thousands
}
# Parameterising as a function of CV
#CV.N.obs.catch <- 0.25
#sigma.obs.catch <- sqrt(log(CV.N.obs.catch^2 + 1))
#tau.obs.catch <- 1/(sigma.obs.catch^2)
## Parameterising as a function of CV (coefficient of variation)
CV.obs.catch ~ dbeta(4,10)
sigma.obs.catch <- sqrt(log(CV.obs.catch^2 + 1))
tau.obs.catch <- 1/(sigma.obs.catch^2)
## Variables transformation
#sigma.obs.catch <- sqrt(1/tau.obs.catch)
## PRIORS
#tau.obs.catch ~ dgamma(1, 30)
} ### end model
)
## End nimblecode
#####################
## Initial Conditions
#####################
T <- dim(N.obs.catch)[1] # Number of years in the series for which we have information
# NOTE(review): N.obs.catch is only assigned (from DB) further down in this
# script -- confirm it is already defined here (e.g. by an earlier script),
# otherwise this line fails. The same applies to N1 below.
# NOTE(review): `T` also masks the logical constant TRUE; it is kept because
# the model code uses the constant name `T` for the number of years.
#T <- T + 1 # years + 1 step for the prior for N0
lll <- 5 # length of each class (5cm)
N.length.class <- length(N1)
l0 <- min.fish.size <- 15 # Expected length of a fish at the end of its 1st year of life
#max.fish.size <- 60
I <- seq(min.fish.size, by=lll, length.out = length(N1)+1) # breakpoints vector of the length-based classes
# NOTE(review): the original note said this produces c(5,...,55), but with
# min.fish.size = 15 the breakpoints start at 15 -- verify which is intended.
l <- seq(I[1]+lll/2, I[length(I)]-lll/2, lll) # mean length of a fish in each class (mid point of the length-based class)
## plot the histogram of the initial population classes
# hist(rep(l, N1))
# Constants:
k <- 0.15 # Growth rate, Krug 1989 eq. 9
L.inf <- 60 # Maximum length, Krug 1989 eq. 9 says 57.5, but Ines' data show fish of 60cm
Z <- 0.25 # Instantaneous mortality rate; constant for all classes and time; Estrada (2016) says between 18% and 33%
# Mantyniemi (2015) simulates assuming this fixed value = 0.25 in Table A1 (there must be a reason!)
# And with this value there seem to be no more explosions
#K <- sum(N1)*0.5 # asymptotic maximum expected number of recruits (made up; unsure which % to use)
#alpha <- 0.25 # alpha \in [0,1]; rate at which K is reached (made up; unsure which value to use)
#CV.R <- 0.1 # CV for the standard deviations of the Recruitment
alpha.F <- -7 # Krug 1998 Table 4 (Expected Value of the logit(pi.F) when the l[i]=0)
beta.F <- 0.2 # Krug 1998 Table 4 (slope of the logit(pi.F) )
alpha.M <- -25 # Krug 1998 Table 1 (Expected Value of the logit(pi.M) when the l[i]=0)
beta.M <- 0.8 # Krug 1998 Table 1 (slope of the logit(pi.M) )
#mu.p.egg.surv <- 0.000035 # 0.000035 -> Estrada 2016; after eq. ;
mu.p.egg.surv <- 0.0000016 # Iuri arrived at a value of 0.0000016
# for an unfished population. (It seems to work better.)
#a <- 9.2935 # Krug 1986 page 8 (allometric model for the number of eggs per mature female as a function of length)
a <- 12.2935 # Using a <- 12.2935 seems to work better
b <- 6.914 # Krug 1986 page 8
CV.r <- 0.2 # CV for the standard deviations of the number of eggs
CV.F.max <- 0.25 # CV for the standard deviations of the maximum fishing mortality
med.fishing <- log(0.4) # exp(log(0.4)) is the median of the maximum instantaneous fishing mortality
li.50.select <- 30 # length at 50% selectivity (cm)
# Inits (initial values for the MCMC)
tau.L <- 5
N <- N.D <- N.C <- N.Rec <- matrix(900, T, N.length.class)
N.O <- array(seq(1000,along.with=1:(T*N.length.class)), dim=c(T, N.length.class, 3))*15
g.O <- array(1/3, dim=c(T, N.length.class, 3))
n.G <- array(seq(1000,along.with=1:(T*N.length.class*N.length.class)), dim=c(T, N.length.class, N.length.class))
L <- l
N.G <- matrix(1000*N.length.class, T, N.length.class)
N.G <- matrix(seq(50000,along.with=1:(T*N.length.class)), T, N.length.class) # overwrites the previous line
N.Gj <- matrix(1, T, N.length.class)
g <- array(1/N.length.class, dim=c(T, N.length.class, N.length.class))
N.S <- matrix(1000, T, N.length.class)
pi.S <- matrix(0.5, T, N.length.class)
logit.pi.S <- matrix(0.5, T, N.length.class)
tau.S <- 0.2
logit.pi.Nat.Surv <- matrix(0.5, T, N.length.class)
tau.Nat.Surv <- 0.2
pi.Nat.Surv <- matrix(0.5, T, N.length.class)
logit.pi.F <- matrix(1, T, N.length.class)
pi.F <- mu.F <- matrix(0.5, T, N.length.class)
N.F <- matrix(5000, T, N.length.class)
tau.F <- 0.5
N.M <- matrix(5000, T, N.length.class)
pi.M <- matrix(0.5, T, N.length.class)
mu.M <- matrix(5000, T, N.length.class)
tau.M <- 30
logit.pi.M <- matrix(1, T, N.length.class)
r <- matrix(exp(seq(7,15, length.out=N.length.class)), T, N.length.class, byrow=TRUE) # initial eggs
loc.param.r <- matrix(15, T, N.length.class)
mu.r <- log.mu.r <- matrix(15, T, N.length.class)
tau.r <- 0.5
E.viable <- matrix(1000, T, N.length.class)
E <- matrix(10000, T, N.length.class)
N.R <- floor(r[1,]) # initial number of eggs * number of females
N.Rec <- floor(matrix(c(N.R[1]/N.length.class, N.R[2]/N.length.class, rep(0,N.length.class-2)),
T, N.length.class,
byrow=TRUE)
) # initial recruits per class; mass only in the first two classes
g.Rec <- matrix(1/N.length.class, T, N.length.class)
pi.S.norm <- matrix(0.5, T, N.length.class)
pi.C.norm <- matrix(0.5, T, N.length.class)
pi.D.norm <- matrix(0.5, T, N.length.class)
F.max <- rep(0.7,T)
mu.F.max <- rep(0.7,T)
log.mu.F.max <- rep(2,T)
loc.param.F.max <- rep(0.2, T)
Nat.Mort <- matrix(1.6, T, N.length.class)
CV.Nat.Mort <- 0.21
Total.Mort <- matrix(1.8, T, N.length.class)
tau.f <- 0.2
Fish.Mort <- matrix(0.8, T, N.length.class)
pi.D <- matrix(0.5, T, N.length.class)
mu.Nat.Surv <- matrix(0.5, T, N.length.class)
tau.obs.catch <- 1
#alpha.f <- -7
beta.f <- 0.26
f <- rep(0.2, N.length.class)
loc.obs.catch <- matrix(5000, T, N.length.class)
CV.obs.catch <- 0.2
# Inits for the Initial State N0
N0 <- exp(13) # init for the total initial population size (in numbers)
#n.0 <- round(rep(N0/N.length.class, N.length.class))
n.0 <- N.G[1,]
g.0 <- rdirch(1, rep(1, N.length.class)) # rdirch comes from the nimble package
#N0 <- sum(n.0)
N0.ini.unif <- 12
tau.N0 <- 500
CV.N0 <- 0.2
alphas.g.0 <- rep(0.75, N.length.class)
delta.g.0 <- rep(1, N.length.class)
N.binom.n.G <- array(1000, dim=c(T, N.length.class, N.length.class))
#####################
## Creating the lists to feed the function nimbleModel
#####################
# Constants passed to nimbleModel: dimensions, class breakpoints, and fixed
# biological parameters. tibble::lst() names each element after its variable.
# NOTE(review): "Imput" looks like a typo for "Input"; kept as-is because the
# names are used further down.
ImputConsts <- tibble::lst(T, N.length.class, I, l, l0,
Z, alpha.F, beta.F,
alpha.M, beta.M, mu.p.egg.surv, a, b, CV.r,
med.fishing, li.50.select )
N.obs.catch <- DB
ImputData <- tibble::lst(N.obs.catch ) # number of fish caught (observed data)
# Initial values for every stochastic node (and some deterministic helpers).
ImputInits <- tibble::lst(tau.L, L, L.inf, k,
g, N.S, N.D, N.C,
logit.pi.F, tau.F, mu.F, pi.F,
logit.pi.M, tau.M, pi.M, mu.M,
E, E.viable, N.R,
loc.param.r, mu.r, log.mu.r,
g.O,
pi.S.norm, pi.C.norm, pi.D.norm,
beta.f, f,
F.max, mu.F.max, loc.param.F.max, CV.F.max,
log.mu.F.max,
Nat.Mort, CV.Nat.Mort,
Total.Mort, Fish.Mort,
tau.f, CV.obs.catch,
N0, CV.N0, N0.ini.unif, alphas.g.0, delta.g.0,
N.binom.n.G )
#### Build the model object
# (Kept commented out: useful for debugging the model graph step by step.)
#Model6 <- nimbleModel(code = Code.Model.6, name = "Model.6",
# constants = ImputConsts,
# inits = ImputInits,
# data = ImputData)
#
##Model6$modelDef$graph ## See vertices of the graph
#
#Model6MCMC <- buildMCMC(Model6)
#Model6MCMC$run(1)
### Identify nodes and simulate
#nodes.Names <- Model6$getNodeNames()
#nodesToSim <- Model6$getDependencies(nodes.Names, self = T, downstream = T)
##nodesToSim
### Simulate
#Model6$simulate(nodesToSim)
#Model6$tau.L
#####################
## ## run the model and write the output to a file called mcmc.out.Model.6.txt
#####################
# NOTE(review): run.nimble.7steps() and post.summs() are not defined in this
# script or in the nimble package -- presumably user-defined helpers loaded
# elsewhere; confirm they are sourced before this point.
# NOTE(review): "ppp" is listed in monitors but no node named ppp exists in
# Code.Model.6 above -- confirm, or remove it from the monitors.
mcmc.out.6 <- run.nimble.7steps(code = Code.Model.6,
name = "Model.6",
constants = ImputConsts,
data = ImputData,
inits = ImputInits,
n.steps = 6,
enableWAIC = FALSE,
monitors = c("N", "N0", "N0.loc", "N0.round", "tau.N0", "sigma.N0",
"n.0", "g.0",
"N.G", "n.G", "g", "g.O",
"N.F", "N.M",
"N.S", "N.D", "N.C",
"L", "mu.L", "k", "L.inf", "mu.l0",
"f",
"r", "mu.r", "sigma.r", "loc.param.r",
"g.Rec", "N.Rec", "tau.F",
"N.obs.catch", "loc.obs.catch", "tau.obs.catch",
"Total.Mort", "Nat.Mort", "Fish.Mort",
"pi.S.norm", "pi.D.norm", "pi.C.norm",
"F.max","N.R",
"logit.pi.f", "logit.pi.F", "logit.pi.M",
"tau.M", "sigma.M",
"pi.F", "pi.M",
"E","E.viable","ppp"),
#TypeOfSampling = "RW_block",
#ChangeSamplersName = c("n.0"),
niter = 3000, nburnin = 500, thin = 5,
niter7 = 1000, thin7 = 25,
WorkDir = WorkDir)
## the options for plot are =c("Trace", "ErgodicMean", "Density", "Histogram", "AutoCorr")
post.summs(postsamp = mcmc.out.6, plot="Trace", param="N0")
#####