Thursday, September 20, 2018

addTA(), approx()


Draw a line between two given positions on the candle chart.

# Draw a straight line from (start_date, start_val) to (end_date, end_val) on the
# current candle chart. Assumes a weekly series, since the index is rebuilt with
# by='weeks'; par_xts is accepted but not actually used in the body.
my_draw_line_on_candle <- function(par_xts,start_val,start_date,end_val,end_date){
  # number of weekly points between the two dates
  len <- length(seq(as.Date(start_date),as.Date(end_date),by='weeks'))
  # interpolate linearly between the two values over len points
  plot_data <- approx(seq(1,2,1),c(start_val,end_val),n=len,method='linear')$y
  tmp_xts <- as.xts(plot_data,seq(as.Date(start_date),as.Date(end_date),by='weeks'))
  addTA(tmp_xts,on=1,legend="slope")
}


> my_draw_line_on_candle(weekly_pf,2273486,"2018-01-26",2189051,"2018-09-19")
> my_draw_line_on_candle(weekly_pf,as.vector(first(weekly_pf)[,1]),index(first(weekly_pf)),2188051,"2018-09-19")
> my_draw_line_on_candle(weekly_pf,1160008,"2017-01-06",2163476,"2018-09-18")



> last(weekly_pf)[,4]/as.vector(first(weekly_pf)[,1])
              close
2018-09-19 3.450367
> length(index(weekly_pf))
[1] 247
> 3.450367**(1/247)
[1] 1.005027
> seq(1,247,1)
  [1]   1   2   3   4   5   6   7   8   9  10  11  12  13  14  15  16  17  18  19  20  21  22  23  24  25  26  27
<skip>
[244] 244 245 246 247
> 1.005027**seq(1,247,1)
  [1] 1.005027 1.010079 1.015157 1.020260 1.025389 1.030544 1.035724 1.040931 1.046163 1.051423 1.056708 1.062020
 <skip>
[241] 3.348365 3.365197 3.382114 3.399116 3.416203 3.433376 3.450636
> tmp <- as.xts(634440.2*1.005027**seq(1,247,1),index(weekly_pf))
> addTA(tmp,on=1,legend="powered")

my_draw_line_on_candle(weekly_pf,as.vector(first(weekly_pf)[,1]),index(first(weekly_pf)),last(weekly_pf)[,4],index(last(weekly_pf)[,4]))
tmp <- as.xts((as.vector((last(weekly_pf)[,4]/as.vector(first(weekly_pf)[,1]))**(1/length(index(weekly_pf))))**seq(1,length(index(weekly_pf)),1))*as.vector(first(weekly_pf)[,1]),index(weekly_pf))
addTA(tmp,on=1,legend="powered")
tmp <- as.xts((as.vector((1800000/as.vector(first(weekly_pf)[,1]))**(1/length(index(weekly_pf))))**seq(1,length(index(weekly_pf)),1))*as.vector(first(weekly_pf)[,1]),index(weekly_pf))

addTA(tmp,on=1,legend="")
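
The long as.xts() one-liners above repeat the same arithmetic: take the ratio of an end value to the first open, take its n-th root to get a per-bar growth rate, and raise it to the powers 1..n. A minimal sketch of that idiom as a reusable helper (the function name is mine, not from the original session):

# Constant-growth ("powered") curve from the first open of par_xts to end_val,
# one point per bar; defaults to the last close as the end value.
my_powered_curve <- function(par_xts, end_val = as.vector(last(par_xts)[,4])){
  n <- length(index(par_xts))
  start <- as.vector(first(par_xts)[,1])
  g <- (end_val/start)**(1/n)          # per-bar growth rate
  as.xts(start * g**seq(1,n,1), index(par_xts))
}
# addTA(my_powered_curve(weekly_pf), on=1, legend="powered")
# addTA(my_powered_curve(weekly_pf, 1800000), on=1, legend="")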


Saturday, September 15, 2018

Draw a line between given points on a candle chart --- addTA(), addLines()



> candleChart(to.weekly(fas_shares * FAS[,4] +spxl_shares * SPXL[,4]+as.xts(c2+c3+c4,index(fas_shares))),theme='white')
> weekly_pf[106]
               open     high      low    close
2016-01-08 796732.6 804098.1 684234.9 684234.9
> length(index(weekly_pf))-106
[1] 140
>  tmp <- as.xts(append(rep(684234.9,106),approx(seq(1,2,1),c(684234.9,2170000),n=140,method="linear")$y),append(rep(as.Date("2016-01-08"),106),last(index(weekly_pf),n=140)))
> addTA(tmp,on=1,legend="slope")
# length(index(weekly_pf)) - 158 = 88
> tmp <- as.xts(append(rep(1189422,158),approx(seq(1,2,1),c(1189422,2170000),n=88,method="linear")$y),append(rep(as.Date("2017-01-06"),158),last(index(weekly_pf),n=88)))
> addTA(tmp,on=1,legend="slope")
> weekly_pf[158]
              open    high     low   close
2017-01-06 1160008 1189422 1160008 1189422
> weekly_pf[210]
              open    high     low   close
2018-01-05 1906271 1991231 1906271 1991231
> plot(addLines(v=c(106,158,210)))
> addTA(as.xts(approx(seq(1,2,1),c(600000,2170000),n=246,method="linear")$y,index(weekly_pf)),on=1,name='2100000')
> addTA(tmp,on=1,legend="slope")
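
The two tmp <- as.xts(append(rep(...),...)) lines above follow the same pattern: pad the bars before the starting point with a constant value, then interpolate linearly up to a target value at the last bar. A sketch of that pattern as a helper (hypothetical name, assuming a weekly xts like weekly_pf):

# Draw a line from bar i of par_xts to end_val at the last bar; bars before i
# are padded with start_val so the overlay built by addTA() spans the chart.
my_slope_from_bar <- function(par_xts, i, start_val, end_val){
  n_tail <- length(index(par_xts)) - i
  vals <- append(rep(start_val,i),
                 approx(seq(1,2,1), c(start_val,end_val), n=n_tail, method="linear")$y)
  dates <- append(rep(index(par_xts)[i],i), last(index(par_xts), n=n_tail))
  addTA(as.xts(vals,dates), on=1, legend="slope")
}
# e.g. my_slope_from_bar(weekly_pf, 158, 1189422, 2170000)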


Friday, September 14, 2018

BP (blood pressure) graph with moving average --- plot() suffixes, filter()





> length(as.vector(na.omit(filter(apply.daily(bp.bangkok,mean)[,1],rep(1,7))/7)))
[1] 231
> plot(merge(as.xts(as.vector(na.omit(filter(apply.daily(bp.bangkok,mean)[,1],rep(1,7))/7)),last(index(apply.daily(bp.bangkok,mean)[,2]),len)),as.vector(na.omit(filter(apply.daily(bp.bangkok,mean)[,2],rep(1,7))/7)),last(apply.daily(bp.bangkok,mean),n=231)),suffixes=c("mh","ml","high","low"),main="daily w/ 7 day moving average",grid.ticks.on='weeks')

> events
           [,1]       
2018-06-20 "natrix"   
2018-07-14 "weight"   
2018-08-09 "abort natrix"
> addEventLines(events, srt=90, pos=2,col=10)


 plot(merge(as.xts(as.vector(na.omit(filter(apply.daily(bp.bangkok,mean)[,1],rep(1,7))/7)),last(index(apply.daily(bp.bangkok,mean)[,2]),len)),as.vector(na.omit(filter(apply.daily(bp.bangkok,mean)[,2],rep(1,7))/7)),last(apply.daily(bp.bangkok,mean),n=length(as.vector(na.omit(filter(apply.daily(bp.bangkok,mean)[,1],rep(1,7))/7))))),suffixes=c("mh","ml","high","low"),main="daily w/ 7 day moving average",grid.ticks.on='weeks')
addEventLines(events, srt=90, pos=2,col=10)

len <- length(as.vector(na.omit(filter(apply.daily(bp.bangkok,mean)[,1],rep(1,7))/7)))
plot(merge(as.xts(as.vector(na.omit(filter(apply.daily(bp.bangkok,mean)[,1],rep(1,7))/7)),last(index(apply.daily(bp.bangkok,mean)[,2]),len)),
as.vector(na.omit(filter(apply.daily(bp.bangkok,mean)[,2],rep(1,7))/7)),last(apply.daily(bp.bangkok,mean),n=len)),
suffixes=c("mh","ml","high","low"),main="daily w/ 7 day moving average",grid.ticks.on='weeks')
addSeries(as.xts(rep(mean(bp.xts[,1][bp.xts$High > 95]),len),last(index(apply.daily(bp.bangkok,mean)[,2]),len)),on=1,col=6,lwd=1)
addSeries(as.xts(rep(mean(bp.xts[,2][bp.xts$High > 95]),len),last(index(apply.daily(bp.bangkok,mean)[,2]),len)),on=1,col=6,lwd=1)
addSeries(as.xts(rep(last(as.vector(na.omit(filter(apply.daily(bp.bangkok,mean)[,2],rep(1,7))/7))),len),last(index(apply.daily(bp.bangkok,mean)[,2]),len)),on=1,col=6,lwd=1)
addSeries(as.xts(rep(last(as.vector(na.omit(filter(apply.daily(bp.bangkok,mean)[,1],rep(1,7))/7))),len),last(index(apply.daily(bp.bangkok,mean)[,2]),len)),on=1,col=6,lwd=1)
# events <- xts(c("natrix","weight","abort natrix"),as.Date(c("2018-06-20", "2018-07-14","2018-08-09")))
# addEventLines(events, srt=90, pos=2,col=10)
events <- xts(c("natrix","weight","abort natrix","55kg"),as.Date(c("2018-06-20", "2018-07-14","2018-08-09","2018-10-13")))

addEventLines(events, srt=90, pos=2,col=10)
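
The filter(apply.daily(...), rep(1,7))/7 expression is repeated many times above; here is a small sketch wrapping it (the function name is mine), assuming bp.bangkok is an intraday xts whose daily means are the series of interest:

# 7-day (centered) moving average of the daily mean of column col, returned as
# an xts aligned to the last dates of the daily series, as in the plots above.
my_daily_ma7 <- function(x, col=1, k=7){
  daily <- apply.daily(x, mean)[,col]                      # one value per day
  ma <- as.vector(na.omit(stats::filter(daily, rep(1,k))/k))
  as.xts(ma, last(index(daily), n=length(ma)))
}
# plot(merge(my_daily_ma7(bp.bangkok,1), my_daily_ma7(bp.bangkok,2),
#            suffixes=c("mh","ml")), grid.ticks.on='weeks')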




Thursday, September 13, 2018

Prepare Data --- getSymbols(), auto.arima(), as.yearmon(), as.yearqtr()




> len_mon <- 48  # number of months to predict
> r <- 1.04 # presumed GDP growth rate
> i <- seq(2,len_mon/3,1)  # seq of quarters to predict
> d <- as.Date(as.yearqtr(seq(Sys.Date(),as.Date("2100-12-31"),by="quarters")[i])) # pick the first day of each quarter
> as.xts(forecast(auto.arima(CS["2012::"]),h=len_mon)$mean[1:len_mon],as.Date(as.yearmon(mondate(index(last(CS)))+seq(1,len_mon,1)),frac=0))[as.Date(as.yearqtr(mondate(index(last(CS)))+seq(3,len_mon,3)),frac=0)]
               [,1]
2018-07-01 225.2251
2018-10-01 227.3579
<skip>
2022-04-01 266.0765
> last(CS)
           SPCS10RSA
2018-06-01  224.8054
> as.xts(forecast(auto.arima(PA),h=len_mon)$mean[1:len_mon],as.Date(as.yearmon(mondate(index(last(PA)))+seq(1,len_mon,1)),frac=0))[as.Date(as.yearqtr(mondate(index(last(PA)))+seq(3,len_mon,3)),frac=0)]
               [,1]
2018-10-01 149684.2
2019-01-01 150292.1
<skip>
2022-07-01 158802.2
>  last(PA)
           PAYEMS
2018-08-01 149279
> last(UC)
           UNDCONTSA
2018-07-01      1122
> as.xts(forecast(auto.arima(UC),h=len_mon)$mean[1:len_mon],as.Date(as.yearmon(mondate(index(last(UC)))+seq(1,len_mon,1)),frac=0))[as.Date(as.yearqtr(mondate(index(last(UC)))+seq(3,len_mon,3)),frac=0)]
2018-10-01 1130.871
2019-01-01 1139.742
<skip>
2022-07-01 1263.939


len_mon <- 48  # number of months to predict
gdp_g_r <- 1.04 # presumed GDP growth rate
i <- seq(2,len_mon/3,1)  # seq of quarters to predict
d <- as.Date(as.yearqtr(seq(Sys.Date(),as.Date("2100-12-31"),by="quarters")[i])) # pick the first day of each quarter

# Fetch GDP first: the date-range strings kikan and k2k depend on the last GDP
# observation and are needed by the quarterly aggregations below.
getSymbols("GDP",src="FRED")
G <- GDP
# m_GDP <- as.xts(as.vector(last(GDP)) * r**(i/4),d)
m_GDP <- as.xts(as.vector(last(G)) * gdp_g_r**(seq(1,len_mon/3,1)/4),as.Date(as.yearqtr(mondate(index(last(G)))+seq(3,len_mon,3)),frac=0))
kikan <- paste("1992-01-01::",as.Date(as.yearmon((mondate(index(last(G)))+2)),frac=1),sep="")
k2k <- paste("2000-01-01::",as.Date(as.yearmon((mondate(index(last(G)))+2)),frac=1),sep="")

getSymbols("PAYEMS",src="FRED")
PA <- PAYEMS
m_PA <- as.xts(forecast(auto.arima(PA),h=len_mon)$mean[1:len_mon],as.Date(as.yearmon(mondate(index(last(PA)))+seq(1,len_mon,1)),frac=0))[as.Date(as.yearqtr(mondate(index(last(PA)))+seq(3,len_mon,3)),frac=0)]
PAq <- apply.quarterly(PA[k2k],mean)
length(PAq)

getSymbols("UNDCONTSA",src="FRED")
UC <- UNDCONTSA
m_UC <- as.xts(forecast(auto.arima(UC),h=len_mon)$mean[1:len_mon],as.Date(as.yearmon(mondate(index(last(UC)))+seq(1,len_mon,1)),frac=0))[as.Date(as.yearqtr(mondate(index(last(UC)))+seq(3,len_mon,3)),frac=0)]
UCq <- apply.quarterly(UC[k2k],mean)
length(UCq)

getSymbols('SPCS10RSA',src='FRED')
CS <- SPCS10RSA
m_CS_2012 <- as.xts(forecast(auto.arima(CS["2012::"]),h=len_mon)$mean[1:len_mon],as.Date(as.yearmon(mondate(index(last(CS)))+seq(1,len_mon,1)),frac=0))[as.Date(as.yearqtr(mondate(index(last(CS)))+seq(3,len_mon,3)),frac=0)]
CSq <- apply.quarterly(CS[k2k],mean)
length(CSq)

SP5 <- as.xts(read.zoo(read.csv("~/SP5.csv")))

length(CSq)
length(UCq)
length(PAq)
summary(lm(apply.quarterly(SP5[k2k],mean)[,1] ~ PAq[k2k] * UCq[k2k] * G[k2k]*CSq[k2k] - UCq[k2k] -G[k2k] - PAq[k2k]*G[k2k] - UCq[k2k]*G[k2k]*CSq[k2k]))
my_sp5cs(k2k,m_GDP[d[1:9]],m_PA[d[1:9]],m_UC[d[1:9]],m_CS_2012[d[1:9]])
result.eps <- lm(apply.quarterly(SP5[,4][k2k],mean) ~ eps_year_xts[k2k]+apply.quarterly(PA[k2k],mean)+apply.quarterly(CS[k2k],mean)+apply.quarterly(UC[k2k],mean))
result.gpuc <- lm(apply.quarterly(SP5[k2k],mean)[,1] ~ PAq[k2k] * UCq[k2k] * G[k2k]*CSq[k2k] - UCq[k2k] -G[k2k] - PAq[k2k]*G[k2k] - UCq[k2k]*G[k2k]*CSq[k2k])
summary(result.gpuc)
SP5.result <- merge(residuals(result.gpuc),predict(result.gpuc),residuals(result.eps),predict(result.eps))

GSPC.predict <- merge(to.monthly(GSPC)[substr(k2k,11,23)],last(spline(seq(1,length(SP5.result[,1]),1),as.vector(SP5.result[,2]),n=length(SP5.result[,1])*3+1)$y,n=length(to.monthly(GSPC)[,1][substr(k2k,11,23)])),last(spline(seq(1,length(SP5.result[,1]),1),as.vector(SP5.result[,4]),n=length(SP5.result[,1])*3+1)$y,n=length(to.monthly(GSPC)[,1][substr(k2k,11,23)])),suffixes=c('','spline','eps'))
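
m_PA, m_UC and m_CS_2012 above all use the same long forecast expression; a sketch of it as one helper (the name is mine), assuming the forecast, mondate and zoo packages are attached as elsewhere in these notes:

# Fit auto.arima on a monthly series, forecast len_mon months ahead, index the
# forecasts at the first day of each month, and keep only quarter-start dates.
my_qtr_forecast <- function(x, len_mon){
  fc <- forecast(auto.arima(x), h=len_mon)$mean[1:len_mon]
  m_dates <- as.Date(as.yearmon(mondate(index(last(x)))+seq(1,len_mon,1)), frac=0)
  q_dates <- as.Date(as.yearqtr(mondate(index(last(x)))+seq(3,len_mon,3)), frac=0)
  as.xts(fc, m_dates)[q_dates]
}
# m_PA <- my_qtr_forecast(PA, len_mon); m_UC <- my_qtr_forecast(UC, len_mon)
# m_CS_2012 <- my_qtr_forecast(CS["2012::"], len_mon)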


Calculate moving average --- filter(), grid.ticks.on='weeks', addEventLines()



> length(as.vector(na.omit(filter(apply.daily(bp.bangkok,mean)[,1],rep(1,7))/7)))
[1] 230
> class(events)
[1] "xts" "zoo"
> events
           [,1]       
2018-06-20 "natrix"   
2018-07-14 "weight"   
2018-08-09 "abort natrix"
> plot(merge(as.xts(as.vector(na.omit(filter(apply.daily(bp.bangkok,mean)[,1],rep(1,7))/7)),last(seq(as.Date("2018-01-01"),Sys.Date(),by='days'),n=230)),as.vector(na.omit(filter(apply.daily(bp.bangkok,mean)[,2],rep(1,7))/7)),suffixes=c("high","low")),grid.ticks.on='weeks',ylim=c(50,150))
> addEventLines(events, srt=90, pos=2,col=10)
> length(as.vector(na.omit(filter(apply.daily(bp.bangkok,mean)[,1],rep(1,7))/7)))
[1] 230
> last(as.vector(na.omit(filter(apply.daily(bp.bangkok,mean)[,1],rep(1,7))/7)))
[1] 112.8571
> addSeries(as.xts(rep(112.85,len),last(index(apply.daily(bp.bangkok,mean)[,2]),len)),ylim=c(par('yaxp')[1],par('yaxp')[2]),on=1,col=4)
> last(as.vector(na.omit(filter(apply.daily(bp.bangkok,mean)[,2],rep(1,7))/7)))
[1] 72.92857
> addSeries(as.xts(rep(72.92,len),last(index(apply.daily(bp.bangkok,mean)[,2]),len)),ylim=c(par('yaxp')[1],par('yaxp')[2]),on=1,col=4)



len <- length(as.vector(na.omit(filter(apply.daily(bp.bangkok,mean)[,1],rep(1,7))/7)))
plot(merge(as.xts(as.vector(na.omit(filter(apply.daily(bp.bangkok,mean)[,1],rep(1,7))/7)),last(index(apply.daily(bp.bangkok,mean)[,2]),len)),as.vector(na.omit(filter(apply.daily(bp.bangkok,mean)[,2],rep(1,7))/7)),suffixes=c("high","low")),grid.ticks.on='weeks',ylim=c(50,150),main="7days moving average",type='p')
events <- xts(c("natrix","weight","abort natrix"),as.Date(c("2018-06-20", "2018-07-14","2018-08-09")))
addEventLines(events, srt=90, pos=2,col=10)
addSeries(as.xts(rep(85,len),last(index(apply.daily(bp.bangkok,mean)[,2]),len)),ylim=c(par('yaxp')[1],par('yaxp')[2]),on=1,col=3)
addSeries(as.xts(rep(125,len),last(index(apply.daily(bp.bangkok,mean)[,2]),len)),ylim=c(par('yaxp')[1],par('yaxp')[2]),on=1,col=3)


plot(merge(as.xts(as.vector(na.omit(filter(apply.daily(bp.bangkok,mean)[,1],rep(1,7))/7)),last(seq(as.Date("2018-01-01"),Sys.Date(),by='days'),n=length(as.vector(na.omit(filter(apply.daily(bp.bangkok,mean)[,1],rep(1,7))/7))))),as.vector(na.omit(filter(apply.daily(bp.bangkok,mean)[,2],rep(1,7))/7)),suffixes=c("high","low")),grid.ticks.on='weeks',ylim=c(50,150),main="7days moving average")
addEventLines(events, srt=90, pos=2,col=10)
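
The addSeries() overlays in this post all build a constant xts on the same dates; a tiny sketch of that step as a helper (hypothetical name):

# Constant series at a given level over the supplied dates, for use with
# addSeries(..., on=1) as a horizontal reference line.
my_const_series <- function(level, dates){
  as.xts(rep(level, length(dates)), dates)
}
# ref_dates <- last(index(apply.daily(bp.bangkok,mean)[,2]), len)
# addSeries(my_const_series(85, ref_dates), on=1, col=3)
# addSeries(my_const_series(125, ref_dates), on=1, col=3)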


Tuesday, September 11, 2018

Univ. of Michigan Consumer Sentiment Index (UMCSENT) vs. S&P 500.


> length(SP5[kikan][,4])
[1] 318
> cor.test((as.vector((SP5[kikan][,4]/as.vector(lag(SP5[kikan],k=2)[,1]) -1 )[seq(3,318,3)])),(quarterlyReturn(UMCSENT[kikan])))

Pearson's product-moment correlation

data:  (as.vector((SP5[kikan][, 4]/as.vector(lag(SP5[kikan], k = 2)[, 1]) - 1)[seq(3, 318, 3)])) and (quarterlyReturn(UMCSENT[kikan]))
t = 4.6777, df = 104, p-value = 8.76e-06
alternative hypothesis: true correlation is not equal to 0
95 percent confidence interval:
 0.2457038 0.5629063
sample estimates:
      cor
0.4169185

> plot.default((as.vector((SP5[kikan][,4]/as.vector(lag(SP5[kikan],k=2)[,1]) -1 )[seq(3,318,3)])),(quarterlyReturn(UMCSENT[kikan])))
> abline(h=0)
> abline(v=0)

> plot.xts(merge((SP5[kikan][,4]/as.vector(lag(SP5[kikan],k=2)[,1]) -1 )[seq(3,318,3)],quarterlyReturn(UMCSENT[kikan])))



cor.test((as.vector((SP5[kikan][,4]/as.vector(lag(SP5[kikan],k=2)[,1]) -1 )[seq(3,length(SP5[kikan][,4]),3)])),(quarterlyReturn(UMCSENT[kikan])))
plot.default((as.vector((SP5[kikan][,4]/as.vector(lag(SP5[kikan],k=2)[,1]) -1 )[seq(3,length(SP5[kikan][,4]),3)])),(quarterlyReturn(UMCSENT[kikan])))
abline(h=0)
abline(v=0)
plot.xts(merge((SP5[kikan][,4]/as.vector(lag(SP5[kikan],k=2)[,1]) -1 )[seq(3,318,3)],quarterlyReturn(UMCSENT[kikan])))
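
The SP5 return expression passed to cor.test() and plot.default() above is the same in both calls; a sketch of it as a helper (the function name is mine): it divides each close by the open from two months earlier and keeps every third observation as a quarterly return.

# Quarterly return of a monthly OHLC xts over the date range rng.
my_sp5_qtr_ret <- function(sp5, rng){
  x <- sp5[rng]
  as.vector((x[,4]/as.vector(lag(x,k=2)[,1]) - 1)[seq(3,length(x[,4]),3)])
}
# cor.test(my_sp5_qtr_ret(SP5, kikan), quarterlyReturn(UMCSENT[kikan]))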

Monday, September 10, 2018

mondate, as.yearmon, and arithmetic on numbers of months --- mondate, as.yearmon, as.yearqtr, as.Date(date, frac=1), the first day of the given month



> index(last(G))
[1] "2018-04-01"
> mondate(index(last(G)))+2
mondate: timeunits="months"
[1] 2018-06-01
> as.yearmon(as.character(mondate(index(last(G)))+2))
[1] " 6 2018"
> as.yearmon((mondate(index(last(G)))+2))
[1] " 6 2018"
> as.yearmon(mondate(index(last(G))+2))
[1] " 4 2018"
> as.yearmon((mondate(index(last(G)))+2))
[1] " 6 2018"
> as.Date(as.yearmon((mondate(index(last(G)))+2)),frac=1)
[1] "2018-06-30"
>
> paste("1992-01-01::",as.Date(as.yearmon((mondate(index(last(G)))+2)),frac=1),sep="")
[1] "1992-01-01::2018-06-30"
> kikan <- paste("1992-01-01::",as.Date(as.yearmon((mondate(index(last(G)))+2)),frac=1),sep="")
# calculate coming quarter beginning dates
> as.Date(as.yearqtr((mondate(index(last(G)))+seq(3,27,3))))
[1] "2018-07-01" "2018-10-01" "2019-01-01" "2019-04-01" "2019-07-01" "2019-10-01" "2020-01-01" "2020-04-01" "2020-07-01"
> as.Date(as.yearmon(mondate(index(last(CS)))+seq(1,48,1)),frac=0)
 [1] "2018-07-01" "2018-08-01" "2018-09-01" "2018-10-01" "2018-11-01" "2018-12-01" "2019-01-01" "2019-02-01" "2019-03-01"
<skip>
[46] "2022-04-01" "2022-05-01" "2022-06-01"
> as.Date(as.yearqtr(mondate(index(last(CS)))+seq(3,48,3)),frac=0)
 [1] "2018-07-01" "2018-10-01" "2019-01-01" "2019-04-01" "2019-07-01" "2019-10-01" "2020-01-01" "2020-04-01" "2020-07-01"
[10] "2020-10-01" "2021-01-01" "2021-04-01" "2021-07-01" "2021-10-01" "2022-01-01" "2022-04-01"


kikan <- paste("1992-01-01::",as.Date(as.yearmon((mondate(index(last(G)))+2)),frac=1),sep="")
k2k <- paste("2000-01-01::",as.Date(as.yearmon((mondate(index(last(G)))+2)),frac=1),sep="")
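
A short recap of the date arithmetic demonstrated above (assuming mondate and zoo are attached, as in this session):

d0 <- as.Date("2018-04-01")
as.Date(as.yearmon(mondate(d0)+2), frac=0)   # "2018-06-01", first day of the month
as.Date(as.yearmon(mondate(d0)+2), frac=1)   # "2018-06-30", last day of the month
as.Date(as.yearqtr(mondate(d0)+3), frac=0)   # "2018-07-01", next quarter start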

addLegend() spline() grid.ticks.on=



# > length(SP5.result[,1])
# [1] 74
# 74 equals length(as.vector(SP5.result[,2])); the data are quarterly because GDP is only published at that frequency.
# Those 74 quarters span 73 * 3 = 219 monthly steps, i.e. 73 * 3 + 1 = 220 monthly points.
# Thus the quarterly series can be converted to a monthly one with spline().
# > length(to.monthly(GSPC)["::2018-06-30"][,1])
# [1] 138
# > k2k
# [1] "2000-01-01::2018-06-30"

result <- lm(apply.quarterly(SP5[k2k],mean)[,1] ~ PAq[k2k] * UCq[k2k] * G[k2k]*CSq[k2k] - UCq[k2k] -G[k2k] - PAq[k2k]*G[k2k] - UCq[k2k]*G[k2k]*CSq[k2k])
SP5.result <- merge(residuals(result),predict(result))
GSPC.predict <- merge(to.monthly(GSPC)["::2018-06-30"],last(spline(seq(1,74,1),as.vector(SP5.result[,2]),n=220)$y,n=138),suffixes=c('','spline'))
plot(merge(GSPC.predict[,7],GSPC.predict[,4],GSPC.predict[,4]-GSPC.predict[,7]),main="GSPC.predict[,4] vs. GSPC.predict[,7]",grid.ticks.on='months')
tmp.legend <- "Black: theory \nRed: Actual\nGreen: residuals"
addLegend(legend.loc = "topleft", legend.names = tmp.legend,col=3)


result <- lm(apply.quarterly(SP5[k2k],mean)[,1] ~ PAq[k2k] * UCq[k2k] * G[k2k]*CSq[k2k] - UCq[k2k] -G[k2k] - PAq[k2k]*G[k2k] - UCq[k2k]*G[k2k]*CSq[k2k])
SP5.result <- merge(residuals(result),predict(result))
GSPC.predict <- merge(to.monthly(GSPC)[substr(k2k,11,23)],last(spline(seq(1,length(SP5.result[,1]),1),as.vector(SP5.result[,2]),n=length(SP5.result[,1])*3+1)$y,n=length(to.monthly(GSPC)[,1][substr(k2k,11,23)])),suffixes=c('','spline'))
plot(merge(GSPC.predict[,7],GSPC.predict[,4],GSPC.predict[,4]-GSPC.predict[,7]),main="GSPC.predict[,4] vs. GSPC.predict[,7]",grid.ticks.on='months')
tmp.legend <- "Black: theory \nRed: Actual\nGreen: residuals"
addLegend(legend.loc = "topleft", legend.names = tmp.legend,col=3)
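
A toy illustration of the quarterly-to-monthly conversion described in the comments above: n quarterly points span (n-1)*3 monthly steps, i.e. (n-1)*3+1 monthly points, which is the n passed to spline(). The numbers below are made up.

q_vals <- c(100, 110, 105, 120)              # 4 quarterly values (toy data)
n_mon <- (length(q_vals)-1)*3 + 1            # 10 monthly points
m_vals <- spline(seq(1,length(q_vals),1), q_vals, n=n_mon)$y
length(m_vals)                               # 10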




Friday, September 7, 2018

Draw horizontal and vertical lines on a plot.default() chart --- abline()



> plot.default(tmp.call$acr,tmp.call$intime,first=grid(11,11))
> abline(h=90)
> abline(v=90)
> tmp.call   
   intime   acr
1   85.71 79.74
2   73.19 76.37
3   90.60 95.51
4   92.51 96.71
5   94.89 97.16
6   91.26 96.92
7   73.19 82.20
8   73.17 91.11
9   52.63 70.89
10  92.80 94.69
11  87.79 90.52
12  89.55 93.05
13  97.36 98.27


Tuesday, September 4, 2018

DARLING in the FRANXX - Full Soundtrack (CD 1, CD 2 & CD 3)


Download from this URL

Music: Asami Tachibana
Albums: DARLING in the FRANXX Original Soundtrack Vol. 01, Vol. 02 & Vol. 03

Disclaimer: I do not own this music.
You can purchase the whole OST together with episodes via CDJapan:

Blu-ray+CD (ep. 01-03): http://www.cdjapan.co.jp/product/ANZX...
DVD+CD (ep. 01-03): http://www.cdjapan.co.jp/product/ANZB...

Blu-ray+CD (ep. 10-12): http://www.cdjapan.co.jp/product/ANZX...
DVD+CD (ep. 10-12): http://www.cdjapan.co.jp/product/ANZB...

Blu-ray+CD (ep. 13-15): http://www.cdjapan.co.jp/product/ANZX...
DVD+CD (ep. 13-15): http://www.cdjapan.co.jp/product/ANZB...

"cÅGE"
Lyricist: cAnON.
Vocalist: Anna Pingina

"Vanquish"
Lyricist: Benjamin & mpi
Vocalist: Monique Dehaney

"FUSE"
Lyricist: Benjamin & mpi
Vocalist: Claudia Vazquez

"Battle Cry"
Lyricist: Dj L-Spade
Vocalist: Dj L-Spade

"D# regards"
Lyricist: cAnON.
Vocalist: Anna Pingina

♫ Tracklist (CD 1):

0:00 cÅGE
4:57 Vanquish
7:35 Odds and ends
9:55 o-DOR
11:42 Dino-S
13:41 BEAST
16:26 Counterattack
19:27 Operation
22:28 Reversal
25:13 In the FRANXX
27:19 Trente
29:07 Distopia
30:57 Godliness
33:14 Aile
35:36 Clarity
38:03 Nuance
39:50 Miel
41:18 Dropping
43:24 CODE:002
45:58 VICTORIA
49:02 Torikago~BGM-Rearrange~

♫ Tracklist (CD 2):

50:57 FUSE
53:56 Battle Cry
57:24 Your smile
59:39 Abandoned Places
1:01:18 The Seven Sages
1:02:58 Klaxosaur
1:05:16 Gutenberg
1:07:48 Shady History
1:09:51 ADuLt
1:11:25 One's Word
1:13:13 Vita
1:14:44 CHiLDRen
1:16:13 CODE:015
1:19:05 Lilac
1:20:55 Red Hibiscus
1:23:20 The Sands
1:25:21 Boys×Girls
1:27:03 VICTORIA -piano ver.-
1:30:11 Lilac -guitar ver.-
1:32:01 Mistilteinn
1:34:28 D# regards

♫ Tracklist (CD 3):

1:38:40 CODE:016
1:41:08 RoCco
1:43:07 Lotus
1:45:05 CODE:001
1:47:29 CoiL
1:50:44 DESPAIR
1:53:08 InVaDeR
1:55:51 GLADIOLUS
1:59:42 JUSTICE
2:02:00 Requiem
2:04:53 Cherry blossoms
2:07:14 HIRO and ZERO TWO
2:10:54 cÅGE -piano ver.-
2:12:50 JUSTICE -Epiano ver.-
2:15:16 Pray for..
2:21:05 cÅGE -SPS ver.-
2:23:47 FUSE -instrumental-
2:26:46 Battle Cry -instrumental-
2:30:14 Vanquish -instrumental-
2:32:54 D# regards -instrumental-
2:37:04 cÅGE -instrumental-
2:41:59 Torikago~BGM-Rearrange-guitar ver.~