var date = new Date();
var year = date.getFullYear();
var month = date.getMonth()+1; //js从0开始取
var date1 = date.getDate();
var hour = date.getHours();
var minutes = date.getMinutes();
var second = date.getSeconds();
return year + "-" + month + "-" + date1 + " " + hour + ":" + minutes + ":" + (parseFloat(second) + parseFloat(10)); //加10秒
(parseFloat(second) + parseFloat(10)) 我这样加是错的，因为 second 在 50 秒以上时相加结果会超过 60 秒——怎样才能在这个时间上正确地加 10 秒呢？
// Shift the existing Date object forward by 10 000 ms; the Date type
// normalizes the carry across seconds/minutes/hours for us.
var tenSecondsMs = 10 * 1000;
date.setTime(date.getTime() + tenSecondsMs);
alert(date);
// Add 10 seconds to a seconds value with proper wrap-around.
// The original referenced an undefined `tenMin` and clamped the result to 60,
// which is not a valid seconds value (valid range is 0-59) and loses the carry.
var tenMin = 10; // NOTE(review): misnamed in the original thread — this is 10 *seconds*
var mins = (second + tenMin) % 60; // 55 -> 5, wrapping instead of clamping
// NOTE(review): the carry into the minutes field is still dropped here;
// prefer date.setSeconds(date.getSeconds() + 10), which handles it fully.
// Formats the current time as "Y-M-D H:M:S"; when t === 1 the time is
// shifted forward by 10 seconds with correct rollover into minutes/hours.
function ShowDate(t){
var date = new Date();
if (t === 1) { // original `t = 1` was an assignment, so the branch always ran
// original called setDate(), which expects a day-of-month, not milliseconds
date.setTime(date.getTime() + 10000);
}
var year = date.getFullYear();
var month = date.getMonth()+1; // getMonth() is 0-based
var date1 = date.getDate();
var hour = date.getHours();
var minutes = date.getMinutes();
var second = date.getSeconds();
return year + "-" + month + "-" + date1 + " " + hour + ":" + minutes + ":" + second;
}
// (Original poster's remark: "after using your method the printed time was NULL" —
// that was caused by the `t = 1` assignment and the setDate(getTime()+10000) bug fixed above.)
// Add exactly one month to a date and display it.
var d = new Date("2008/04/15");
// Original used getMonth() + 1 + 1, which adds TWO months; the display
// offset for the 0-based month belongs in the output, not in setMonth().
d.setMonth(d.getMonth() + 1); // 同理可以加一天: setDate(getDate()+1)；加一年: setFullYear(getFullYear()+1)
alert("一个月后是" + d.getFullYear() + "-" + (d.getMonth() + 1) + "-" + d.getDate());
// Demonstrate setSeconds(): it carries any overflow past 59 into the
// minutes field automatically, unlike raw arithmetic on the seconds value.
var date = new Date();
alert(date); // before
var bumped = date.getSeconds() + 10;
// setSeconds() returns the new timestamp in milliseconds
var second = date.setSeconds(bumped);
alert(date); // after: exactly 10 seconds later
对比两次弹出的时间，可以看到秒数正确地加了 10 秒（超过 60 秒时自动进位到分钟）。