( ! ) Deprecated: Function WP_Dependencies-&gt;add_data() was called with an argument that is <strong>deprecated</strong> since version 6.9.0! IE conditional comments are ignored by all supported browsers. in /var/www/html/wp-includes/functions.php on line 6131
Call Stack
#TimeMemoryFunctionLocation
10.0000484072{main}( ).../index.php:0
20.0005484424require( '/var/www/html/wp-blog-header.php ).../index.php:17
30.29504113112require_once( '/var/www/html/wp-includes/template-loader.php ).../wp-blog-header.php:19
40.30014141824include( '/var/www/html/wp-content/themes/twentyfifteen/archive.php ).../template-loader.php:125
50.30014141824get_header( $name = ???, $args = ??? ).../archive.php:19
60.30014142040locate_template( $template_names = [0 => 'header.php'], $load = TRUE, $load_once = TRUE, $args = [] ).../general-template.php:48
70.30014142136load_template( $_template_file = '/var/www/html/wp-content/themes/twentyfifteen/header.php', $load_once = TRUE, $args = [] ).../template.php:749
80.30044142680require_once( '/var/www/html/wp-content/themes/twentyfifteen/header.php ).../template.php:814
90.30064150088wp_head( ).../header.php:18
100.30064150088do_action( $hook_name = 'wp_head' ).../general-template.php:3197
110.30064150304WP_Hook->do_action( $args = [0 => ''] ).../plugin.php:522
120.30064150304WP_Hook->apply_filters( $value = '', $args = [0 => ''] ).../class-wp-hook.php:365
130.30084155384wp_enqueue_scripts( '' ).../class-wp-hook.php:341
140.30084155384do_action( $hook_name = 'wp_enqueue_scripts' ).../script-loader.php:2311
150.30084155600WP_Hook->do_action( $args = [0 => ''] ).../plugin.php:522
160.30084155600WP_Hook->apply_filters( $value = '', $args = [0 => ''] ).../class-wp-hook.php:365
170.30104281904twentyfifteen_scripts( '' ).../class-wp-hook.php:341
180.30114283424wp_style_add_data( $handle = 'twentyfifteen-ie', $key = 'conditional', $value = 'lt IE 9' ).../functions.php:440
190.30114283424WP_Styles->add_data( $handle = 'twentyfifteen-ie', $key = 'conditional', $value = 'lt IE 9' ).../functions.wp-styles.php:245
200.30114283424WP_Dependencies->add_data( $handle = 'twentyfifteen-ie', $key = 'conditional', $value = 'lt IE 9' ).../class-wp-styles.php:385
210.30114283424_deprecated_argument( $function_name = 'WP_Dependencies->add_data()', $version = '6.9.0', $message = 'IE conditional comments are ignored by all supported browsers.' ).../class-wp-dependencies.php:317
220.30114283744wp_trigger_error( $function_name = '', $message = 'Function WP_Dependencies->add_data() was called with an argument that is <strong>deprecated</strong> since version 6.9.0! IE conditional comments are ignored by all supported browsers.', $error_level = 16384 ).../functions.php:5925
230.30114284496trigger_error( $message = 'Function WP_Dependencies-&gt;add_data() was called with an argument that is <strong>deprecated</strong> since version 6.9.0! IE conditional comments are ignored by all supported browsers.', $error_level = 16384 ).../functions.php:6131

( ! ) Deprecated: Function WP_Dependencies-&gt;add_data() was called with an argument that is <strong>deprecated</strong> since version 6.9.0! IE conditional comments are ignored by all supported browsers. in /var/www/html/wp-includes/functions.php on line 6131
Call Stack
#TimeMemoryFunctionLocation
10.0000484072{main}( ).../index.php:0
20.0005484424require( '/var/www/html/wp-blog-header.php ).../index.php:17
30.29504113112require_once( '/var/www/html/wp-includes/template-loader.php ).../wp-blog-header.php:19
40.30014141824include( '/var/www/html/wp-content/themes/twentyfifteen/archive.php ).../template-loader.php:125
50.30014141824get_header( $name = ???, $args = ??? ).../archive.php:19
60.30014142040locate_template( $template_names = [0 => 'header.php'], $load = TRUE, $load_once = TRUE, $args = [] ).../general-template.php:48
70.30014142136load_template( $_template_file = '/var/www/html/wp-content/themes/twentyfifteen/header.php', $load_once = TRUE, $args = [] ).../template.php:749
80.30044142680require_once( '/var/www/html/wp-content/themes/twentyfifteen/header.php ).../template.php:814
90.30064150088wp_head( ).../header.php:18
100.30064150088do_action( $hook_name = 'wp_head' ).../general-template.php:3197
110.30064150304WP_Hook->do_action( $args = [0 => ''] ).../plugin.php:522
120.30064150304WP_Hook->apply_filters( $value = '', $args = [0 => ''] ).../class-wp-hook.php:365
130.30084155384wp_enqueue_scripts( '' ).../class-wp-hook.php:341
140.30084155384do_action( $hook_name = 'wp_enqueue_scripts' ).../script-loader.php:2311
150.30084155600WP_Hook->do_action( $args = [0 => ''] ).../plugin.php:522
160.30084155600WP_Hook->apply_filters( $value = '', $args = [0 => ''] ).../class-wp-hook.php:365
170.30104281904twentyfifteen_scripts( '' ).../class-wp-hook.php:341
180.32654284840wp_style_add_data( $handle = 'twentyfifteen-ie7', $key = 'conditional', $value = 'lt IE 8' ).../functions.php:444
190.32654284840WP_Styles->add_data( $handle = 'twentyfifteen-ie7', $key = 'conditional', $value = 'lt IE 8' ).../functions.wp-styles.php:245
200.32654284840WP_Dependencies->add_data( $handle = 'twentyfifteen-ie7', $key = 'conditional', $value = 'lt IE 8' ).../class-wp-styles.php:385
210.32654284840_deprecated_argument( $function_name = 'WP_Dependencies->add_data()', $version = '6.9.0', $message = 'IE conditional comments are ignored by all supported browsers.' ).../class-wp-dependencies.php:317
220.32654285160wp_trigger_error( $function_name = '', $message = 'Function WP_Dependencies->add_data() was called with an argument that is <strong>deprecated</strong> since version 6.9.0! IE conditional comments are ignored by all supported browsers.', $error_level = 16384 ).../functions.php:5925
230.32664285384trigger_error( $message = 'Function WP_Dependencies-&gt;add_data() was called with an argument that is <strong>deprecated</strong> since version 6.9.0! IE conditional comments are ignored by all supported browsers.', $error_level = 16384 ).../functions.php:6131

HadoopDataTransport Hadoop数据移动方法

前段时间旧HADOOP升级内存,需要把某些共用数据文件迁移到临时的HADOOP上,如果用 hadoop 命令逐个复制,效率很低,
于是就写一个小程序,按照文件列表,把数据迁移到新HADOOP上,命令:

java -classpath ./lib/hadoop-core-1.0.2.jar:./lib/commons-logging-1.1.1.jar:./lib/commons-configuration-1.6.jar:./lib/commons-lang-2.4.jar:./ test.HadoopDataTransport filelist.txt 100

[root@datanode4 ~]# cat filelist.txt

/201401/21/3bb30e5f-cf3e-4182-a7c0-ed486f80a87a.mp3
/201401/21/1d62fff3-744e-41c9-8152-5243ee0ce7b4.mp3
/201401/21/784a53f4-2125-4bc6-bf6a-0adf40981a64.mp3

代码清单:不喜欢写注释。

package test;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class HadoopDataTransport implements Runnable {

private static FileSystem src_fs;
private static FileSystem des_fs;
private static List<String> FailureList = new ArrayList<String>();

private static List<Boolean> jobs = new ArrayList<Boolean>();

static final String basedir = “/data/album”; // 保存在HADOOP中的根目录

public static void main(String[] args) throws Exception {
List<String> al = new ArrayList<String>();
System.out.println(“read list from file : ” + args[0]);
int pm = Integer.parseInt(args[1]);
FileReader fr = new FileReader(args[0]);
BufferedReader br = new BufferedReader(fr);
String line;
while ((line = br.readLine()) != null) {
if (line!=null && line.length()>10) {
al.add(line);
}
}
int pmax = pm;
if(al.size()>0){
for(int i=0;i<al.size();i=i+pmax){
int max = ((i+pmax)>al.size()?al.size():(i+pmax));
new Thread(new HadoopDataTransport(al.subList(i, max),i,0)).start();
jobs.add(true);
}
}
br.close();
fr.close();
while(jobs.size()>0){
Thread.sleep(500);
}
if(FailureList.size()>0){// save failure list
saveFailurelist(args[0]);
}
}

private List<String> filelist;
private int threadid;
public HadoopDataTransport(List<String> list,int id,int opts){
this.filelist = list;
this.threadid = id;
}

@Override
public void run() {
int success = 0;
int failure =0;

for(String file : filelist){
System.out.println(threadid + ” ==> processing …. “+file);
try {
InputStream in = readFromHadoopInputStream(file);
if(in !=null){
String filename = createHdfsFile(file, in);
System.out.println(threadid + ” ==> “+filename + ” …… done.”);
success++;
}
} catch (Exception e) {
AddFailure(file);
System.out.println(threadid + ” ==> “+file + ” …… failure.” + e.getMessage());
failure++;
}
}
System.out.println(“===============ThreadId: “+threadid+” Finished Total:” + filelist.size() +” Success : “+ success+” Failure :”+ failure+”==========”);
jobs.remove(true);
}

private static void AddFailure(String filename){
FailureList.add(filename);
}
private static void saveFailurelist(String failurefile) throws Exception{
System.out.println(“Save “+”failure_”+failurefile);
FileWriter w = new FileWriter(“failure_”+failurefile);
PrintWriter out = new PrintWriter(w);
for(String s : FailureList){
out.println(s);
}
out.close();
}

private static String createHdfsFile(String dst, InputStream in)
throws Exception {
FSDataOutputStream out =des_fs.create(
getPath(dst));
IOUtils.copyBytes(in, out, 256 * 1024);
out.close();
return dst;
}

private static InputStream readFromHadoopInputStream(String dst) throws Exception {
Path path = getPath(dst);
if (src_fs.exists(path)) {
FSDataInputStream is = src_fs.open(path);
return is;
} else {
throw new Exception(“the file is not found .”);
}
}

private static Path getPath(String dir) {
return new Path(basedir + dir);
}

static {
Configuration src_conf = new Configuration();
src_conf.set(“fs.default.name”, “hdfs://192.168.2.50:8020”); // conf.set(“fs.default.name”,
// “hdfs://namenode-backup-vip:8020”);
src_conf.set(“dfs.block.size”, “524288”);
src_conf.set(“dfs.replication”, “2”);
src_conf.set(“dfs.permissions”, “false”);
src_conf.set(“dfs.permissions.supergroup”, “resin”);
src_conf.set(“dfs.web.ugi”, “resin”);
try {
src_fs = FileSystem.get(src_conf);
} catch (IOException e) {
e.printStackTrace();
}
System.out
.println(“Initialize Hadoop Server src fs hdfs://192.168.2.50:8020”);

Configuration des_conf = new Configuration();
des_conf.set(“fs.default.name”, “hdfs://192.168.2.85:8020”); // conf.set(“fs.default.name”,
// “hdfs://namenode-backup-vip:8020”);
des_conf.set(“dfs.block.size”, “524288”);
des_conf.set(“dfs.replication”, “2”);
des_conf.set(“dfs.permissions”, “false”);
des_conf.set(“dfs.permissions.supergroup”, “resin”);
des_conf.set(“dfs.web.ugi”, “resin”);
try {
des_fs = FileSystem.get(des_conf);
} catch (IOException e) {
e.printStackTrace();
}
System.out
.println(“Initialize Hadoop Server des fs hdfs://192.168.2.85:8020”);
}

}

upgrade tomcat 6.x to tomcat 7.x with 3 problems

今天把tomcat从6.0.18升级到7.0.25,发现了三个问题

问题1

java.lang.ClassNotFoundException: org.apache.catalina.mbeans.ServerLifecycleListener

发现居然找不到这个类,然后把catalina.jar下载下来反编译一看,mbeans这个文件夹居然是空的

解决办法

6.0.18以前,conf/server.xml里面的配置有这项

注释掉就可以了

问题2

严重: Begin event threw exception
java.lang.IllegalArgumentException: taglib definition not consistent with specification version

tomcat 6.0.18里面的web.xml里面的tab配置如下
http://java.sun.com/jstl/core
/WEB-INF/c.tld

tomcat 7.0.25里面web.xml的tag配置应该如下所示

http://java.sun.com/jstl/core
/WEB-INF/c.tld

 

问题3

Aug 11, 2015 10:41:11 AM org.apache.jasper.compiler.JDTCompiler$1 findType
SEVERE: Compilation error
org.eclipse.jdt.internal.compiler.classfmt.ClassFormatException

 

原来是JDK的版本问题,系统自带的是OpenJDK 1.8,

要改回OpenJDK 1.6

yum install java-1.6.0-openjdk