run-tests.php这是个什么东西?代码看看
解决方案 »
- 请问在APMServ5.2.0下如何安装pear
- 变量中含有单引号如何比较相等呢?
- php 用哪个免费的cms做门户站比较好
- 求一正规
- xdebug的怪异问题
- php 5开发工具
- 各位朋友,怎么让我们开发的网站在google搜索中,让他的排名更靠前一点,(google的排名原理)
- 请问这个正则表达式该怎么写?刚开始学正则,想找个范例
- php+iis+sqlserver文章输出问题?请指教
- 急啊,我的一个win2000server的服务器,有时,别的机子ping的时候,有时,很正常的,但有时,就会出出掉包现象,有时,更严重,ping都不
- 为什么在windows2000下执行和linux下不执行?
- 喜欢看网站的朋友请进.........,我的网站今天正式开通....
/*
+----------------------------------------------------------------------+
| PHP Version 4 |
+----------------------------------------------------------------------+
| Copyright (c) 1997-2002 The PHP Group |
+----------------------------------------------------------------------+
| This source file is subject to version 2.02 of the PHP license, |
| that is bundled with this package in the file LICENSE, and is |
| available at through the world-wide-web at |
| http://www.php.net/license/2_02.txt. |
| If you did not receive a copy of the PHP license and are unable to |
| obtain it through the world-wide-web, please send a note to |
| [email protected] so we can mail you a copy immediately. |
+----------------------------------------------------------------------+
| Authors: Ilia Alshanetsky <[email protected]> |
| Preston L. Bannister <[email protected]> |
| Marcus Boerger <[email protected]> |
| Derick Rethans <[email protected]> |
| Sander Roobol <[email protected]> |
| (based on version by: Stig Bakken <[email protected]>) |
| (based on the PHP 3 test framework by Rasmus Lerdorf) |
+----------------------------------------------------------------------+
*//*
Require exact specification of PHP executable to test (no guessing!).
Die if any internal errors encountered in test script.
Regularized output for simpler post-processing of output.
Optionally output error lines indicating the failing test source and log
for direct jump with MSVC or Emacs.
*//*
* TODO:
 * - do not test PEAR components if base class and/or component class cannot be instantiated
*/
/* Sanity check to ensure that the pcre extension needed by this script is available.
 * In the event it is not, print a nice error message indicating that this script will
 * not run without it.
 */
if (!extension_loaded("pcre")) {
	// Heredoc opener must be followed by a newline; the banner starts on the next line.
	echo <<<NO_PCRE_ERROR

+-----------------------------------------------------------+
|                       ! ERROR !                           |
| The test-suite requires that you have pcre extension      |
| enabled. To enable this extension either compile your PHP |
| with --with-pcre-regex or if you've compiled pcre as a    |
| shared module load it via php.ini.                        |
+-----------------------------------------------------------+

NO_PCRE_ERROR;
	exit;
}

// change into the PHP source directory.
if (getenv('TEST_PHP_SRCDIR')) {
	@chdir(getenv('TEST_PHP_SRCDIR'));
}
$cwd = getcwd();

// Test runs can take a long time; never time out.
set_time_limit(0);

// delete as much output buffers as possible
while (@ob_end_clean());
if (ob_get_level()) echo "Not all buffers were deleted.\n";

error_reporting(E_ALL);
// this would break tests by modifying EXPECT sections
ini_set('magic_quotes_runtime', 0);

if (ini_get('safe_mode')) {
	// Heredoc opener must be followed by a newline; the banner starts on the next line.
	echo <<< SAFE_MODE_WARNING

+-----------------------------------------------------------+
|                       ! WARNING !                         |
| You are running the test-suite with "safe_mode" ENABLED ! |
|                                                           |
| Chances are high that no test will work at all,           |
| depending on how you configured "safe_mode" !             |
+-----------------------------------------------------------+

SAFE_MODE_WARNING;
}

// Don't ever guess at the PHP executable location.
// Require the explicit specification.
// Otherwise we could end up testing the wrong file!
if (getenv('TEST_PHP_EXECUTABLE')) {
	$php = getenv('TEST_PHP_EXECUTABLE');
	// 'auto' means: use the CLI binary built in this source tree.
	if ($php == 'auto') {
		$php = $cwd . '/sapi/cli/php';
		putenv("TEST_PHP_EXECUTABLE=$php");
	}
}
if (empty($php) || !file_exists($php)) {
	error("environment variable TEST_PHP_EXECUTABLE must be set to specify PHP executable!");
}

// NOTE(review): the letters presumably select which per-test artifact files
// to write on failure (log/expect/out/diff) — confirm against run_test().
if (getenv('TEST_PHP_LOG_FORMAT')) {
	$log_format = strtoupper(getenv('TEST_PHP_LOG_FORMAT'));
} else {
	$log_format = 'LEOD';
}

if (function_exists('is_executable') && !@is_executable($php)) {
	error("invalid PHP executable specified by TEST_PHP_EXECUTABLE = " . $php);
}

// Check whether a detailed log is wanted.
if (getenv('TEST_PHP_DETAILED')) {
	define('DETAILED', getenv('TEST_PHP_DETAILED'));
} else {
	define('DETAILED', 0);
}

// Check whether user test dirs are requested.
if (getenv('TEST_PHP_USER')) {
	$user_tests = explode(',', getenv('TEST_PHP_USER'));
} else {
	$user_tests = array();
}

// Get info from php
// Write a small probe script next to this file, run it with the tested
// binary, and capture its banner output (shown in the context header below).
$info_file = realpath(dirname(__FILE__)) . '/run-test-info.php';
@unlink($info_file);
// Source of the probe script; it is executed by $php, not by this process.
$php_info = '<?php echo "
PHP_SAPI : " . PHP_SAPI . "
PHP_VERSION : " . phpversion() . "
ZEND_VERSION: " . zend_version() . "
PHP_OS : " . PHP_OS . " - " . php_uname() . "
INI actual : " . realpath(get_cfg_var("cfg_file_path")) . "
More .INIs : " . (function_exists(\'php_ini_scanned_files\') ? str_replace("\n","", php_ini_scanned_files()) : "** not determined **"); ?>';
save_text($info_file, $php_info);
// INI settings forced for every test invocation so tests see a predictable
// configuration regardless of the local php.ini.
$ini_overwrites = array(
	'mbstring.script_encoding=pass',
	'output_handler=',
	'zlib.output_compression=Off',
	'open_basedir=',
	'safe_mode=0',
	'disable_functions=',
	'output_buffering=Off',
	// NOTE(review): 2047 was E_ALL on the PHP version this script targeted — confirm.
	'error_reporting=2047',
	'display_errors=1',
	'log_errors=0',
	'html_errors=0',
	'track_errors=1',
	'report_memleaks=1',
	'docref_root=/phpmanual/',
	'docref_ext=.html',
	'error_prepend_string=',
	'error_append_string=',
	'auto_prepend_file=',
	'auto_append_file=',
	'magic_quotes_runtime=0',
);
$info_params = array();
// settings2array()/settings2params() are presumably defined elsewhere in this
// file (not visible in this chunk); they appear to turn the overwrite list
// into command-line parameters for $php — confirm.
settings2array($ini_overwrites,$info_params);
settings2params($info_params);
// Run the probe script with the tested binary and the forced settings.
$php_info = `$php $info_params $info_file`;
@unlink($info_file);
// Ask the tested binary itself for its version (used when submitting reports).
define('TESTED_PHP_VERSION', `$php -r 'echo PHP_VERSION;'`);

// Write test context information.
echo "
=====================================================================
CWD         : $cwd
PHP         : $php $php_info
Extra dirs  : ";
foreach ($user_tests as $test_dir) {
	echo "{$test_dir}\n              ";
}
echo "
=====================================================================
";

// Determine the tests to be run.
$test_files = array();
$test_results = array();
$GLOBALS['__PHP_FAILED_TESTS__'] = array();

// If parameters given assume they represent selected tests to run.
if (isset($argc) && $argc > 1) {
	for ($i = 1; $i < $argc; $i++) {
		$testfile = realpath($argv[$i]);
		if (is_dir($testfile)) {
			find_files($testfile);
		} else if (preg_match("/\.phpt$/", $testfile)) {
			$test_files[] = $testfile;
		}
	}
	$test_files = array_unique($test_files);

	// Run selected tests.
	if (count($test_files)) {
		usort($test_files, "test_sort");
		echo "Running selected tests.\n";
		foreach ($test_files as $name) {
			$test_results[$name] = run_test($php, $name);
		}
		// preg_match() instead of the deprecated ereg() (removed in PHP 7);
		// the rest of this file already uses PCRE.
		if (getenv('REPORT_EXIT_STATUS') == 1 and preg_match('/FAILED( |$)/', implode(' ', $test_results))) {
			exit(1);
		}
		exit(0);
	}
}
// Full-scan mode: collect every *.phpt below the standard and user test dirs.
$test_files = array();
// Tests under ext/<name> run only when extension <name> is currently loaded.
$exts_to_test = get_loaded_extensions();
$exts_tested = count($exts_to_test);
$exts_skipped = 0;
$ignored_by_ext = 0;
sort($exts_to_test);
// Standard locations; only the 'ext' tree gets per-extension filtering.
$test_dirs = array('tests', 'pear', 'ext');foreach ($test_dirs as $dir) {
	find_files("{$cwd}/{$dir}", ($dir == 'ext'));
}foreach ($user_tests as $dir) {
	find_files($dir, ($dir == 'ext'));
}
// Recursively gather *.phpt files under $dir into the global $test_files,
// deleting stale *.tmp files from previous runs as it goes.
//   $is_ext_dir: immediate subdirectories are extension dirs; those whose
//                name is not a loaded extension are skipped (counted).
//   $ignore:     keep recursing (so *.tmp cleanup still happens) but count
//                any tests found as ignored instead of collecting them.
}function find_files($dir,$is_ext_dir=FALSE,$ignore=FALSE)
{
	global $test_files, $exts_to_test, $ignored_by_ext, $exts_skipped, $exts_tested; $o = opendir($dir) or error("cannot open directory: $dir");
	while (($name = readdir($o)) !== FALSE) {
		if (is_dir("{$dir}/{$name}") && !in_array($name, array('.', '..', 'CVS'))) {
			$skip_ext = ($is_ext_dir && !in_array($name, $exts_to_test));
			if ($skip_ext) {
				$exts_skipped++;
			}
			// Recurse; a skipped extension's subtree is walked with $ignore set.
			find_files("{$dir}/{$name}", FALSE, $ignore || $skip_ext);
		} // Cleanup any left-over tmp files from last run.
		if (substr($name, -4) == '.tmp') {
			@unlink("$dir/$name");
			continue;
		} // Otherwise we're only interested in *.phpt files.
		if (substr($name, -5) == '.phpt') {
			if ($ignore) {
				$ignored_by_ext++;
			} else {
				$testfile = realpath("{$dir}/{$name}");
				$test_files[] = $testfile;
			}
		}
	}
	closedir($o);
// usort() comparator: paths under {$cwd}/tests sort before everything else,
// and {$cwd}/tests/run-test* sorts first within those; ties fall back to
// plain strcmp() ordering.
}function test_sort($a, $b) {
	global $cwd; $ta = strpos($a, "{$cwd}/tests")===0 ? 1 + (strpos($a, "{$cwd}/tests/run-test")===0 ? 1 : 0) : 0;
	$tb = strpos($b, "{$cwd}/tests")===0 ? 1 + (strpos($b, "{$cwd}/tests/run-test")===0 ? 1 : 0) : 0;
	if ($ta == $tb) {
		return strcmp($a, $b);
	} else {
		return $tb - $ta;
	}
}$test_files = array_unique($test_files);
usort($test_files, "test_sort");

$start_time = time();

echo "TIME START " . date('Y-m-d H:i:s', $start_time) . "
=====================================================================
";

// Run every collected test; run_test() returns a status string
// (one of PASSED/WARNED/SKIPPED/FAILED) keyed by test path.
foreach ($test_files as $name) {
	$test_results[$name] = run_test($php, $name);
}

$end_time = time();

// Summarize results
if (0 == count($test_results)) {
	echo "No tests were run.\n";
	return;
}

$n_total = count($test_results);
$n_total += $ignored_by_ext;

$sum_results = array('PASSED' => 0, 'WARNED' => 0, 'SKIPPED' => 0, 'FAILED' => 0);
foreach ($test_results as $v) {
	$sum_results[$v]++;
}
$sum_results['SKIPPED'] += $ignored_by_ext;

$percent_results = array();
// foreach instead of each(): each() is deprecated/removed in modern PHP and
// the rest of this file already iterates with foreach.
foreach ($sum_results as $v => $n) {
	$percent_results[$v] = (100.0 * $n) / $n_total;
}

echo "
=====================================================================
TIME END " . date('Y-m-d H:i:s', $end_time);

$summary = "
=====================================================================
TEST RESULT SUMMARY
---------------------------------------------------------------------
Exts skipped    : " . sprintf("%4d",$exts_skipped) . "
Exts tested     : " . sprintf("%4d",$exts_tested) . "
---------------------------------------------------------------------
Number of tests : " . sprintf("%4d",$n_total) . "
Tests skipped   : " . sprintf("%4d (%2.1f%%)",$sum_results['SKIPPED'],$percent_results['SKIPPED']) . "
Tests warned    : " . sprintf("%4d (%2.1f%%)",$sum_results['WARNED'],$percent_results['WARNED']) . "
Tests failed    : " . sprintf("%4d (%2.1f%%)",$sum_results['FAILED'],$percent_results['FAILED']) . "
Tests passed    : " . sprintf("%4d (%2.1f%%)",$sum_results['PASSED'],$percent_results['PASSED']) . "
---------------------------------------------------------------------
Time taken      : " . sprintf("%4d seconds", $end_time - $start_time) . "
=====================================================================
";
echo $summary;

// Collect a compact list of the failing tests for display and for the report.
$failed_test_summary = '';
if (count($GLOBALS['__PHP_FAILED_TESTS__'])) {
	$failed_test_summary .= "
=====================================================================
FAILED TEST SUMMARY
---------------------------------------------------------------------
";
	foreach ($GLOBALS['__PHP_FAILED_TESTS__'] as $failed_test_data) {
		$failed_test_summary .= $failed_test_data['test_name'] . $failed_test_data['info'] . "\n";
	}
	$failed_test_summary .= "=====================================================================\n";
}

if ($failed_test_summary && !getenv('NO_PHPTEST_SUMMARY')) {
	echo $failed_test_summary;
}

define('PHP_QA_EMAIL', '[email protected]');
define('QA_SUBMISSION_PAGE', 'http://qa.php.net/buildtest-process.php');
// Interactive wrap-up: unless NO_INTERACTION is set, offer to send the
// collected results to the PHP QA team, or just save them to a local file.
if (!getenv('NO_INTERACTION')) {
	$fp = fopen("php://stdin", "r+");
	echo "\nPlease allow this report to be sent to the PHP QA\nteam. This will give us a better understanding in how\n";
	echo "PHP's test cases are doing. Note that the report will include\ndetailed configuration data about your system\n";
	echo "so if you are worried about exposing sensitive data,\nsave this to a file first and remove any sensitive data\n";
	echo "and then send this file to [email protected].\n";
	echo "(choose \"s\" to just save the results to a file)? [Yns]: ";
	flush();
	$user_input = fgets($fp, 10);
	// 's' = save only; empty input or 'y' = build and submit the report.
	$just_save_results = (strtolower($user_input[0]) == 's');
	if ($just_save_results || strlen(trim($user_input)) == 0 || strtolower($user_input[0]) == 'y') {
		/*
		 * Collect information about the host system for our report
		 * Fetch phpinfo() output so that we can see the PHP environment
		 * Make an archive of all the failed tests
		 * Send an email
		 */
		/* Ask the user to provide an email address, so that QA team can contact the user */
		if (!strncasecmp($user_input, 'y', 1) || strlen(trim($user_input)) == 0) {
			echo "\nPlease enter your email address.\n(You address will be mangled so that it will not go out on any\nmailinglist in plain text): ";
			flush();
			$fp = fopen("php://stdin", "r+");
			// Obfuscate the address so it cannot be harvested from list archives.
			$user_email = trim(fgets($fp, 1024));
			$user_email = str_replace("@", " at ", str_replace(".", " dot ", $user_email));
		}
		// Assemble the report: summaries first, then per-failure details.
		$failed_tests_data = '';
		$sep = "\n" . str_repeat('=', 80) . "\n";
		$failed_tests_data .= $failed_test_summary . "\n";
		$failed_tests_data .= $summary . "\n"; if ($sum_results['FAILED']) {
			foreach ($GLOBALS['__PHP_FAILED_TESTS__'] as $test_info) {
				$failed_tests_data .= $sep . $test_info['name'] . $test_info['info'];
				$failed_tests_data .= $sep . file_get_contents(realpath($test_info['output']));
				$failed_tests_data .= $sep . file_get_contents(realpath($test_info['diff']));
				$failed_tests_data .= $sep . "\n\n";
			}
			$status = "failed";
		} else {
			$status = "success";
		}
		// Append build-environment details (toolchain versions, linked libs).
		$failed_tests_data .= "\n" . $sep . 'BUILD ENVIRONMENT' . $sep;
		$failed_tests_data .= "OS:\n" . PHP_OS . " - " . php_uname() . "\n\n";
		$ldd = $automake = $autoconf = $libtool = $compiler = 'N/A'; if (substr(PHP_OS, 0, 3) != "WIN") {
			$automake = shell_exec('automake --version');
			$autoconf = shell_exec('autoconf --version');
			/* Always use the generated libtool - Mac OSX uses 'glibtool' */
			$libtool = shell_exec('./libtool --version');
			/* Try the most common flags for 'version' */
			$flags = array('-v', '-V', '--version');
			$cc_status=0;
			foreach($flags AS $flag) {
				system(getenv('CC')." $flag >/dev/null 2>&1", $cc_status);
				if ($cc_status == 0) {
					$compiler = shell_exec(getenv('CC')." $flag 2>&1");
					break;
				}
			}
			$ldd = shell_exec("ldd $php");
		}
		$failed_tests_data .= "Automake:\n$automake\n";
		$failed_tests_data .= "Autoconf:\n$autoconf\n";
		$failed_tests_data .= "Libtool:\n$libtool\n";
		$failed_tests_data .= "Compiler:\n$compiler\n";
		$failed_tests_data .= "Bison:\n". @shell_exec('bison --version'). "\n";
		$failed_tests_data .= "Libraries:\n$ldd\n";
		$failed_tests_data .= "\n";
		if (isset($user_email)) {
			$failed_tests_data .= "User's E-mail: ".$user_email."\n\n";
		}
		// Full phpinfo() output from the tested binary, HTML formatting off.
		$failed_tests_data .= $sep . "PHPINFO" . $sep;
		$failed_tests_data .= shell_exec($php.' -dhtml_errors=0 -i');
		// Compression of the report is currently disabled (always 0).
		$compression = 0;
		// Save to a file when requested, or when the upload attempt fails.
		if ($just_save_results || !mail_qa_team($failed_tests_data, $compression, $status)) {
			$output_file = 'php_test_results_' . date('Ymd') . ( $compression ? '.txt.gz' : '.txt' );
			$fp = fopen($output_file, "w");
			fwrite($fp, $failed_tests_data);
			fclose($fp);
			if (!$just_save_results) {
				echo "\nThe test script was unable to automatically send the report to PHP's QA Team\n";
			} echo "Please send ".$output_file." to ".PHP_QA_EMAIL." manually, thank you.\n";
		} else {
			// NOTE(review): in this branch $fp is still the php://stdin handle
			// opened above; writing the thank-you note to it (and re-closing it)
			// looks unintended — confirm whether this should be an echo instead.
			fwrite($fp, "\nThank you for helping to make PHP better.\n");
			fclose($fp);
		}
	}
}
if (getenv('REPORT_EXIT_STATUS') == 1 and $sum_results['FAILED']) {
	exit(1);
}//
// Send Email to QA Team
//
/**
 * POST the collected test report to QA_SUBMISSION_PAGE.
 *
 * @param string $data        full report text
 * @param int    $compression unused by this implementation (caller passes 0)
 * @param string $status      "failed" or "success" (FALSE when unknown)
 * @return bool|int FALSE when the socket cannot be opened, 1 after posting
 */
function mail_qa_team($data, $compression, $status = FALSE)
{
	$url_bits = parse_url(QA_SUBMISSION_PAGE);
	if (empty($url_bits['port'])) $url_bits['port'] = 80;

	// NUL bytes would break the form post; make them visible as "[0x0]".
	$data = "php_test_data=" . urlencode(base64_encode(preg_replace("/[\\x00]/", "[0x0]", $data)));
	$data_length = strlen($data);

	$fs = fsockopen($url_bits['host'], $url_bits['port'], $errno, $errstr, 10);
	if (!$fs) {
		return FALSE;
	}

	$php_version = urlencode(TESTED_PHP_VERSION);

	echo "\nPosting to {$url_bits['host']} {$url_bits['path']}\n";
	fwrite($fs, "POST ".$url_bits['path']."?status=$status&version=$php_version HTTP/1.1\r\n");
	fwrite($fs, "Host: ".$url_bits['host']."\r\n");
	fwrite($fs, "User-Agent: QA Browser 0.1\r\n");
	fwrite($fs, "Content-Type: application/x-www-form-urlencoded\r\n");
	fwrite($fs, "Content-Length: ".$data_length."\r\n\r\n");
	fwrite($fs, $data);
	fwrite($fs, "\r\n\r\n");
	fclose($fs);

	return 1;
}
//
// Write the given text to a temporary file, and return the filename.
//
/**
 * Write $text to $filename; aborts via error() if the file cannot be opened.
 * With TEST_PHP_DETAILED > 1, also echoes the file contents for debugging.
 */
function save_text($filename,$text)
{
	$fp = @fopen($filename,'w') or error("Cannot open file '" . $filename . "' (save_text)");
	fwrite($fp,$text);
	fclose($fp);
	if (1 < DETAILED) echo "
FILE $filename {{{
$text
}}}
";
}

//
// Write an error in a format recognizable to Emacs or MSVC.
//
/**
 * Print the failing test file and its log in a jump-to-error format chosen
 * by the TEST_PHP_ERROR_STYLE environment variable (MSVC or EMACS).
 * Any other value (or an unset variable) prints nothing.
 */
function error_report($testname,$logname,$tested)
{
	$testname = realpath($testname);
	$logname = realpath($logname);
	switch (strtoupper(getenv('TEST_PHP_ERROR_STYLE'))) {
		case 'MSVC':
			echo $testname . "(1) : $tested\n";
			echo $logname . "(1) : $tested\n";
			break;
		case 'EMACS':
			echo $testname . ":1: $tested\n";
			echo $logname . ":1: $tested\n";
			break;
	}
}

//
// Run an individual test case.
//