用BufferedInputStream阿 BufferedInputStream(InputStream in, int size) Creates a BufferedInputStream with the specified buffer size, and saves its argument, the input stream in, for later use.API里复制的,呵呵。
BufferedInputStream(InputStream in, int size) 创建具有指定缓冲区大小的 BufferedInputStream,并保存其参数,即输入流 in,以便将来使用
laziogo() 这位仁兄正解啊 我这几天在看《THINKING IN JAVA》的java.nio有这个东西 MappedByteBuffer out = new RandomAccessFile("test.dat", "rw").getChannel() .map(FileChannel.MapMode.READ_WRITE, 0, length);
MappedByteBuffer out = new RandomAccessFile("test.dat", "rw").getChannel() .map(FileChannel.MapMode.READ_WRITE, 0, length); ------------------------ 这个也许并不好,因为,共享的事进程的地址空间总共2G,实际能用的估计也就1G。如果还有其他IO之类,就会出错了。拙见
我读过6000行4800列的DEM数据是这样读的 import java.io.*; import java.awt.*; public class ReadFile { public static void main(String[] args) { int hang=6000; int lie=4800; int m=1,n=1; int point; int i=1,j=1; try { BufferedInputStream Input=new BufferedInputStream(new FileInputStream("D:/today/today 30/read/E140S10.DEM")); DataInputStream DataIn=new DataInputStream(Input); System.out.println("X Y Z"); while(m<=hang) { point=DataIn.readShort();
// ShortHand: Mapping an entire file into memory for reading public static void MappedFile(String fileName) throws Exception { long length = new File(fileName).length(); MappedByteBuffer in = new FileInputStream(fileName).getChannel().map(FileChannel.MapMode.READ_ONLY,0,length); int i = 0; while(i < length) { System.out.print((char)in.get(i++)); } }
private static long len = 100000; while (readEnd) { for (int i = 0; i < len; i++) { line = bReader.readLine(); if (line == null) { readEnd = false; break; } numList.add(line.trim()); } for (int i = 0; i < numList.size(); i++) { String num = numList.get(i); if (!isBlackNumber(num)) {// 重庆地区和黑名单号码 blackList.add(num); // numList.remove(i); } else { useList.add(num); } } for (String num : useList) { usedBufferedwrite.write(num + "\r\n"); usedCount++; } for (String blackNum : blackList) { invalidBufferedwrite.write(blackNum + "\r\n"); invalidCount++; } numList.clear(); blackList.clear(); useList.clear(); }边读边处理肯定很慢,全读再处理可能会溢出,所以就读100000每次,处理了再读
{
// fixed: "new FileInputStream(File file)" was a syntax error — pass the
// existing variable, not a parameter-style declaration
BufferedReader bin = new BufferedReader(new InputStreamReader(new FileInputStream(file)));
String str = null;
// Read the file one line at a time; put the per-line processing inside the loop.
while((str = bin.readLine()) != null)
{
// process one line here
}
bin.close();
}catch(IOException ioe){
// fixed: empty catch silently swallowed I/O errors
ioe.printStackTrace();
}
BufferedInputStream(InputStream in, int size)
Creates a BufferedInputStream with the specified buffer size, and saves its argument, the input stream in, for later use.API里复制的,呵呵。
创建具有指定缓冲区大小的 BufferedInputStream,并保存其参数,即输入流 in,以便将来使用
不行,建立InputStream对象的时候已经把指向的文件读成流,存在内存里了。
我就是用上面提到的方法,不行。 liltos(糊涂鬼) ( ) 信誉:99 Blog 2007-3-23 15:20:03 得分: 0
学一下 nio
这个还没有试
//------------------------
import help.Functions;
import java.io.RandomAccessFile;

/**
 * Demo: reads the first 100 bytes of a large file with RandomAccessFile,
 * printing them as hex (16 per row) and reporting approximate memory use.
 */
public class A {
public static void main(String[] args) {
RandomAccessFile rF = null;
try {
// freeMemory() is only a rough indicator; GC activity can skew it
long start = Runtime.getRuntime().freeMemory();
rF = new RandomAccessFile("D:/cxz/压缩软件/S60-SDK-200634-3.1-Cpp-f.1090b.zip", "r");
for (int i = 0; i < 100; i++) {
if ((i % 16) == 0)
System.out.print("\r\n");
// Functions.getByteHexStr is a project helper — presumably formats one byte as hex
System.out.print(Functions.getByteHexStr(rF.readByte()) + " ");
}
long end = Runtime.getRuntime().freeMemory();
// fixed: message typo "menory" -> "memory"
System.out.println("\n\nused memory:" + (start - end) / 1024 + "KB");
} catch (Exception e) {
e.printStackTrace();
} finally {
// fixed: close the file even when an exception is thrown (was leaked on error)
if (rF != null) {
try {
rF.close();
} catch (Exception ignored) {
// best-effort close
}
}
}
}
}
原讨论链接:http://community.csdn.net/expert/topicview1.asp?id=5355433
所属论坛:J2SE / 基础类 审核组:JAVA
提问者:lpg4_012 解决者:flushtime
感谢:flushtime
关键字:Java J2SE / 基础类 文件 import mb
答案:
我要从一个文本文件中提有用的数据
文本文件200多MB
是不是可以建一个缓存来把有用的数据一段一段的提出来,请问该怎么做?
--------------------------------------------------------------- 呵呵,200MB而已...
JAVA中可以使用内存映射文件来操作大文件.
最大可达2GB.
下面是个简单的示例,更具体的自己看Java API DOCS或相关资料
import java.io.*;
import java.nio.*;
import java.nio.channels.*;
/**
 * Demo: memory-mapped file I/O via java.nio.
 * Maps a ~144 MB region of test.dat read-write, fills it with 'x',
 * then reads back six bytes from the middle of the mapping.
 */
public class LargeMappedFiles {
// fixed comment: 0x8FFFFFF = 150,994,943 bytes ≈ 144 MB (128 MB would be 0x8000000)
static int length = 0x8FFFFFF;
public static void main(String[] args) throws Exception {
// fixed: keep a reference to the RandomAccessFile so it can be closed (was leaked)
RandomAccessFile file = new RandomAccessFile("test.dat", "rw");
try {
MappedByteBuffer out =
file.getChannel().map(FileChannel.MapMode.READ_WRITE, 0, length);
for(int i = 0; i < length; i++)
out.put((byte)'x');
System.out.println("Finished writing");
for(int i = length/2; i < length/2 + 6; i++)
System.out.print((char)out.get(i)); //read file
} finally {
// closing releases the file descriptor; the mapping itself stays valid until GC'd
file.close();
}
}
}
首先,你要将-Xmx调大至512M,其实,再看看你的代码有什么问题。
BufferedInputStream绝对不是缓存整个文件流的。而是读取一大块,缓存起来,这个块的大小一般也就在几K左右。
用NIO当然可以解决问题。
这位仁兄正解啊
我这几天在看《THINKING IN JAVA》的java.nio有这个东西
MappedByteBuffer out =
new RandomAccessFile("test.dat", "rw").getChannel()
.map(FileChannel.MapMode.READ_WRITE, 0, length);
new RandomAccessFile("test.dat", "rw").getChannel()
.map(FileChannel.MapMode.READ_WRITE, 0, length);
------------------------
这个也许并不好,因为,共享的是进程的地址空间,总共2G,实际能用的估计也就1G。如果还有其他IO之类,就会出错了。拙见
import java.io.*;
import java.awt.*;
/**
 * Reads a 6000x4800 DEM grid of big-endian 16-bit samples and prints
 * the values three per line (under an "X Y Z" header).
 */
public class ReadFile
{
public static void main(String[] args)
{
int hang=6000;   // number of rows
int lie=4800;    // number of columns
int m=1,n=1;     // current row / column (1-based)
int point;       // current sample value
int i=1;         // counts values on the current printed line (fixed: removed unused j)
DataInputStream dataIn = null;
try
{
BufferedInputStream input=new BufferedInputStream(new FileInputStream("D:/today/today 30/read/E140S10.DEM"));
dataIn=new DataInputStream(input);
System.out.println("X Y Z");
while(m<=hang)
{
point=dataIn.readShort(); // big-endian 16-bit sample
System.out.print(point);
System.out.print(" ");
i++;
if(i>3)
{
System.out.println();
i=1;
}
n++;
if(n>lie)
{
n=1;
m++;
}
}
}
catch(IOException e)
{
e.printStackTrace();
}
finally
{
// fixed: close the stream (was leaked); closing dataIn also closes the wrapped streams
if(dataIn != null)
{
try { dataIn.close(); } catch(IOException ignored) {}
}
}
}
}
// Maps an entire file into memory read-only and prints each byte as a char.
// NOTE(review): the byte->char cast only renders single-byte encodings
// (ASCII/Latin-1) correctly; multi-byte text will print garbage — confirm
// the expected file encoding.
public static void MappedFile(String fileName) throws Exception
{
long length = new File(fileName).length();
// fixed: keep a reference to the stream so it can be closed (was leaked)
FileInputStream fis = new FileInputStream(fileName);
try
{
MappedByteBuffer in = fis.getChannel().map(FileChannel.MapMode.READ_ONLY,0,length);
int i = 0;
while(i < length)
{
System.out.print((char)in.get(i++));
}
}
finally
{
// releases the file descriptor; the mapping itself is unmapped when GC'd
fis.close();
}
}
// Batch size: read up to 100,000 lines per pass, filter, write out, repeat —
// the author's compromise between per-line processing (slow) and reading the
// whole file at once (risks OutOfMemoryError on a 200 MB file).
private static long len = 100000;
// NOTE(review): fragment — bReader, line, numList, blackList, useList,
// usedBufferedwrite, invalidBufferedwrite, usedCount, invalidCount and
// readEnd are declared in the enclosing scope, which is not shown here.
while (readEnd) { for (int i = 0; i < len; i++) {
line = bReader.readLine();
if (line == null) {
// end of file: finish this batch, then stop the outer loop
readEnd = false;
break;
}
numList.add(line.trim());
} for (int i = 0; i < numList.size(); i++) {
String num = numList.get(i);
// NOTE(review): this branch looks inverted — numbers for which
// isBlackNumber() returns FALSE are added to blackList, while black
// numbers land in useList. Confirm against isBlackNumber()'s contract.
if (!isBlackNumber(num)) {// Chongqing-area and blacklisted numbers
blackList.add(num);
// numList.remove(i);
} else {
useList.add(num);
}
}
// write the kept numbers, CRLF-terminated
for (String num : useList) {
usedBufferedwrite.write(num + "\r\n");
usedCount++;
}
// write the rejected numbers
for (String blackNum : blackList) {
invalidBufferedwrite.write(blackNum + "\r\n");
invalidCount++;
}
// reset the batch buffers before the next 100,000-line pass
numList.clear();
blackList.clear();
useList.clear();
}边读边处理肯定很慢,全读再处理可能会溢出,所以就读100000每次,处理了再读